Bootstrap Collection (#1)

* Bootstrap Collection

* Basic common bootstrapping of repo
* Correct Galaxy settings
* Attempt to get Shippable working

* correct directory

* patch is part of ansible.posix (not community.general)

* tests/utils/shippable/ missing from ignore.txt

* shippable/units.sh from a/a:devel

* ignore.txt:patch

* CI: Only use group1 for integration

* Correct Repo URLs

Co-Authored-By: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>

* HACK: Install community.general

* run integration tests first

* Install community.general in correct location

* deleted too much

* Use Extended FQCN for community.general

* Use temp-2.10-devel

* Update tests/utils/shippable/sanity.sh

Co-Authored-By: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>

Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
Author: John R Barker, 2020-03-13 09:12:26 +00:00 (committed by GitHub)
Commit: 07f1a2e98e (parent: 2fded0100e)
33 changed files with 874 additions and 24 deletions

.github/settings.yml (new file)

@@ -0,0 +1,61 @@
###
# https://probot.github.io/apps/settings/
#
# DO NOT MODIFY
# this is a copy of https://github.com/gundalow-collection/.github/blob/master/.github/settings.yml
# Work around till https://github.com/probot/settings/pull/179 is merged
repository:
# See https://developer.github.com/v3/repos/#edit for all available settings.
has_issues: true
has_wiki: false
has_pages: false
default_branch: devel
allow_squash_merge: true
allow_merge_commit: false
allow_rebase_merge: true
# Labels: define labels for Issues and Pull Requests
labels:
- name: bug
color: fbca04
description: This issue/PR relates to a bug.
- name: feature
description: This issue/PR relates to a feature request.
color: 006b75
- name: migrated_from_ansible_ansible
color: 5319e7
description: This issue/PR was moved from gh/ansible/ansible
branches:
- name: master
# https://developer.github.com/v3/repos/branches/#update-branch-protection
# Branch Protection settings. Set to null to disable
protection:
# Required. Require at least one approving review on a pull request, before merging. Set to null to disable.
required_pull_request_reviews:
# The number of approvals required. (1-6)
required_approving_review_count: 1
# Dismiss approved reviews automatically when a new commit is pushed.
dismiss_stale_reviews: true
# Blocks merge until code owners have reviewed.
require_code_owner_reviews: true
# Specify which users and teams can dismiss pull request reviews. Pass an empty dismissal_restrictions object to disable. User and team dismissal_restrictions are only available for organization-owned repositories. Omit this parameter for personal repositories.
dismissal_restrictions:
users: []
teams: []
# Required. Require status checks to pass before merging. Set to null to disable
required_status_checks:
# Required. Require branches to be up to date before merging.
strict: true
# Required. The list of status checks to require in order to merge into this branch
contexts: []
# Required. Enforce all configured restrictions for administrators. Set to true to enforce required status checks for repository administrators. Set to null to disable.
enforce_admins: true
# Required. Restrict who can push to this branch. Team and user restrictions are only available for organization-owned repositories. Set to null to disable.
#restrictions:
# users: []
# teams: []

galaxy.yml

@@ -2,13 +2,14 @@ namespace: ansible
name: posix
version: 0.1.0
readme: README.md
authors: null
authors:
- Ansible (github.com/ansible)
description: null
license: GPL-3.0-or-later
license_file: COPYING
tags: null
dependencies: {}
repository: git@github.com:ansible-collection-migration/ansible.posix.git
documentation: https://github.com/ansible-collection-migration/ansible.posix/tree/master/docs
homepage: https://github.com/ansible-collection-migration/ansible.posix
issues: https://github.com/ansible-collection-migration/ansible.posix/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc
repository: https://github.com/ansible-collections/ansible.posix
documentation: https://github.com/ansible-collections/ansible.posix/tree/master/docs
homepage: https://github.com/ansible-collections/ansible.posix
issues: https://github.com/ansible-collections/ansible.posix
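For local testing, a minimal sketch of building and installing the collection from a checkout of this repository, assuming the galaxy.yml above (the tarball name follows the namespace-name-version convention; paths are illustrative):

# sketch: build and install this collection from a local checkout (illustrative paths)
cd ~/src/ansible.posix                      # assumed checkout location of this repository
ansible-galaxy collection build             # produces ansible-posix-0.1.0.tar.gz per galaxy.yml
ansible-galaxy collection install ansible-posix-0.1.0.tar.gz -p ~/.ansible/collections

The shippable.sh script added later in this commit uses the same build/install steps to pull in community.general for the integration tests.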

plugins/modules/patch.py (new file)

@@ -0,0 +1,214 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Luis Alberto Perez Lazaro <luisperlazaro@gmail.com>
# Copyright: (c) 2015, Jakub Jirutka <jakub@jirutka.cz>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: patch
author:
- Jakub Jirutka (@jirutka)
- Luis Alberto Perez Lazaro (@luisperlaz)
description:
- Apply patch files using the GNU patch tool.
short_description: Apply patch files using the GNU patch tool
options:
basedir:
description:
- Path of a base directory in which the patch file will be applied.
- May be omitted when C(dest) option is specified, otherwise required.
type: path
dest:
description:
- Path of the file on the remote machine to be patched.
- The names of the files to be patched are usually taken from the patch
file, but if there's just one file to be patched it can be specified with
this option.
type: path
aliases: [ originalfile ]
src:
description:
- Path of the patch file as accepted by the GNU patch tool. If
C(remote_src) is 'no', the patch source file is looked up from the
module's I(files) directory.
type: path
required: true
aliases: [ patchfile ]
state:
description:
- Whether the patch should be applied or reverted.
type: str
choices: [ absent, present ]
default: present
remote_src:
description:
- If C(no), it will search for src at originating/master machine, if C(yes) it will
go to the remote/target machine for the C(src).
type: bool
default: no
strip:
description:
- Number that indicates the smallest prefix containing leading slashes
that will be stripped from each file name found in the patch file.
- For more information see the strip parameter of the GNU patch tool.
type: int
default: 0
backup:
description:
- Passes C(--backup --version-control=numbered) to patch, producing numbered backup copies.
type: bool
default: no
binary:
description:
- Setting to C(yes) will disable patch's heuristic for transforming CRLF
line endings into LF.
- Line endings of src and dest must match.
- If set to C(no), C(patch) will replace CRLF in C(src) files on POSIX.
type: bool
default: no
notes:
- This module requires the GNU I(patch) utility to be installed on the remote host.
'''
EXAMPLES = r'''
- name: Apply patch to one file
patch:
src: /tmp/index.html.patch
dest: /var/www/index.html
- name: Apply patch to multiple files under basedir
patch:
src: /tmp/customize.patch
basedir: /var/www
strip: 1
- name: Revert patch to one file
patch:
src: /tmp/index.html.patch
dest: /var/www/index.html
state: absent
'''
import os
import platform
from traceback import format_exc
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
class PatchError(Exception):
pass
def add_dry_run_option(opts):
# Older versions of FreeBSD, OpenBSD and NetBSD support the --check option only.
if platform.system().lower() in ['openbsd', 'netbsd', 'freebsd']:
opts.append('--check')
else:
opts.append('--dry-run')
def is_already_applied(patch_func, patch_file, basedir, dest_file=None, binary=False, strip=0, state='present'):
opts = ['--quiet', '--forward',
"--strip=%s" % strip, "--directory='%s'" % basedir,
"--input='%s'" % patch_file]
add_dry_run_option(opts)
if binary:
opts.append('--binary')
if dest_file:
opts.append("'%s'" % dest_file)
if state == 'present':
opts.append('--reverse')
(rc, _, _) = patch_func(opts)
return rc == 0
def apply_patch(patch_func, patch_file, basedir, dest_file=None, binary=False, strip=0, dry_run=False, backup=False, state='present'):
opts = ['--quiet', '--forward', '--batch', '--reject-file=-',
"--strip=%s" % strip, "--directory='%s'" % basedir,
"--input='%s'" % patch_file]
if dry_run:
add_dry_run_option(opts)
if binary:
opts.append('--binary')
if dest_file:
opts.append("'%s'" % dest_file)
if backup:
opts.append('--backup --version-control=numbered')
if state == 'absent':
opts.append('--reverse')
(rc, out, err) = patch_func(opts)
if rc != 0:
msg = err or out
raise PatchError(msg)
def main():
module = AnsibleModule(
argument_spec=dict(
src=dict(type='path', required=True, aliases=['patchfile']),
dest=dict(type='path', aliases=['originalfile']),
basedir=dict(type='path'),
strip=dict(type='int', default=0),
remote_src=dict(type='bool', default=False),
# NB: for 'backup' parameter, semantics is slightly different from standard
# since patch will create numbered copies, not strftime("%Y-%m-%d@%H:%M:%S~")
backup=dict(type='bool', default=False),
binary=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
),
required_one_of=[['dest', 'basedir']],
supports_check_mode=True,
)
# Create type object as namespace for module params
p = type('Params', (), module.params)
if not os.access(p.src, os.R_OK):
module.fail_json(msg="src %s doesn't exist or not readable" % (p.src))
if p.dest and not os.access(p.dest, os.W_OK):
module.fail_json(msg="dest %s doesn't exist or not writable" % (p.dest))
if p.basedir and not os.path.exists(p.basedir):
module.fail_json(msg="basedir %s doesn't exist" % (p.basedir))
if not p.basedir:
p.basedir = os.path.dirname(p.dest)
patch_bin = module.get_bin_path('patch')
if patch_bin is None:
module.fail_json(msg="patch command not found")
def patch_func(opts):
return module.run_command('%s %s' % (patch_bin, ' '.join(opts)))
# patch needs an absolute file name
p.src = os.path.abspath(p.src)
changed = False
if not is_already_applied(patch_func, p.src, p.basedir, dest_file=p.dest, binary=p.binary, strip=p.strip, state=p.state):
try:
apply_patch(patch_func, p.src, p.basedir, dest_file=p.dest, binary=p.binary, strip=p.strip,
dry_run=module.check_mode, backup=p.backup, state=p.state)
changed = True
except PatchError as e:
module.fail_json(msg=to_native(e), exception=format_exc())
module.exit_json(changed=changed)
if __name__ == '__main__':
main()
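For orientation, the option lists built above translate into a GNU patch command line roughly like the following (values are illustrative and mirror the basedir example in EXAMPLES):

# sketch of the command run via module.run_command() when applying a patch under a base directory
patch --quiet --forward --batch --reject-file=- \
    --strip=1 --directory='/var/www' --input='/tmp/customize.patch'
# the idempotency probe in is_already_applied() uses the same core options plus
# --dry-run (or --check on older BSDs) and --reverse when state=present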

shippable.yml (new file)

@@ -0,0 +1,94 @@
language: python
env:
matrix:
- T=none
matrix:
exclude:
- env: T=none
include:
# FIXME Move integration tests after units
- env: T=aix/7.2/1
- env: T=osx/10.11/1
- env: T=rhel/7.6/1
- env: T=rhel/8.1/1
- env: T=freebsd/11.1/1
- env: T=freebsd/12.1/1
- env: T=linux/centos6/1
- env: T=linux/centos7/1
- env: T=linux/centos8/1
- env: T=linux/fedora30/1
- env: T=linux/fedora31/1
- env: T=linux/opensuse15py2/1
- env: T=linux/opensuse15/1
- env: T=linux/ubuntu1604/1
- env: T=linux/ubuntu1804/1
- env: T=sanity/1
- env: T=sanity/2
- env: T=sanity/3
- env: T=sanity/4
- env: T=sanity/5
- env: T=units/2.6/1
- env: T=units/2.7/1
- env: T=units/3.5/1
- env: T=units/3.6/1
- env: T=units/3.7/1
- env: T=units/3.8/1
- env: T=units/3.9/1
- env: T=units/2.6/2
- env: T=units/2.7/2
- env: T=units/3.5/2
- env: T=units/3.6/2
- env: T=units/3.7/2
- env: T=units/3.8/2
- env: T=units/3.9/2
- env: T=units/2.6/3
- env: T=units/2.7/3
- env: T=units/3.5/3
- env: T=units/3.6/3
- env: T=units/3.7/3
- env: T=units/3.8/3
- env: T=units/3.9/3
- env: T=fallaxy/2.7/1
- env: T=fallaxy/3.6/1
# - env: T=i/aix/7.2
# - env: T=i/osx/10.11
# - env: T=i/rhel/7.6
# - env: T=i/rhel/8.1
# - env: T=i/freebsd/11.1
# - env: T=i/freebsd/12.1
# - env: T=i/linux/centos6
# - env: T=i/linux/centos7
# - env: T=i/linux/centos8
# - env: T=i/linux/fedora30
# - env: T=i/linux/fedora31
# - env: T=i/linux/opensuse15py2
# - env: T=i/linux/opensuse15
# - env: T=i/linux/ubuntu1604
# - env: T=i/linux/ubuntu1804
branches:
except:
- "*-patch-*"
- "revert-*-*"
build:
ci:
- tests/utils/shippable/timing.sh tests/utils/shippable/shippable.sh $T
integrations:
notifications:
- integrationName: email
type: email
on_success: never
on_failure: never
on_start: never
on_pull_request: never
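Each T value in the matrix is passed to the dispatcher script added later in this commit; a minimal sketch of how one entry is routed:

# sketch: shippable.sh splits a matrix entry such as T=units/3.6/1 on '/' and ':'
T="units/3.6/1"
IFS='/:' read -ra args <<< "$T"
script="${args[0]}"                              # -> units
echo "tests/utils/shippable/${script}.sh ${T}"   # -> tests/utils/shippable/units.sh units/3.6/1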

@@ -1,2 +1,2 @@
shippable/posix/group2
shippable/posix/group1
skip/aix

@@ -1,2 +1,2 @@
needs/root
shippable/posix/group2
shippable/posix/group1

@@ -1,4 +1,4 @@
needs/privileged
needs/root
shippable/posix/group2
shippable/posix/group1
skip/aix

@@ -231,7 +231,7 @@
when: ansible_system in ('Linux')
- name: Format FS
when: ansible_system in ('Linux')
community.general.filesystem:
community.general.system.filesystem:
fstype: ext3
dev: /tmp/myfs.img
- name: Mount the FS for the first time

@@ -1,3 +1,3 @@
destructive
shippable/posix/group2
shippable/posix/group1
skip/aix

@@ -19,7 +19,7 @@
- name: patch the origin file in check mode
check_mode: true
register: result
community.general.patch:
patch:
src: result.patch
dest: '{{ output_dir }}/patch/workfile.txt'
- name: verify patch the origin file in check mode
@@ -28,7 +28,7 @@
- result is changed
- name: patch the origin file
register: result
community.general.patch:
patch:
src: result.patch
dest: '{{ output_dir }}/patch/workfile.txt'
- name: verify patch the origin file
@@ -37,7 +37,7 @@
- result is changed
- name: test patch the origin file idempotency
register: result
community.general.patch:
patch:
src: result.patch
dest: '{{ output_dir }}/patch/workfile.txt'
- name: verify test patch the origin file idempotency
@@ -53,7 +53,7 @@
- name: patch the workfile file in check mode state absent
check_mode: true
register: result
community.general.patch:
patch:
src: result.patch
dest: '{{ output_dir }}/patch/workfile.txt'
state: absent
@@ -63,7 +63,7 @@
- result is changed
- name: patch the workfile file state absent
register: result
community.general.patch:
patch:
src: result.patch
dest: '{{ output_dir }}/patch/workfile.txt'
state: absent
@@ -73,7 +73,7 @@
- result is changed
- name: patch the workfile file state absent idempotency
register: result
community.general.patch:
patch:
src: result.patch
dest: '{{ output_dir }}/patch/workfile.txt'
state: absent

@@ -1,3 +1,3 @@
needs/root
shippable/posix/group2
shippable/posix/group1
skip/aix

@@ -1,3 +1,3 @@
needs/root
shippable/posix/group2
shippable/posix/group1
skip/aix

@@ -4,7 +4,7 @@
- name: attempt to add mapping without 'seuser'
register: selogin_error
ignore_errors: true
community.general.selogin:
community.general.system.selogin:
login: seuser
- name: verify failure
assert:
@@ -18,7 +18,7 @@
- false
- true
- false
community.general.selogin:
community.general.system.selogin:
login: seuser
seuser: staff_u
- name: new mapping- verify functionality and check_mode
@@ -36,7 +36,7 @@
- false
- true
- false
community.general.selogin:
community.general.system.selogin:
login: seuser
seuser: user_u
- name: changed mapping- verify functionality and check_mode
@@ -54,7 +54,7 @@
- false
- true
- false
community.general.selogin:
community.general.system.selogin:
login: seuser
state: absent
- name: delete mapping- verify functionality and check_mode

@@ -1 +1 @@
shippable/posix/group2
shippable/posix/group1

@@ -1,6 +1,7 @@
plugins/module_utils/ismount.py future-import-boilerplate
plugins/module_utils/ismount.py metaclass-boilerplate
plugins/modules/acl.py validate-modules:parameter-type-not-in-doc
plugins/modules/patch.py pylint:blacklisted-name
plugins/modules/synchronize.py pylint:blacklisted-name
plugins/modules/synchronize.py use-argspec-type-path
plugins/modules/synchronize.py validate-modules:doc-default-does-not-match-spec
@@ -29,4 +30,6 @@ tests/unit/modules/system/test_mount.py metaclass-boilerplate
tests/unit/modules/utils.py future-import-boilerplate
tests/unit/modules/utils.py metaclass-boilerplate
tests/unit/plugins/action/test_synchronize.py future-import-boilerplate
tests/unit/plugins/action/test_synchronize.py metaclass-boilerplate
tests/unit/plugins/action/test_synchronize.py metaclass-boilerplate
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang

@@ -0,0 +1 @@
remote.sh

@@ -0,0 +1 @@
cloud.sh

@@ -0,0 +1 @@
cloud.sh

tests/utils/shippable/check_matrix.py (new file)

@@ -0,0 +1,120 @@
#!/usr/bin/env python
"""Verify the currently executing Shippable test matrix matches the one defined in the "shippable.yml" file."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import json
import os
import re
import sys
import time
try:
from typing import NoReturn
except ImportError:
NoReturn = None
try:
# noinspection PyCompatibility
from urllib2 import urlopen # pylint: disable=ansible-bad-import-from
except ImportError:
# noinspection PyCompatibility
from urllib.request import urlopen
def main(): # type: () -> None
"""Main entry point."""
repo_full_name = os.environ['REPO_FULL_NAME']
required_repo_full_name = 'ansible-collections/ansible.posix'
if repo_full_name != required_repo_full_name:
sys.stderr.write('Skipping matrix check on repo "%s" which is not "%s".\n' % (repo_full_name, required_repo_full_name))
return
with open('shippable.yml', 'rb') as yaml_file:
yaml = yaml_file.read().decode('utf-8').splitlines()
defined_matrix = [match.group(1) for match in [re.search(r'^ *- env: T=(.*)$', line) for line in yaml] if match and match.group(1) != 'none']
if not defined_matrix:
fail('No matrix entries found in the "shippable.yml" file.',
'Did you modify the "shippable.yml" file?')
run_id = os.environ['SHIPPABLE_BUILD_ID']
sleep = 1
jobs = []
for attempts_remaining in range(4, -1, -1):
try:
jobs = json.loads(urlopen('https://api.shippable.com/jobs?runIds=%s' % run_id).read())
if not isinstance(jobs, list):
raise Exception('Shippable run %s data is not a list.' % run_id)
break
except Exception as ex:
if not attempts_remaining:
fail('Unable to retrieve Shippable run %s matrix.' % run_id,
str(ex))
sys.stderr.write('Unable to retrieve Shippable run %s matrix: %s\n' % (run_id, ex))
sys.stderr.write('Trying again in %d seconds...\n' % sleep)
time.sleep(sleep)
sleep *= 2
if len(jobs) != len(defined_matrix):
if len(jobs) == 1:
hint = '\n\nMake sure you do not use the "Rebuild with SSH" option.'
else:
hint = ''
fail('Shippable run %s has %d jobs instead of the expected %d jobs.' % (run_id, len(jobs), len(defined_matrix)),
'Try re-running the entire matrix.%s' % hint)
actual_matrix = dict((job.get('jobNumber'), dict(tuple(line.split('=', 1)) for line in job.get('env', [])).get('T', '')) for job in jobs)
errors = [(job_number, test, actual_matrix.get(job_number)) for job_number, test in enumerate(defined_matrix, 1) if actual_matrix.get(job_number) != test]
if len(errors):
error_summary = '\n'.join('Job %s expected "%s" but found "%s" instead.' % (job_number, expected, actual) for job_number, expected, actual in errors)
fail('Shippable run %s has a job matrix mismatch.' % run_id,
'Try re-running the entire matrix.\n\n%s' % error_summary)
def fail(message, output): # type: (str, str) -> NoReturn
# Include a leading newline to improve readability on Shippable "Tests" tab.
# Without this, the first line becomes indented.
output = '\n' + output.strip()
timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
# hack to avoid requiring junit-xml, which isn't pre-installed on Shippable outside our test containers
xml = '''
<?xml version="1.0" encoding="utf-8"?>
<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">
\t<testsuite disabled="0" errors="1" failures="0" file="None" log="None" name="ansible-test" skipped="0" tests="1" time="0" timestamp="%s" url="None">
\t\t<testcase classname="timeout" name="timeout">
\t\t\t<error message="%s" type="error">%s</error>
\t\t</testcase>
\t</testsuite>
</testsuites>
''' % (timestamp, message, output)
path = 'shippable/testresults/check-matrix.xml'
dir_path = os.path.dirname(path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
with open(path, 'w') as junit_fd:
junit_fd.write(xml.lstrip())
sys.stderr.write(message + '\n')
sys.stderr.write(output + '\n')
sys.exit(1)
if __name__ == '__main__':
main()
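For a quick local sanity check, the extraction the script performs on shippable.yml can be approximated from a shell (a sketch, not part of CI):

# sketch: list the defined matrix entries, excluding the 'none' placeholder
grep -E '^ *- env: T=' shippable.yml | sed -E 's/^ *- env: T=//' | grep -v '^none$'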

tests/utils/shippable/cloud.sh (new executable file)

@@ -0,0 +1,34 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
cloud="${args[0]}"
python="${args[1]}"
group="${args[2]}"
target="shippable/${cloud}/group${group}/"
stage="${S:-prod}"
changed_all_target="shippable/${cloud}/smoketest/"
if ! ansible-test integration "${changed_all_target}" --list-targets > /dev/null 2>&1; then
# no smoketest tests are available for this cloud
changed_all_target="none"
fi
if [ "${group}" == "1" ]; then
# only run smoketest tests for group1
changed_all_mode="include"
else
# smoketest tests already covered by group1
changed_all_mode="exclude"
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--remote-terminate always --remote-stage "${stage}" \
--docker --python "${python}" --changed-all-target "${changed_all_target}" --changed-all-mode "${changed_all_mode}"

tests/utils/shippable/cs.sh (new symbolic link)

@@ -0,0 +1 @@
cloud.sh

@@ -0,0 +1 @@
remote.sh

@@ -0,0 +1 @@
cloud.sh

tests/utils/shippable/linux.sh (new executable file)

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
image="${args[1]}"
if [ "${#args[@]}" -gt 2 ]; then
target="shippable/posix/group${args[2]}/"
else
target="shippable/posix/"
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--docker "${image}"

@@ -0,0 +1 @@
remote.sh

tests/utils/shippable/remote.sh (new executable file)

@@ -0,0 +1,22 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
platform="${args[0]}"
version="${args[1]}"
if [ "${#args[@]}" -gt 2 ]; then
target="shippable/posix/group${args[2]}/"
else
target="shippable/posix/"
fi
stage="${S:-prod}"
provider="${P:-default}"
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
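As a usage sketch, the rhel/8.1/1 entry from the matrix reaches this script as shown below (stage and provider fall back to their defaults):

# sketch: invocation for the rhel/8.1/1 matrix entry
tests/utils/shippable/remote.sh rhel/8.1/1
# which expands to roughly:
#   ansible-test integration --color -v --retry-on-error shippable/posix/group1/ \
#     --remote rhel/8.1 --remote-terminate always --remote-stage prod --remote-provider default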

@@ -0,0 +1 @@
remote.sh

tests/utils/shippable/sanity.sh (new file)

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -o pipefail -eux
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
--docker

tests/utils/shippable/shippable.sh (new file)

@@ -0,0 +1,136 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
script="${args[0]}"
test="$1"
docker images ansible/ansible
docker images quay.io/ansible/*
docker ps
for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v '^drydock/' | sed 's/^.* //'); do
docker rm -f "${container}" || true # ignore errors
done
docker ps
if [ -d /home/shippable/cache/ ]; then
ls -la /home/shippable/cache/
fi
command -v python
python -V
command -v pip
pip --version
pip list --disable-pip-version-check
export PATH="${PWD}/bin:${PATH}"
export PYTHONIOENCODING='utf-8'
if [ "${JOB_TRIGGERED_BY_NAME:-}" == "nightly-trigger" ]; then
COVERAGE=yes
COMPLETE=yes
fi
if [ -n "${COVERAGE:-}" ]; then
# on-demand coverage reporting triggered by setting the COVERAGE environment variable to a non-empty value
export COVERAGE="--coverage"
elif [[ "${COMMIT_MESSAGE}" =~ ci_coverage ]]; then
# on-demand coverage reporting triggered by having 'ci_coverage' in the latest commit message
export COVERAGE="--coverage"
else
# on-demand coverage reporting disabled (default behavior, always-on coverage reporting remains enabled)
export COVERAGE="--coverage-check"
fi
if [ -n "${COMPLETE:-}" ]; then
# disable change detection triggered by setting the COMPLETE environment variable to a non-empty value
export CHANGED=""
elif [[ "${COMMIT_MESSAGE}" =~ ci_complete ]]; then
# disable change detection triggered by having 'ci_complete' in the latest commit message
export CHANGED=""
else
# enable change detection (default behavior)
export CHANGED="--changed"
fi
if [ "${IS_PULL_REQUEST:-}" == "true" ]; then
# run unstable tests which are targeted by focused changes on PRs
export UNSTABLE="--allow-unstable-changed"
else
# do not run unstable tests outside PRs
export UNSTABLE=""
fi
virtualenv --python /usr/bin/python3.7 ~/ansible-venv
set +ux
. ~/ansible-venv/bin/activate
set -ux
#pip install ansible==2.9.0 --disable-pip-version-check
pip install git+https://github.com/ansible/ansible.git@temp-2.10-devel --disable-pip-version-check
COLLECTION_DIR="${HOME}/.ansible/ansible_collections/"
TEST_DIR="${COLLECTION_DIR}/ansible/posix"
mkdir -p "${TEST_DIR}"
cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
cd "${TEST_DIR}"
function cleanup
{
if [ -d tests/output/coverage/ ]; then
if find tests/output/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
# for complete on-demand coverage generate a report for all files with no coverage on the "other" job so we only have one copy
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/1" ]; then
stub="--stub"
else
stub=""
fi
# shellcheck disable=SC2086
ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
cp -a tests/output/reports/coverage=*.xml shippable/codecoverage/
fi
fi
if [ -d tests/output/junit/ ]; then
cp -aT tests/output/junit/ shippable/testresults/
fi
if [ -d tests/output/data/ ]; then
cp -a tests/output/data/ shippable/testresults/
fi
if [ -d tests/output/bot/ ]; then
cp -aT tests/output/bot/ shippable/testresults/
fi
}
trap cleanup EXIT
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=60
else
timeout=45
fi
# START: HACK install dependencies
(
mkdir /tmp/collection_deps
git clone https://github.com/ansible-collections/community.general.git /tmp/collection_deps/community.general
cd /tmp/collection_deps/community.general
ansible-galaxy collection build
ansible-galaxy collection install community-general* -p "${COLLECTION_DIR}"
)
# END: HACK
ansible-test env --dump --show --timeout "${timeout}" --color -v
"tests/utils/shippable/check_matrix.py"
"tests/utils/shippable/${script}.sh" "${test}"

tests/utils/shippable/timing.py (new executable file)

@@ -0,0 +1,16 @@
#!/usr/bin/env python3.7
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
start = time.time()
sys.stdin.reconfigure(errors='surrogateescape')
sys.stdout.reconfigure(errors='surrogateescape')
for line in sys.stdin:
seconds = time.time() - start
sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
sys.stdout.flush()

tests/utils/shippable/timing.sh (new file)

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -o pipefail -eu
"$@" 2>&1 | "$(dirname "$0")/timing.py"

@@ -0,0 +1 @@
cloud.sh

tests/utils/shippable/units.sh (new executable file)

@@ -0,0 +1,110 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
version="${args[1]}"
group="${args[2]}"
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=90
else
timeout=30
fi
group1=()
group2=()
group3=()
# create three groups by putting network tests into separate groups
# add or remove network platforms as needed to balance the groups
networks2=(
aireos
apconos
aruba
asa
avi
check_point
cloudengine
cloudvision
cnos
cumulus
dellos10
dellos6
dellos9
edgeos
edgeswitch
enos
eos
eric_eccli
exos
f5
fortimanager
frr
ftd
icx
ingate
ios
iosxr
ironware
itential
junos
netact
netscaler
netvisor
nos
nso
nuage
nxos
onyx
opx
ovs
radware
routeros
slxos
voss
vyos
)
networks3=(
fortios
)
for network in "${networks2[@]}"; do
test_path="test/units/modules/network/${network}/"
if [ -d "${test_path}" ]; then
group1+=(--exclude "${test_path}")
group2+=("${test_path}")
fi
done
for network in "${networks3[@]}"; do
test_path="test/units/modules/network/${network}/"
if [ -d "${test_path}" ]; then
group1+=(--exclude "${test_path}")
group3+=("${test_path}")
fi
done
case "${group}" in
1) options=("${group1[@]:+${group1[@]}}") ;;
2) options=("${group2[@]:+${group2[@]}}") ;;
3) options=("${group3[@]:+${group3[@]}}") ;;
esac
if [ ${#options[@]} -eq 0 ] && [ "${group}" -gt 1 ]; then
# allow collection migration unit tests for groups other than 1 to "pass" without updating shippable.yml or this script during migration
echo "No unit tests found for group ${group}."
exit
fi
ansible-test env --timeout "${timeout}" --color -v
# shellcheck disable=SC2086
ansible-test units --color -v --docker default --python "${version}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
"${options[@]:+${options[@]}}" \