Compare commits


No commits in common. "main" and "v2.0.17" have entirely different histories.

89 changed files with 3174 additions and 2457 deletions

.chglog/CHANGELOG.tpl.md Executable file

@ -0,0 +1,23 @@
# Changelog
{{ range .Versions -}}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})
{{ range .CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ (regexReplaceAll "(.*)/issues/(.*)" (regexReplaceAll "(Co-\\w*-by.*)" .Subject "") "${1}/pull/${2}") | trim }}
{{ end }}
{{- end -}}
{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}
{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}

.chglog/config.yml Executable file

@ -0,0 +1,25 @@
style: github
template: CHANGELOG.tpl.md
info:
title: CHANGELOG
repository_url: https://github.com/thegeeklab/ansible-later
options:
commit_groups:
title_maps:
feat: Features
fix: Bug Fixes
perf: Performance Improvements
refactor: Code Refactoring
chore: Others
test: Testing
ci: CI Pipeline
docs: Documentation
header:
pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
pattern_maps:
- Type
- Scope
- Subject
notes:
keywords:
- BREAKING CHANGE
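
The template and config above drive git-chglog, which the build-package pipeline further down uses to render release notes. A minimal local invocation might look like the following, assuming git-chglog is installed and run from the repository root (where it picks up .chglog/config.yml by default):

# Render the changelog for a tagged release; CI passes ${DRONE_TAG} here and
# falls back to "--next-tag unreleased unreleased" on non-tag builds.
git-chglog --no-color --no-emoji -o CHANGELOG.md v2.0.17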


@@ -18,9 +18,8 @@ HostVars
 Rolesfile
 Makefile
 Jinja2
-ANS([0-9]{3})
-YML([0-9]{3})
+ANSIBLE([0-9]{4})
+LINT([0-9]{4})
 SCM
 bools
 Check[A-Z].+
-(P|p)re-(C|c)ommit

.drone.jsonnet Normal file

@ -0,0 +1,506 @@
local PythonVersion(pyversion='3.8') = {
name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
image: 'python:' + pyversion,
environment: {
PY_COLORS: 1,
},
commands: [
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install -E ansible-core',
'poetry run pytest',
'poetry version',
'poetry run ansible-later --help',
],
depends_on: [
'fetch',
],
};
local PipelineLint = {
kind: 'pipeline',
name: 'lint',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'yapf',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install',
'poetry run yapf -dr ./ansiblelater',
],
},
{
name: 'flake8',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry install -E ansible-core',
'poetry run flake8 ./ansiblelater',
],
},
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineTest = {
kind: 'pipeline',
name: 'test',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'fetch',
image: 'python:3.10',
commands: [
'git fetch -tq',
],
},
PythonVersion(pyversion='3.8'),
PythonVersion(pyversion='3.9'),
PythonVersion(pyversion='3.10'),
{
name: 'codecov',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
CODECOV_TOKEN: { from_secret: 'codecov_token' },
},
commands: [
'pip install codecov -qq',
'codecov --required -X gcov',
],
depends_on: [
'python38-pytest',
'python39-pytest',
'python310-pytest',
],
},
],
depends_on: [
'lint',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineSecurity = {
kind: 'pipeline',
name: 'security',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'bandit',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry install -E ansible-core',
'poetry run bandit -r ./ansiblelater -x ./ansiblelater/test',
],
},
],
depends_on: [
'test',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineBuildPackage = {
kind: 'pipeline',
name: 'build-package',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'build',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
},
{
name: 'checksum',
image: 'alpine',
commands: [
'cd dist/ && sha256sum * > ../sha256sum.txt',
],
},
{
name: 'changelog-generate',
image: 'thegeeklab/git-chglog',
commands: [
'git fetch -tq',
'git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}',
],
},
{
name: 'changelog-format',
image: 'thegeeklab/alpine-tools',
commands: [
'prettier CHANGELOG.md',
'prettier -w CHANGELOG.md',
],
},
{
name: 'publish-github',
image: 'plugins/github-release',
settings: {
overwrite: true,
api_key: { from_secret: 'github_token' },
files: ['dist/*', 'sha256sum.txt'],
title: '${DRONE_TAG}',
note: 'CHANGELOG.md',
},
when: {
ref: ['refs/tags/**'],
},
},
{
name: 'publish-pypi',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry publish -n',
],
environment: {
POETRY_HTTP_BASIC_PYPI_USERNAME: { from_secret: 'pypi_username' },
POETRY_HTTP_BASIC_PYPI_PASSWORD: { from_secret: 'pypi_password' },
},
when: {
ref: ['refs/tags/**'],
},
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineBuildContainer(arch='amd64') = {
local build = if arch == 'arm' then [{
name: 'build',
image: 'python:3.10-alpine',
commands: [
'apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo',
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
environment: {
CARGO_NET_GIT_FETCH_WITH_CLI: true,
},
}] else [{
name: 'build',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
}],
kind: 'pipeline',
name: 'build-container-' + arch,
platform: {
os: 'linux',
arch: arch,
},
steps: build + [
{
name: 'dryrun',
image: 'thegeeklab/drone-docker:19',
settings: {
dry_run: true,
dockerfile: 'docker/Dockerfile.' + arch,
repo: 'thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
depends_on: ['build'],
when: {
ref: ['refs/pull/**'],
},
},
{
name: 'publish-dockerhub',
image: 'thegeeklab/drone-docker:19',
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'docker/Dockerfile.' + arch,
repo: 'thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
depends_on: ['dryrun'],
},
{
name: 'publish-quay',
image: 'thegeeklab/drone-docker:19',
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'docker/Dockerfile.' + arch,
registry: 'quay.io',
repo: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'quay_username' },
password: { from_secret: 'quay_password' },
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
depends_on: ['dryrun'],
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineDocs = {
kind: 'pipeline',
name: 'docs',
platform: {
os: 'linux',
arch: 'amd64',
},
concurrency: {
limit: 1,
},
steps: [
{
name: 'assets',
image: 'thegeeklab/alpine-tools',
commands: [
'make doc',
],
},
{
name: 'markdownlint',
image: 'thegeeklab/markdownlint-cli',
commands: [
"markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'",
],
},
{
name: 'spellcheck',
image: 'thegeeklab/alpine-tools',
commands: [
"spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions",
],
environment: {
FORCE_COLOR: true,
NPM_CONFIG_LOGLEVEL: 'error',
},
},
{
name: 'testbuild',
image: 'thegeeklab/hugo:0.97.3',
commands: [
'hugo --panicOnWarning -s docs/ -b http://localhost:8000/',
],
},
{
name: 'link-validation',
image: 'thegeeklab/link-validator',
commands: [
'link-validator --nice --external --skip-file .linkcheckignore',
],
environment: {
LINK_VALIDATOR_BASE_DIR: 'docs/public',
},
},
{
name: 'build',
image: 'thegeeklab/hugo:0.97.3',
commands: [
'hugo --panicOnWarning -s docs/',
],
},
{
name: 'beautify',
image: 'thegeeklab/alpine-tools',
commands: [
"html-beautify -r -f 'docs/public/**/*.html'",
],
environment: {
FORCE_COLOR: true,
NPM_CONFIG_LOGLEVEL: 'error',
},
},
{
name: 'publish',
image: 'plugins/s3-sync',
settings: {
access_key: { from_secret: 's3_access_key' },
bucket: 'geekdocs',
delete: true,
endpoint: 'https://sp.rknet.org',
path_style: true,
secret_key: { from_secret: 's3_secret_access_key' },
source: 'docs/public/',
strip_prefix: 'docs/public/',
target: '/${DRONE_REPO_NAME}',
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
},
],
depends_on: [
'build-package',
'build-container-amd64',
'build-container-arm64',
'build-container-arm',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineNotifications = {
kind: 'pipeline',
name: 'notifications',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
image: 'plugins/manifest',
name: 'manifest-dockerhub',
settings: {
ignore_missing: true,
auto_tag: true,
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
spec: 'docker/manifest.tmpl',
},
when: {
status: ['success'],
},
},
{
image: 'plugins/manifest',
name: 'manifest-quay',
settings: {
ignore_missing: true,
auto_tag: true,
username: { from_secret: 'quay_username' },
password: { from_secret: 'quay_password' },
spec: 'docker/manifest-quay.tmpl',
},
when: {
status: ['success'],
},
},
{
name: 'pushrm-dockerhub',
pull: 'always',
image: 'chko/docker-pushrm:1',
environment: {
DOCKER_PASS: {
from_secret: 'docker_password',
},
DOCKER_USER: {
from_secret: 'docker_username',
},
PUSHRM_FILE: 'README.md',
PUSHRM_SHORT: 'Another best practice scanner for Ansible roles and playbooks',
PUSHRM_TARGET: 'thegeeklab/${DRONE_REPO_NAME}',
},
when: {
status: ['success'],
},
},
{
name: 'pushrm-quay',
pull: 'always',
image: 'chko/docker-pushrm:1',
environment: {
APIKEY__QUAY_IO: {
from_secret: 'quay_token',
},
PUSHRM_FILE: 'README.md',
PUSHRM_TARGET: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
},
when: {
status: ['success'],
},
},
{
name: 'matrix',
image: 'thegeeklab/drone-matrix',
settings: {
homeserver: { from_secret: 'matrix_homeserver' },
roomid: { from_secret: 'matrix_roomid' },
template: 'Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}',
username: { from_secret: 'matrix_username' },
password: { from_secret: 'matrix_password' },
},
when: {
status: ['success', 'failure'],
},
},
],
depends_on: [
'docs',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**'],
status: ['success', 'failure'],
},
};
[
PipelineLint,
PipelineTest,
PipelineSecurity,
PipelineBuildPackage,
PipelineBuildContainer(arch='amd64'),
PipelineBuildContainer(arch='arm64'),
PipelineBuildContainer(arch='arm'),
PipelineDocs,
PipelineNotifications,
]
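
The .drone.yml that follows is the rendered YAML counterpart of this Jsonnet definition. A rough regeneration sketch, assuming the Drone CLI with Jsonnet support is available (command name and flags may differ between CLI versions):

# Render .drone.jsonnet into the multi-document .drone.yml checked in below.
drone jsonnet --source .drone.jsonnet --target .drone.yml --stream --format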

.drone.yml Normal file

@ -0,0 +1,635 @@
---
kind: pipeline
name: lint
platform:
os: linux
arch: amd64
steps:
- name: yapf
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry run yapf -dr ./ansiblelater
environment:
PY_COLORS: 1
- name: flake8
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run flake8 ./ansiblelater
environment:
PY_COLORS: 1
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
---
kind: pipeline
name: test
platform:
os: linux
arch: amd64
steps:
- name: fetch
image: python:3.10
commands:
- git fetch -tq
- name: python38-pytest
image: python:3.8
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install -E ansible-core
- poetry run pytest
- poetry version
- poetry run ansible-later --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python39-pytest
image: python:3.9
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install -E ansible-core
- poetry run pytest
- poetry version
- poetry run ansible-later --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python310-pytest
image: python:3.10
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install -E ansible-core
- poetry run pytest
- poetry version
- poetry run ansible-later --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: codecov
image: python:3.10
commands:
- pip install codecov -qq
- codecov --required -X gcov
environment:
CODECOV_TOKEN:
from_secret: codecov_token
PY_COLORS: 1
depends_on:
- python38-pytest
- python39-pytest
- python310-pytest
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- lint
---
kind: pipeline
name: security
platform:
os: linux
arch: amd64
steps:
- name: bandit
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run bandit -r ./ansiblelater -x ./ansiblelater/test
environment:
PY_COLORS: 1
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- test
---
kind: pipeline
name: build-package
platform:
os: linux
arch: amd64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: checksum
image: alpine
commands:
- cd dist/ && sha256sum * > ../sha256sum.txt
- name: changelog-generate
image: thegeeklab/git-chglog
commands:
- git fetch -tq
- git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}
- name: changelog-format
image: thegeeklab/alpine-tools
commands:
- prettier CHANGELOG.md
- prettier -w CHANGELOG.md
- name: publish-github
image: plugins/github-release
settings:
api_key:
from_secret: github_token
files:
- dist/*
- sha256sum.txt
note: CHANGELOG.md
overwrite: true
title: ${DRONE_TAG}
when:
ref:
- refs/tags/**
- name: publish-pypi
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n
environment:
POETRY_HTTP_BASIC_PYPI_PASSWORD:
from_secret: pypi_password
POETRY_HTTP_BASIC_PYPI_USERNAME:
from_secret: pypi_username
when:
ref:
- refs/tags/**
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-amd64
platform:
os: linux
arch: amd64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.amd64
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: amd64
dockerfile: docker/Dockerfile.amd64
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: amd64
dockerfile: docker/Dockerfile.amd64
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-arm64
platform:
os: linux
arch: arm64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.arm64
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm64
dockerfile: docker/Dockerfile.arm64
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm64
dockerfile: docker/Dockerfile.arm64
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-arm
platform:
os: linux
arch: arm
steps:
- name: build
image: python:3.10-alpine
commands:
- apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
environment:
CARGO_NET_GIT_FETCH_WITH_CLI: true
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.arm
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm
dockerfile: docker/Dockerfile.arm
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm
dockerfile: docker/Dockerfile.arm
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: docs
platform:
os: linux
arch: amd64
concurrency:
limit: 1
steps:
- name: assets
image: thegeeklab/alpine-tools
commands:
- make doc
- name: markdownlint
image: thegeeklab/markdownlint-cli
commands:
- markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'
- name: spellcheck
image: thegeeklab/alpine-tools
commands:
- spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions
environment:
FORCE_COLOR: true
NPM_CONFIG_LOGLEVEL: error
- name: testbuild
image: thegeeklab/hugo:0.97.3
commands:
- hugo --panicOnWarning -s docs/ -b http://localhost:8000/
- name: link-validation
image: thegeeklab/link-validator
commands:
- link-validator --nice --external --skip-file .linkcheckignore
environment:
LINK_VALIDATOR_BASE_DIR: docs/public
- name: build
image: thegeeklab/hugo:0.97.3
commands:
- hugo --panicOnWarning -s docs/
- name: beautify
image: thegeeklab/alpine-tools
commands:
- html-beautify -r -f 'docs/public/**/*.html'
environment:
FORCE_COLOR: true
NPM_CONFIG_LOGLEVEL: error
- name: publish
image: plugins/s3-sync
settings:
access_key:
from_secret: s3_access_key
bucket: geekdocs
delete: true
endpoint: https://sp.rknet.org
path_style: true
secret_key:
from_secret: s3_secret_access_key
source: docs/public/
strip_prefix: docs/public/
target: /${DRONE_REPO_NAME}
when:
ref:
- refs/heads/main
- refs/tags/**
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- build-package
- build-container-amd64
- build-container-arm64
- build-container-arm
---
kind: pipeline
name: notifications
platform:
os: linux
arch: amd64
steps:
- name: manifest-dockerhub
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
password:
from_secret: docker_password
spec: docker/manifest.tmpl
username:
from_secret: docker_username
when:
status:
- success
- name: manifest-quay
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
password:
from_secret: quay_password
spec: docker/manifest-quay.tmpl
username:
from_secret: quay_username
when:
status:
- success
- name: pushrm-dockerhub
pull: always
image: chko/docker-pushrm:1
environment:
DOCKER_PASS:
from_secret: docker_password
DOCKER_USER:
from_secret: docker_username
PUSHRM_FILE: README.md
PUSHRM_SHORT: Another best practice scanner for Ansible roles and playbooks
PUSHRM_TARGET: thegeeklab/${DRONE_REPO_NAME}
when:
status:
- success
- name: pushrm-quay
pull: always
image: chko/docker-pushrm:1
environment:
APIKEY__QUAY_IO:
from_secret: quay_token
PUSHRM_FILE: README.md
PUSHRM_TARGET: quay.io/thegeeklab/${DRONE_REPO_NAME}
when:
status:
- success
- name: matrix
image: thegeeklab/drone-matrix
settings:
homeserver:
from_secret: matrix_homeserver
password:
from_secret: matrix_password
roomid:
from_secret: matrix_roomid
template: "Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}"
username:
from_secret: matrix_username
when:
status:
- success
- failure
trigger:
ref:
- refs/heads/main
- refs/tags/**
status:
- success
- failure
depends_on:
- docs
---
kind: signature
hmac: 4bb6524bf4d0f793f53a9f7d97a641e3db52efab251d8f48bbd7bbccf03c431f
...
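
The hmac block directly above is the pipeline signature the Drone server validates against the repository's signing secret before trusting the configuration. A hedged sketch of refreshing it after editing the YAML, assuming the Drone CLI is configured for this server:

# Re-sign the configuration and append the new hmac to .drone.yml.
drone sign thegeeklab/ansible-later --save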


@@ -1,6 +1,7 @@
 repository:
   name: ansible-later
   description: Another best practice scanner for Ansible roles and playbooks
+  homepage: https://ansible-later.geekdocs.de
   topics: ansible, ansible-later, ansible-review, best practice
   private: false
@@ -51,11 +52,7 @@ branches:
       required_status_checks:
         strict: false
         contexts:
-          - ci/woodpecker/pr/lint
-          - ci/woodpecker/pr/test
-          - ci/woodpecker/pr/build-package
-          - ci/woodpecker/pr/build-container
-          - ci/woodpecker/pr/docs
-      enforce_admins: false
+          - continuous-integration/drone/pr
+      enforce_admins: true
       required_linear_history: true
       restrictions: null

.gitignore vendored

@@ -110,4 +110,3 @@ resources/_gen/
 # Misc
 CHANGELOG.md
-.ruff_cache


@ -1,47 +0,0 @@
---
version: "1.1"
versioning:
update-major: []
update-minor: [feat]
update-patch: [fix, perf, refactor, chore, test, ci, docs]
tag:
pattern: "v%d.%d.%d"
release-notes:
sections:
- name: Features
commit-types: [feat]
section-type: commits
- name: Bug Fixes
commit-types: [fix]
section-type: commits
- name: Performance Improvements
commit-types: [perf]
section-type: commits
- name: Code Refactoring
commit-types: [refactor]
section-type: commits
- name: Others
commit-types: [chore]
section-type: commits
- name: Testing
commit-types: [test]
section-type: commits
- name: CI Pipeline
commit-types: [ci]
section-type: commits
- name: Documentation
commit-types: [docs]
section-type: commits
- name: Breaking Changes
section-type: breaking-changes
commit-message:
footer:
issue:
key: issue
add-value-prefix: "#"
issue:
regex: "#?[0-9]+"

.linkcheckignore Normal file


@ -1 +0,0 @@
https://hub.docker.com/r/thegeeklab/*


@ -1,10 +0,0 @@
---
- id: ansible-later
name: ansible-later
description: Run ansible-later, a best-practice scanner for Ansible.
entry: ansible-later
language: python
pass_filenames: False
always_run: True
additional_dependencies:
- .[ansible-core]
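
This hook definition, present only on the main side of the comparison, lets other repositories run ansible-later through the pre-commit framework. A quick way to exercise it locally, assuming pre-commit is installed, is a try-repo run against a checkout of this repository:

# Run the ansible-later hook from a local clone against all files; the path is
# a placeholder and the flags are standard pre-commit options.
pre-commit try-repo ../ansible-later ansible-later --all-files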


@@ -1,2 +1,3 @@
+.drone.yml
 *.tpl.md
 LICENSE


@ -1,82 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: build
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: security-build
image: quay.io/thegeeklab/wp-docker-buildx:5
depends_on: [build]
settings:
containerfile: Containerfile.multiarch
output: type=oci,dest=oci/${CI_REPO_NAME},tar=false
repo: ${CI_REPO}
- name: security-scan
image: docker.io/aquasec/trivy
depends_on: [security-build]
commands:
- trivy -v
- trivy image --input oci/${CI_REPO_NAME}
environment:
TRIVY_EXIT_CODE: "1"
TRIVY_IGNORE_UNFIXED: "true"
TRIVY_NO_PROGRESS: "true"
TRIVY_SEVERITY: HIGH,CRITICAL
TRIVY_TIMEOUT: 1m
TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2
- name: publish-dockerhub
image: quay.io/thegeeklab/wp-docker-buildx:5
depends_on: [security-scan]
settings:
auto_tag: true
containerfile: Containerfile.multiarch
password:
from_secret: docker_password
platforms:
- linux/amd64
- linux/arm64
provenance: false
repo: ${CI_REPO}
username:
from_secret: docker_username
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
- name: publish-quay
image: quay.io/thegeeklab/wp-docker-buildx:5
depends_on: security-scan
settings:
auto_tag: true
containerfile: Containerfile.multiarch
password:
from_secret: quay_password
platforms:
- linux/amd64
- linux/arm64
provenance: false
registry: quay.io
repo: quay.io/${CI_REPO}
username:
from_secret: quay_username
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
depends_on:
- lint
- test


@ -1,56 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: build
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: checksum
image: quay.io/thegeeklab/alpine-tools
commands:
- cd dist/ && sha256sum * > ../sha256sum.txt
- name: changelog
image: quay.io/thegeeklab/git-sv
commands:
- git sv current-version
- git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
- cat CHANGELOG.md
- name: publish-github
image: docker.io/plugins/github-release
settings:
api_key:
from_secret: github_token
files:
- dist/*
- sha256sum.txt
note: CHANGELOG.md
overwrite: true
title: ${CI_COMMIT_TAG}
when:
- event: [tag]
- name: publish-pypi
image: docker.io/library/python:3.12
environment:
POETRY_HTTP_BASIC_PYPI_PASSWORD:
from_secret: pypi_password
POETRY_HTTP_BASIC_PYPI_USERNAME:
from_secret: pypi_username
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n
when:
- event: [tag]
depends_on:
- lint
- test


@ -1,101 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: assets
image: quay.io/thegeeklab/alpine-tools
commands:
- make doc
- name: markdownlint
image: quay.io/thegeeklab/markdownlint-cli
depends_on: [assets]
commands:
- markdownlint 'README.md' 'CONTRIBUTING.md'
- name: spellcheck
image: quay.io/thegeeklab/alpine-tools
depends_on: [assets]
commands:
- spellchecker --files 'docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls
environment:
FORCE_COLOR: "true"
- name: link-validation
image: docker.io/lycheeverse/lychee
depends_on: [assets]
commands:
- lychee --no-progress --format detailed docs/content README.md
- name: build
image: quay.io/thegeeklab/hugo:0.136.5
depends_on: [link-validation]
commands:
- hugo --panicOnWarning -s docs/
- name: beautify
image: quay.io/thegeeklab/alpine-tools
depends_on: [build]
commands:
- html-beautify -r -f 'docs/public/**/*.html'
- name: publish
image: quay.io/thegeeklab/wp-s3-action
depends_on: [beautify]
settings:
access_key:
from_secret: s3_access_key
bucket: geekdocs
delete: true
endpoint:
from_secret: s3_endpoint
path_style: true
secret_key:
from_secret: s3_secret_access_key
source: docs/public/
strip_prefix: docs/public/
target: /${CI_REPO_NAME}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success, failure]
- name: pushrm-dockerhub
image: docker.io/chko/docker-pushrm:1
depends_on: [publish]
environment:
DOCKER_PASS:
from_secret: docker_password
DOCKER_USER:
from_secret: docker_username
PUSHRM_FILE: README.md
PUSHRM_SHORT: Another best practice scanner for Ansible roles and playbooks
PUSHRM_TARGET: ${CI_REPO}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success]
- name: pushrm-quay
image: docker.io/chko/docker-pushrm:1
depends_on: [publish]
environment:
APIKEY__QUAY_IO:
from_secret: quay_token
PUSHRM_FILE: README.md
PUSHRM_TARGET: quay.io/${CI_REPO}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success]
depends_on:
- build-package
- build-container


@ -1,27 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: check-format
image: docker.io/library/python:3.12
depends_on: []
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install
- poetry run ruff format --check --diff ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"
- name: check-coding
image: docker.io/library/python:3.12
depends_on: []
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run ruff check ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"
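
In these lint steps, the bash expansion ${CI_REPO_NAME//-/} strips the hyphens from the repository name, so for ansible-later the ruff commands target the ansiblelater package directory. Run outside Woodpecker, the equivalent would be roughly:

# Same checks the CI performs, with the repository name expanded by hand.
poetry run ruff format --check --diff ./ansiblelater
poetry run ruff check ./ansiblelater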


@ -1,26 +0,0 @@
---
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
runs_on: [success, failure]
steps:
- name: matrix
image: quay.io/thegeeklab/wp-matrix
settings:
homeserver:
from_secret: matrix_homeserver
room_id:
from_secret: matrix_room_id
user_id:
from_secret: matrix_user_id
access_token:
from_secret: matrix_access_token
when:
- status: [success, failure]
depends_on:
- docs


@ -1,35 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
variables:
- &pytest_base
depends_on: []
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run pytest --cov-append
- poetry version
- poetry run ${CI_REPO_NAME} --help
environment:
PY_COLORS: "1"
steps:
- name: python-312
image: docker.io/library/python:3.12
<<: *pytest_base
- name: python-311
image: docker.io/library/python:3.11
<<: *pytest_base
- name: python-310
image: docker.io/library/python:3.10
<<: *pytest_base
- name: python-39
image: docker.io/library/python:3.9
<<: *pytest_base


@@ -3,7 +3,7 @@
 ## Security
 
 If you think you have found a **security issue**, please do not mention it in this repository.
-Instead, send an email to `security@thegeeklab.de` with as many details as possible so it can be handled confidential.
+Instead, send an email to security@thegeeklab.de with as many details as possible so it can be handled confidential.
 
 ## Bug Reports and Feature Requests


@@ -1,5 +1,5 @@
 # renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc
-THEME_VERSION := v1.2.1
+THEME_VERSION := v0.33.1
 THEME := hugo-geekdoc
 BASEDIR := docs
 THEMEDIR := $(BASEDIR)/themes


@@ -2,22 +2,33 @@
 Another best practice scanner for Ansible roles and playbooks
 
-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-later/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-later)
+[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-later?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-later)
 [![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later)
 [![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later)
 [![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/)
 [![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/)
 [![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/)
+[![Codecov](https://img.shields.io/codecov/c/github/thegeeklab/ansible-later)](https://codecov.io/gh/thegeeklab/ansible-later)
 [![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors)
 [![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later)
 [![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE)
 
-> **Discontinued:** This project is no longer maintained. Please use [ansible-lint](https://github.com/ansible-community/ansible-lint) instead.
-
 ansible-later is a best practice scanner and linting tool. In most cases, if you write Ansible roles in a team, it helps to have a coding or best practice guideline in place. This will make Ansible roles more readable for all maintainers and can reduce the troubleshooting time. While ansible-later aims to be a fast and easy to use linting tool for your Ansible resources, it might not be that feature completed as required in some situations. If you need a more in-depth analysis you can take a look at [ansible-lint](https://github.com/ansible-community/ansible-lint).
 
 ansible-later does **not** ensure that your role will work as expected. For deployment tests you can use other tools like [molecule](https://github.com/ansible/molecule).
 
+You can find the full documentation at [https://ansible-later.geekdocs.de](https://ansible-later.geekdocs.de/).
+
+## Community
+
+<!-- prettier-ignore-start -->
+<!-- spellchecker-disable -->
+
+- [GitHub Action](https://github.com/patrickjahns/ansible-later-action) by [@patrickjahns](https://github.com/patrickjahns)
+
+<!-- spellchecker-enable -->
+<!-- prettier-ignore-end -->
+
 ## Contributors
 
 Special thanks to all [contributors](https://github.com/thegeeklab/ansible-later/graphs/contributors). If you would like to contribute,


@@ -5,10 +5,12 @@ import argparse
 import multiprocessing
 import sys
 
-from ansiblelater import LOG, __version__, logger
+from ansiblelater import LOG
+from ansiblelater import __version__
+from ansiblelater import logger
 from ansiblelater.candidate import Candidate
-from ansiblelater.rule import SingleRules
 from ansiblelater.settings import Settings
+from ansiblelater.standard import SingleStandards
 
 
 def main():
@@ -22,33 +24,33 @@ def main():
     parser.add_argument(
         "-r",
         "--rules-dir",
-        dest="rules.dir",
-        metavar="DIR",
+        dest="rules.standards",
+        metavar="RULES",
         action="append",
-        help="directory of rules",
+        help="directory of standard rules"
     )
     parser.add_argument(
         "-B",
-        "--no-builtin",
-        dest="rules.builtin",
+        "--no-buildin",
+        dest="rules.buildin",
         action="store_false",
-        help="disables built-in rules",
+        help="disables build-in standard rules"
     )
     parser.add_argument(
-        "-i",
-        "--include-rules",
-        dest="rules.include_filter",
-        metavar="TAGS",
+        "-s",
+        "--standards",
+        dest="rules.filter",
+        metavar="FILTER",
         action="append",
-        help="limit rules to given id/tags",
+        help="limit standards to given ID's"
     )
     parser.add_argument(
         "-x",
-        "--exclude-rules",
+        "--exclude-standards",
         dest="rules.exclude_filter",
-        metavar="TAGS",
+        metavar="EXCLUDE_FILTER",
         action="append",
-        help="exclude rules by given it/tags",
+        help="exclude standards by given ID's"
     )
     parser.add_argument(
         "-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
@@ -57,7 +59,9 @@ def main():
         "-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
     )
     parser.add_argument("rules.files", nargs="*")
-    parser.add_argument("-V", "--version", action="version", version=f"%(prog)s {__version__}")
+    parser.add_argument(
+        "-V", "--version", action="version", version="%(prog)s {}".format(__version__)
+    )
 
     args = parser.parse_args().__dict__
@@ -65,7 +69,7 @@ def main():
     config = settings.config
 
     logger.update_logger(LOG, config["logging"]["level"], config["logging"]["json"])
-    SingleRules(config["rules"]["dir"])
+    SingleStandards(config["rules"]["standards"]).rules
 
     workers = max(multiprocessing.cpu_count() - 2, 2)
     p = multiprocessing.Pool(workers)
@@ -74,22 +78,25 @@ def main():
         candidate = Candidate.classify(filename, settings)
         if candidate:
             if candidate.binary:
-                LOG.info(f"Not reviewing binary file {filename}")
+                LOG.info("Not reviewing binary file {name}".format(name=filename))
                 continue
             if candidate.vault:
-                LOG.info(f"Not reviewing vault file {filename}")
+                LOG.info("Not reviewing vault file {name}".format(name=filename))
                 continue
-
-            LOG.info(f"Reviewing all of {candidate}")
-            tasks.append(candidate)
+            else:
+                LOG.info("Reviewing all of {candidate}".format(candidate=candidate))
+                tasks.append(candidate)
         else:
-            LOG.info(f"Couldn't classify file {filename}")
+            LOG.info("Couldn't classify file {name}".format(name=filename))
 
-    errors = sum(p.map(_review_wrapper, tasks))
+    errors = (sum(p.map(_review_wrapper, tasks)))
     p.close()
     p.join()
 
-    return_code = 1 if errors != 0 else 0
+    if not errors == 0:
+        return_code = 1
+    else:
+        return_code = 0
 
     sys.exit(return_code)


@ -3,15 +3,19 @@
import codecs import codecs
import copy import copy
import os import os
import re
from distutils.version import LooseVersion
from ansible.plugins.loader import module_loader from ansible.plugins.loader import module_loader
from ansiblelater import LOG from ansiblelater import LOG
from ansiblelater import utils
from ansiblelater.logger import flag_extra from ansiblelater.logger import flag_extra
from ansiblelater.rule import RuleBase, SingleRules from ansiblelater.standard import SingleStandards
from ansiblelater.standard import StandardBase
class Candidate: class Candidate(object):
""" """
Meta object for all files which later has to process. Meta object for all files which later has to process.
@ -19,12 +23,12 @@ class Candidate:
bundled with necessary meta informations for rule processing. bundled with necessary meta informations for rule processing.
""" """
def __init__(self, filename, settings={}, rules=[]): # noqa def __init__(self, filename, settings={}, standards=[]):
self.path = filename self.path = filename
self.binary = False self.binary = False
self.vault = False self.vault = False
self.filemeta = type(self).__name__.lower() self.filetype = type(self).__name__.lower()
self.kind = type(self).__name__.lower() self.expected_version = True
self.faulty = False self.faulty = False
self.config = settings.config self.config = settings.config
self.settings = settings self.settings = settings
@ -36,58 +40,139 @@ class Candidate:
except UnicodeDecodeError: except UnicodeDecodeError:
self.binary = True self.binary = True
def _filter_rules(self): def _get_version(self):
target_rules = [] path = self.path
includes = self.config["rules"]["include_filter"] version = None
config_version = self.config["rules"]["version"].strip()
if config_version:
version_config_re = re.compile(r"([\d.]+)")
match = version_config_re.match(config_version)
if match:
version = match.group(1)
if not self.binary:
if isinstance(self, RoleFile):
parentdir = os.path.dirname(os.path.abspath(self.path))
while parentdir != os.path.dirname(parentdir):
meta_file = os.path.join(parentdir, "meta", "main.yml")
if os.path.exists(meta_file):
path = meta_file
break
parentdir = os.path.dirname(parentdir)
version_file_re = re.compile(r"^# Standards:\s*([\d.]+)")
with codecs.open(path, mode="rb", encoding="utf-8") as f:
for line in f:
match = version_file_re.match(line)
if match:
version = match.group(1)
if not version:
version = utils.standards_latest(self.standards)
if self.expected_version:
if isinstance(self, RoleFile):
LOG.warning(
"{name} {path} is in a role that contains a meta/main.yml without a "
"declared standards version. "
"Using latest standards version {version}".format(
name=type(self).__name__, path=self.path, version=version
)
)
else:
LOG.warning(
"{name} {path} does not present standards version. "
"Using latest standards version {version}".format(
name=type(self).__name__, path=self.path, version=version
)
)
else:
LOG.info(
"{name} {path} declares standards version {version}".format(
name=type(self).__name__, path=self.path, version=version
)
)
return version
def _filter_standards(self):
target_standards = []
includes = self.config["rules"]["filter"]
excludes = self.config["rules"]["exclude_filter"] excludes = self.config["rules"]["exclude_filter"]
if len(includes) == 0: if len(includes) == 0:
includes = [s.rid for s in self.rules] includes = [s.sid for s in self.standards]
for rule in self.rules: for standard in self.standards:
if rule.rid in includes and rule.rid not in excludes: if standard.sid in includes and standard.sid not in excludes:
target_rules.append(rule) target_standards.append(standard)
return target_rules return target_standards
def review(self): def review(self, lines=None):
errors = 0 errors = 0
self.rules = SingleRules(self.config["rules"]["dir"]).rules self.standards = SingleStandards(self.config["rules"]["standards"]).rules
self.version = self._get_version()
for rule in self._filter_rules(): for standard in self._filter_standards():
if self.kind not in rule.types: if type(self).__name__.lower() not in standard.types:
continue continue
result = rule.check(self, self.config) result = standard.check(self, self.config)
if not result: if not result:
LOG.error(f"rule '{rule.rid}' returns an empty result object. Check failed!") LOG.error(
"Standard '{id}' returns an empty result object. Check failed!".format(
id=standard.sid
)
)
continue continue
labels = { labels = {
"tag": "review", "tag": "review",
"rule": rule.description, "standard": standard.description,
"file": self.path, "file": self.path,
"passed": True, "passed": True
} }
if rule.rid and rule.rid.strip(): if standard.sid and standard.sid.strip():
labels["rid"] = rule.rid labels["sid"] = standard.sid
for err in result.errors: for err in result.errors:
err_labels = copy.copy(labels) err_labels = copy.copy(labels)
err_labels["passed"] = False err_labels["passed"] = False
if isinstance(err, StandardBase.Error):
rid = self._format_id(rule.rid)
path = self.path
description = rule.description
if isinstance(err, RuleBase.Error):
err_labels.update(err.to_dict()) err_labels.update(err.to_dict())
msg = f"{rid}rule '{description}' not met:\n{path}:{err}" if not standard.version:
LOG.warning(
"{sid}Best practice '{description}' not met:\n{path}:{error}".format(
sid=self._format_id(standard.sid),
description=standard.description,
path=self.path,
error=err
),
extra=flag_extra(err_labels)
)
elif LooseVersion(standard.version) > LooseVersion(self.version):
LOG.warning(
"{sid}Future standard '{description}' not met:\n{path}:{error}".format(
sid=self._format_id(standard.sid),
description=standard.description,
path=self.path,
error=err
),
extra=flag_extra(err_labels)
)
else:
msg = "{sid}Standard '{description}' not met:\n{path}:{error}".format(
sid=self._format_id(standard.sid),
description=standard.description,
path=self.path,
error=err
)
if rule.rid not in self.config["rules"]["warning_filter"]: if standard.sid not in self.config["rules"]["warning_filter"]:
LOG.error(msg, extra=flag_extra(err_labels)) LOG.error(msg, extra=flag_extra(err_labels))
errors = errors + 1 errors = errors + 1
else: else:
@ -96,67 +181,63 @@ class Candidate:
return errors return errors
@staticmethod @staticmethod
def classify(filename, settings={}, rules=[]): # noqa def classify(filename, settings={}, standards=[]):
parentdir = os.path.basename(os.path.dirname(filename)) parentdir = os.path.basename(os.path.dirname(filename))
basename = os.path.basename(filename) basename = os.path.basename(filename)
ext = os.path.splitext(filename)[1][1:]
if parentdir in ["tasks"]: if parentdir in ["tasks"]:
return Task(filename, settings, rules) return Task(filename, settings, standards)
if parentdir in ["handlers"]: if parentdir in ["handlers"]:
return Handler(filename, settings, rules) return Handler(filename, settings, standards)
if parentdir in ["vars", "defaults"]: if parentdir in ["vars", "defaults"]:
return RoleVars(filename, settings, rules) return RoleVars(filename, settings, standards)
if "group_vars" in filename.split(os.sep): if "group_vars" in filename.split(os.sep):
return GroupVars(filename, settings, rules) return GroupVars(filename, settings, standards)
if "host_vars" in filename.split(os.sep): if "host_vars" in filename.split(os.sep):
return HostVars(filename, settings, rules) return HostVars(filename, settings, standards)
if parentdir in ["meta"] and "main" in basename: if parentdir in ["meta"] and "main" in basename:
return Meta(filename, settings, rules) return Meta(filename, settings, standards)
if parentdir in ["meta"] and "argument_specs" in basename: if parentdir in ["meta"] and "argument_specs" in basename:
return ArgumentSpecs(filename, settings, rules) return ArgumentSpecs(filename, settings, standards)
if parentdir in [ if (
"library", parentdir in ["library", "lookup_plugins", "callback_plugins", "filter_plugins"]
"lookup_plugins", or filename.endswith(".py")
"callback_plugins", ):
"filter_plugins", return Code(filename, settings, standards)
] or filename.endswith(".py"):
return Code(filename, settings, rules)
if basename == "inventory" or basename == "hosts" or parentdir in ["inventories"]: if basename == "inventory" or basename == "hosts" or parentdir in ["inventories"]:
return Inventory(filename, settings, rules) return Inventory(filename, settings, standards)
if "rolesfile" in basename or ("requirements" in basename and ext in ["yaml", "yml"]): if "rolesfile" in basename or "requirements" in basename:
return Rolesfile(filename, settings, rules) return Rolesfile(filename, settings, standards)
if "Makefile" in basename: if "Makefile" in basename:
return Makefile(filename, settings, rules) return Makefile(filename, settings, standards)
if "templates" in filename.split(os.sep) or basename.endswith(".j2"): if "templates" in filename.split(os.sep) or basename.endswith(".j2"):
return Template(filename, settings, rules) return Template(filename, settings, standards)
if "files" in filename.split(os.sep): if "files" in filename.split(os.sep):
return File(filename, settings, rules) return File(filename, settings, standards)
if basename.endswith(".yml") or basename.endswith(".yaml"): if basename.endswith(".yml") or basename.endswith(".yaml"):
return Playbook(filename, settings, rules) return Playbook(filename, settings, standards)
if "README" in basename: if "README" in basename:
return Doc(filename, settings, rules) return Doc(filename, settings, standards)
return None return None
def _format_id(self, rule_id): def _format_id(self, standard_id):
rid = rule_id.strip() if standard_id and standard_id.strip():
if rid: standard_id = "[{id}] ".format(id=standard_id.strip())
rule_id = f"[{rid}] "
return rule_id return standard_id
def __repr__(self): def __repr__(self): # noqa
return f"{self.kind} ({self.path})" return "{name} ({path})".format(name=type(self).__name__, path=self.path)
def __getitem__(self, item): def __getitem__(self, item): # noqa
return self.__dict__.get(item) return self.__dict__.get(item)
class RoleFile(Candidate): class RoleFile(Candidate):
"""Object classified as Ansible role file.""" """Object classified as Ansible role file."""
def __init__(self, filename, settings={}, rules=[]): # noqa def __init__(self, filename, settings={}, standards=[]):
super().__init__(filename, settings, rules) super(RoleFile, self).__init__(filename, settings, standards)
parentdir = os.path.dirname(os.path.abspath(filename)) parentdir = os.path.dirname(os.path.abspath(filename))
while parentdir != os.path.dirname(parentdir): while parentdir != os.path.dirname(parentdir):
@ -176,17 +257,17 @@ class Playbook(Candidate):
class Task(RoleFile): class Task(RoleFile):
"""Object classified as Ansible task file.""" """Object classified as Ansible task file."""
def __init__(self, filename, settings={}, rules=[]): # noqa def __init__(self, filename, settings={}, standards=[]):
super().__init__(filename, settings, rules) super(Task, self).__init__(filename, settings, standards)
self.filemeta = "tasks" self.filetype = "tasks"
class Handler(RoleFile): class Handler(RoleFile):
"""Object classified as Ansible handler file.""" """Object classified as Ansible handler file."""
def __init__(self, filename, settings={}, rules=[]): # noqa def __init__(self, filename, settings={}, standards=[]):
super().__init__(filename, settings, rules) super(Handler, self).__init__(filename, settings, standards)
self.filemeta = "handlers" self.filetype = "handlers"
class Vars(Candidate): class Vars(Candidate):
@ -195,7 +276,15 @@ class Vars(Candidate):
pass pass
class InventoryVars(Candidate): class Unversioned(Candidate):
"""Object classified as unversioned file."""
def __init__(self, filename, settings={}, standards=[]):
super(Unversioned, self).__init__(filename, settings, standards)
self.expected_version = False
class InventoryVars(Unversioned):
"""Object classified as Ansible inventory vars.""" """Object classified as Ansible inventory vars."""
pass pass
@ -231,13 +320,13 @@ class ArgumentSpecs(RoleFile):
pass pass
class Inventory(Candidate): class Inventory(Unversioned):
"""Object classified as Ansible inventory file.""" """Object classified as Ansible inventory file."""
pass pass
class Code(Candidate): class Code(Unversioned):
"""Object classified as code file.""" """Object classified as code file."""
pass pass
@ -249,13 +338,13 @@ class Template(RoleFile):
pass pass
class Doc(Candidate): class Doc(Unversioned):
"""Object classified as documentation file.""" """Object classified as documentation file."""
pass pass
class Makefile(Candidate): class Makefile(Unversioned):
"""Object classified as makefile.""" """Object classified as makefile."""
pass pass
@ -267,7 +356,7 @@ class File(RoleFile):
pass pass
class Rolesfile(Candidate): class Rolesfile(Unversioned):
"""Object classified as Ansible roles file.""" """Object classified as Ansible roles file."""
pass pass


@@ -8,14 +8,14 @@ class LaterError(Exception):
     def __init__(self, msg, original):
         """Initialize new exception."""
-        super().__init__(f"{msg}: {original}")
+        super(LaterError, self).__init__("{msg}: {org}".format(msg=msg, org=original))
         self.original = original
 
 
 class LaterAnsibleError(Exception):
     """Wrapper for ansible syntax errors."""
 
-    def __init__(self, original):
+    def __init__(self, msg, original):
         lines = original.message.splitlines()
 
         line_no = re.search("line(.*?),", lines[2])


@@ -3,6 +3,7 @@
 import logging
 import os
 import sys
+from distutils.util import strtobool
 
 import colorama
 from pythonjsonlogger import jsonlogger
@@ -11,35 +12,12 @@ CONSOLE_FORMAT = "{}%(levelname)s:{} %(message)s"
 JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"
 
 
-def strtobool(value):
-    """Convert a string representation of truth to true or false."""
-
-    _map = {
-        "y": True,
-        "yes": True,
-        "t": True,
-        "true": True,
-        "on": True,
-        "1": True,
-        "n": False,
-        "no": False,
-        "f": False,
-        "false": False,
-        "off": False,
-        "0": False,
-    }
-
-    try:
-        return _map[str(value).lower()]
-    except KeyError as err:
-        raise ValueError(f'"{value}" is not a valid bool value') from err
-
-
 def to_bool(string):
     return bool(strtobool(str(string)))
 
 
 def _should_do_markup():
     py_colors = os.environ.get("PY_COLORS", None)
 
     if py_colors is not None:
         return to_bool(py_colors)
@@ -52,7 +30,7 @@ colorama.init(autoreset=True, strip=(not _should_do_markup()))
 def flag_extra(extra):
     """Ensure extra args are prefixed."""
-    flagged = {}
+    flagged = dict()
 
     if isinstance(extra, dict):
         for key, value in extra.items():
@@ -61,7 +39,7 @@ def flag_extra(extra):
     return flagged
 
 
-class LogFilter:
+class LogFilter(object):
     """A custom log filter which excludes log messages above the logged level."""
 
     def __init__(self, level):
@@ -82,8 +60,8 @@ class LogFilter:
 class MultilineFormatter(logging.Formatter):
     """Logging Formatter to reset color after newline characters."""
 
-    def format(self, record):
-        record.msg = record.msg.replace("\n", f"\n{colorama.Style.RESET_ALL}... ")
+    def format(self, record):  # noqa
+        record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
         record.msg = record.msg + "\n"
         return logging.Formatter.format(self, record)
@@ -91,7 +69,7 @@ class MultilineFormatter(logging.Formatter):
 class MultilineJsonFormatter(jsonlogger.JsonFormatter):
     """Logging Formatter to remove newline characters."""
 
-    def format(self, record):
+    def format(self, record):  # noqa
         record.msg = record.msg.replace("\n", " ")
         return jsonlogger.JsonFormatter.format(self, record)
@@ -207,4 +185,4 @@ def color_text(color, msg):
     """
     msg = msg.format(colorama.Style.BRIGHT, colorama.Style.NORMAL)
-    return f"{color}{msg}{colorama.Style.RESET_ALL}"
+    return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckBecomeUser(RuleBase): class CheckBecomeUser(StandardBase):
rid = "ANS115"
sid = "ANSIBLE0015"
description = "Become should be combined with become_user" description = "Become should be combined with become_user"
helptext = "the task has `become` enabled but `become_user` is missing" helptext = "the task has `become` enabled but `become_user` is missing"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -14,7 +16,7 @@ class CheckBecomeUser(RuleBase):
if not errors: if not errors:
gen = (task for task in tasks if "become" in task) gen = (task for task in tasks if "become" in task)
for task in gen: for task in gen:
if task["become"] in true_value and "become_user" not in task: if task["become"] in true_value and "become_user" not in task.keys():
errors.append(self.Error(task["__line__"], self.helptext)) errors.append(self.Error(task["__line__"], self.helptext))
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
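A minimal sketch of the condition checked above; `true_value` is not shown in this hunk, so the list below is an assumption about the accepted truthy spellings:

true_value = [True, "true", "True", "yes", "Yes"]   # assumed; see the rule module for the real list

task = {"become": True, "ansible.builtin.apt": {"name": "nginx"}, "__line__": 7}
if task["become"] in true_value and "become_user" not in task:
    print(f"line {task['__line__']}: the task has `become` enabled but `become_user` is missing")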
@ -1,13 +1,15 @@
import re import re
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
from ansiblelater.utils import count_spaces from ansiblelater.utils import count_spaces
class CheckBracesSpaces(RuleBase): class CheckBracesSpaces(StandardBase):
rid = "ANS104"
sid = "ANSIBLE0004"
description = "YAML should use consistent number of spaces around variables" description = "YAML should use consistent number of spaces around variables"
helptext = "no suitable numbers of spaces (min: {min} max: {max})" helptext = "no suitable numbers of spaces (min: {min} max: {max})"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -39,7 +41,7 @@ class CheckBracesSpaces(RuleBase):
i, i,
self.helptext.format( self.helptext.format(
min=conf["min-spaces-inside"], max=conf["max-spaces-inside"] min=conf["min-spaces-inside"], max=conf["max-spaces-inside"]
), )
) )
) )
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -17,14 +17,15 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE. # THE SOFTWARE.
from ansiblelater.standard import StandardBase
from ansiblelater.rule import RuleBase
class CheckChangedInWhen(RuleBase): class CheckChangedInWhen(StandardBase):
rid = "ANS126"
sid = "ANSIBLE0026"
description = "Use handlers instead of `when: changed`" description = "Use handlers instead of `when: changed`"
helptext = "tasks using `when: result.changed` setting are effectively acting as a handler" helptext = "tasks using `when: result.changed` setting are effectively acting as a handler"
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -34,7 +35,7 @@ class CheckChangedInWhen(RuleBase):
for task in tasks: for task in tasks:
when = None when = None
if task["__ansible_action_type__"] in ["task", "meta"]: if task["__ansible_action_type__"] == "task":
when = task.get("when") when = task.get("when")
if isinstance(when, str): if isinstance(when, str):
@ -52,16 +53,6 @@ class CheckChangedInWhen(RuleBase):
if not isinstance(item, str): if not isinstance(item, str):
return False return False
if not {"and", "or", "not"}.isdisjoint(item.split()):
return False
return any( return any(
changed in item changed in item for changed in [".changed", "|changed", '["changed"]', "['changed']"]
for changed in [
".changed",
"|changed",
'["changed"]',
"['changed']",
"is changed",
]
) )
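The rewritten matcher above also recognises the Jinja test form (`is changed`) and skips compound conditions joined with and/or/not. A standalone probe of that logic:

needles = [".changed", "|changed", '["changed"]', "['changed']", "is changed"]

for when in ("result.changed", "result is changed", "result.rc == 0", "a.changed and b.changed"):
    compound = not {"and", "or", "not"}.isdisjoint(when.split())
    print(when, "->", (not compound) and any(n in when for n in needles))
# result.changed -> True, result is changed -> True,
# result.rc == 0 -> False, a.changed and b.changed -> False (compound, left alone)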
@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckCommandHasChanges(RuleBase): class CheckCommandHasChanges(StandardBase):
rid = "ANS111"
sid = "ANSIBLE0011"
description = "Commands should be idempotent" description = "Commands should be idempotent"
helptext = ( helptext = (
"commands should only read while using `changed_when` or try to be " "commands should only read while using `changed_when` or try to be "
"idempotent while using controls like `creates`, `removes` or `when`" "idempotent while using controls like `creates`, `removes` or `when`"
) )
version = "0.1"
types = ["playbook", "task"] types = ["playbook", "task"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -16,12 +18,11 @@ class CheckCommandHasChanges(RuleBase):
if not errors: if not errors:
for task in tasks: for task in tasks:
if task["action"]["__ansible_module__"] in commands and ( if task["action"]["__ansible_module__"] in commands:
"changed_when" not in task if (
and "when" not in task "changed_when" not in task and "when" not in task
and "when" not in task.get("__ansible_action_meta__", []) and "when" not in task.get("__ansible_action_meta__", [])
and "creates" not in task["action"] and "creates" not in task["action"] and "removes" not in task["action"]
and "removes" not in task["action"]
): ):
errors.append(self.Error(task["__line__"], self.helptext)) errors.append(self.Error(task["__line__"], self.helptext))
@ -20,13 +20,15 @@
import os import os
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckCommandInsteadOfArgument(RuleBase): class CheckCommandInsteadOfArgument(StandardBase):
rid = "ANS117"
sid = "ANSIBLE0017"
description = "Commands should not be used in place of module arguments" description = "Commands should not be used in place of module arguments"
helptext = "{exec} used in place of file modules argument {arg}" helptext = "{exec} used in place of file modules argument {arg}"
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -39,7 +41,7 @@ class CheckCommandInsteadOfArgument(RuleBase):
"ln": "state=link", "ln": "state=link",
"mkdir": "state=directory", "mkdir": "state=directory",
"rmdir": "state=absent", "rmdir": "state=absent",
"rm": "state=absent", "rm": "state=absent"
} }
if not errors: if not errors:
@ -49,14 +51,13 @@ class CheckCommandInsteadOfArgument(RuleBase):
executable = os.path.basename(first_cmd_arg) executable = os.path.basename(first_cmd_arg)
if ( if (
first_cmd_arg first_cmd_arg and executable in arguments
and executable in arguments
and task["action"].get("warn", True) and task["action"].get("warn", True)
): ):
errors.append( errors.append(
self.Error( self.Error(
task["__line__"], task["__line__"],
self.helptext.format(exec=executable, arg=arguments[executable]), self.helptext.format(exec=executable, arg=arguments[executable])
) )
) )
@ -1,12 +1,14 @@
import os import os
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckCommandInsteadOfModule(RuleBase): class CheckCommandInsteadOfModule(StandardBase):
rid = "ANS108"
sid = "ANSIBLE0008"
description = "Commands should not be used in place of modules" description = "Commands should not be used in place of modules"
helptext = "{exec} command used in place of {module} module" helptext = "{exec} command used in place of {module} module"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -29,7 +31,7 @@ class CheckCommandInsteadOfModule(RuleBase):
"rsync": "synchronize", "rsync": "synchronize",
"supervisorctl": "supervisorctl", "supervisorctl": "supervisorctl",
"systemctl": "systemd", "systemctl": "systemd",
"sed": "template or lineinfile", "sed": "template or lineinfile"
} }
if not errors: if not errors:
@ -37,19 +39,14 @@ class CheckCommandInsteadOfModule(RuleBase):
if task["action"]["__ansible_module__"] in commands: if task["action"]["__ansible_module__"] in commands:
first_cmd_arg = self.get_first_cmd_arg(task) first_cmd_arg = self.get_first_cmd_arg(task)
executable = os.path.basename(first_cmd_arg) executable = os.path.basename(first_cmd_arg)
cmd = cmd = self.get_safe_cmd(task)
if ( if (
first_cmd_arg first_cmd_arg and executable in modules
and executable in modules and task["action"].get("warn", True) and "register" not in task
and task["action"].get("warn", True)
and "register" not in task
and not any(ch in cmd for ch in self.SHELL_PIPE_CHARS)
): ):
errors.append( errors.append(
self.Error( self.Error(
task["__line__"], task["__line__"],
self.helptext.format(exec=executable, module=modules[executable]), self.helptext.format(exec=executable, module=modules[executable])
) )
) )
@ -1,13 +1,15 @@
import re import re
from ansiblelater.candidate import Template from ansiblelater.candidate import Template
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckCompareToEmptyString(RuleBase): class CheckCompareToEmptyString(StandardBase):
rid = "ANS112"
description = 'Don\'t compare to empty string ""' sid = "ANSIBLE0012"
helptext = "use `when: var` rather than `when: var !=` (or conversely `when: not var`)" description = "Don't compare to empty string \"\""
helptext = ("use `when: var` rather than `when: var !=` (or conversely `when: not var`)")
version = "0.1"
types = ["playbook", "task", "handler", "template"] types = ["playbook", "task", "handler", "template"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,13 +1,15 @@
import re import re
from ansiblelater.candidate import Template from ansiblelater.candidate import Template
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckCompareToLiteralBool(RuleBase): class CheckCompareToLiteralBool(StandardBase):
rid = "ANS113"
sid = "ANSIBLE0013"
description = "Don't compare to True or False" description = "Don't compare to True or False"
helptext = "use `when: var` rather than `when: var == True` (or conversely `when: not var`)" helptext = ("use `when: var` rather than `when: var == True` (or conversely `when: not var`)")
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckDeprecated(RuleBase): class CheckDeprecated(StandardBase):
rid = "ANS999"
sid = "ANSIBLE9999"
description = "Deprecated features should not be used" description = "Deprecated features should not be used"
helptext = "`{old}` is deprecated and should not be used anymore. Use `{new}` instead." helptext = "'{old}' is deprecated and should not be used anymore. Use '{new}' instead."
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -18,7 +20,7 @@ class CheckDeprecated(RuleBase):
task["__line__"], task["__line__"],
self.helptext.format( self.helptext.format(
old="skip_ansible_lint", new="skip_ansible_later" old="skip_ansible_lint", new="skip_ansible_later"
), )
) )
) )
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,87 +0,0 @@
# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
from ansiblelater.rule import RuleBase
from ansiblelater.utils import has_glob, has_jinja
class CheckDeprecatedBareVars(RuleBase):
rid = "ANS127"
description = "Deprecated bare variables in loops must not be used"
helptext = (
"bare var '{barevar}' in '{loop_type}' must use full var syntax '{{{{ {barevar} }}}}' "
"or be converted to a list"
)
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
tasks, self.errors = self.get_normalized_tasks(candidate, settings)
if not self.errors:
for task in tasks:
loop_type = next((key for key in task if key.startswith("with_")), None)
if not loop_type:
continue
if loop_type in [
"with_nested",
"with_together",
"with_flattened",
"with_filetree",
"with_community.general.filetree",
]:
# These loops can either take a list defined directly in the task
# or a variable that is a list itself. When a single variable is used
# we just need to check that one variable, and not iterate over it like
# it's a list. Otherwise, loop through and check all items.
items = task[loop_type]
if not isinstance(items, (list, tuple)):
items = [items]
for var in items:
self._matchvar(var, task, loop_type)
elif loop_type == "with_subelements":
self._matchvar(task[loop_type][0], task, loop_type)
elif loop_type in ["with_sequence", "with_ini", "with_inventory_hostnames"]:
pass
else:
self._matchvar(task[loop_type], task, loop_type)
return self.Result(candidate.path, self.errors)
def _matchvar(self, varstring, task, loop_type):
if isinstance(varstring, str) and not has_jinja(varstring):
valid = loop_type == "with_fileglob" and bool(
has_jinja(varstring) or has_glob(varstring),
)
valid |= loop_type == "with_filetree" and bool(
has_jinja(varstring) or varstring.endswith(os.sep),
)
if not valid:
self.errors.append(
self.Error(
task["__line__"],
self.helptext.format(barevar=varstring, loop_type=loop_type),
)
)
@ -1,132 +0,0 @@
# Original code written by the authors of ansible-lint
from ansiblelater.rule import RuleBase
from ansiblelater.utils import load_plugin
class CheckFQCNBuiltin(RuleBase):
rid = "ANS128"
helptext = "use FQCN `{module_alias}` for module action `{module}`"
description = "Module actions should use full qualified collection names"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
module_aliases = {"block/always/rescue": "block/always/rescue"}
def check(self, candidate, settings):
tasks, errors = self.get_normalized_tasks(candidate, settings)
_builtins = [
"add_host",
"apt",
"apt_key",
"apt_repository",
"assemble",
"assert",
"async_status",
"blockinfile",
"command",
"copy",
"cron",
"debconf",
"debug",
"dnf",
"dpkg_selections",
"expect",
"fail",
"fetch",
"file",
"find",
"gather_facts",
"get_url",
"getent",
"git",
"group",
"group_by",
"hostname",
"import_playbook",
"import_role",
"import_tasks",
"include",
"include_role",
"include_tasks",
"include_vars",
"iptables",
"known_hosts",
"lineinfile",
"meta",
"package",
"package_facts",
"pause",
"ping",
"pip",
"raw",
"reboot",
"replace",
"rpm_key",
"script",
"service",
"service_facts",
"set_fact",
"set_stats",
"setup",
"shell",
"slurp",
"stat",
"subversion",
"systemd",
"sysvinit",
"tempfile",
"template",
"unarchive",
"uri",
"user",
"wait_for",
"wait_for_connection",
"yum",
"yum_repository",
]
if errors:
return self.Result(candidate.path, errors)
for task in tasks:
module = task["action"]["__ansible_module_original__"]
if module not in self.module_aliases:
loaded_module = load_plugin(module)
target = loaded_module.resolved_fqcn
self.module_aliases[module] = target
if target is None:
self.module_aliases[module] = module
continue
if target not in self.module_aliases:
self.module_aliases[target] = target
if module != self.module_aliases[module]:
module_alias = self.module_aliases[module]
if module_alias.startswith("ansible.builtin"):
legacy_module = module_alias.replace(
"ansible.builtin.",
"ansible.legacy.",
1,
)
if module != legacy_module:
helptext = self.helptext.format(module_alias=module_alias, module=module)
if module == "ansible.builtin.include":
helptext = (
"`ansible.builtin.include_task` or `ansible.builtin.import_tasks` "
f"should be used instead of deprecated `{module}`",
)
errors.append(self.Error(task["__line__"], helptext))
else:
if module.count(".") < 2:
errors.append(
self.Error(
task["__line__"],
self.helptext.format(module_alias=module_alias, module=module),
)
)
return self.Result(candidate.path, errors)
@ -19,16 +19,18 @@
# THE SOFTWARE. # THE SOFTWARE.
import re import re
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckFilePermissionMissing(RuleBase): class CheckFilePermissionMissing(StandardBase):
rid = "ANS118"
sid = "ANSIBLE0018"
description = "File permissions unset or incorrect" description = "File permissions unset or incorrect"
helptext = ( helptext = (
"`mode` parameter should set permissions explicitly (e.g. `mode: 0644`) " "`mode` parameter should set permissions explicitly (e.g. `mode: 0644`) "
"to avoid unexpected file permissions" "to avoid unexpected file permissions"
) )
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
_modules = { _modules = {
@ -65,7 +67,8 @@ class CheckFilePermissionMissing(RuleBase):
mode = task["action"].get("mode", None) mode = task["action"].get("mode", None)
state = task["action"].get("state", "file") state = task["action"].get("state", "file")
if module not in self._modules and module not in self._create_modules: if module not in self._modules and \
module not in self._create_modules:
return False return False
if mode == "preserve" and module not in self._preserve_modules: if mode == "preserve" and module not in self._preserve_modules:
@ -17,28 +17,22 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE. # THE SOFTWARE.
from ansiblelater.standard import StandardBase
from ansiblelater.rule import RuleBase
class CheckFilePermissionOctal(RuleBase): class CheckFilePermissionOctal(StandardBase):
rid = "ANS119"
description = "Numeric file permissions without a leading zero can behave unexpectedly" sid = "ANSIBLE0019"
helptext = '`mode: {mode}` should be strings with a leading zero `mode: "0{mode}"`' description = "Octal file permissions must contain leading zero or be a string"
helptext = "numeric file permissions without leading zero can behave in unexpected ways"
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
tasks, errors = self.get_normalized_tasks(candidate, settings) tasks, errors = self.get_normalized_tasks(candidate, settings)
modules = [ modules = [
"assemble", "assemble", "copy", "file", "ini_file", "lineinfile", "replace", "synchronize",
"copy", "template", "unarchive"
"file",
"ini_file",
"lineinfile",
"replace",
"synchronize",
"template",
"unarchive",
] ]
if not errors: if not errors:
@ -46,33 +40,28 @@ class CheckFilePermissionOctal(RuleBase):
if task["action"]["__ansible_module__"] in modules: if task["action"]["__ansible_module__"] in modules:
mode = task["action"].get("mode", None) mode = task["action"].get("mode", None)
if isinstance(mode, int) and self._is_invalid_permission(mode): if isinstance(mode, int):
errors.append( if self._is_invalid_permission(mode):
self.Error(task["__line__"], self.helptext.format(mode=mode)) errors.append(self.Error(task["__line__"], self.helptext))
)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@staticmethod @staticmethod
def _is_invalid_permission(mode): def _is_invalid_permission(mode):
other_write_without_read = ( other_write_without_read = (
mode % 8 and mode % 8 < 4 and not (mode % 8 == 1 and (mode >> 6) % 2 == 1) mode % 8 and mode % 8 < 4 and not (mode % 8 == 1 and (mode >> 6) % 2 == 1)
) )
group_write_without_read = ( group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4
(mode >> 3) % 8 and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))
and (mode >> 3) % 8 < 4 user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4
and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1) and not (mode >> 6) % 8 == 1)
)
user_write_without_read = (mode >> 6) % 8 and (mode >> 6) % 8 < 4 and (mode >> 6) % 8 != 1
other_more_generous_than_group = mode % 8 > (mode >> 3) % 8 other_more_generous_than_group = mode % 8 > (mode >> 3) % 8
other_more_generous_than_user = mode % 8 > (mode >> 6) % 8 other_more_generous_than_user = mode % 8 > (mode >> 6) % 8
group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8 group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8
return bool( return bool(
other_write_without_read other_write_without_read or group_write_without_read or user_write_without_read
or group_write_without_read or other_more_generous_than_group or other_more_generous_than_user
or user_write_without_read
or other_more_generous_than_group
or other_more_generous_than_user
or group_more_generous_than_user or group_more_generous_than_user
) )
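The bit arithmetic above is easiest to verify with concrete numbers: an unquoted `mode: 644` reaches the rule as the decimal integer 644, which decodes to a very different permission set than the intended octal `0644`:

for mode in (0o644, 644):
    user, group, other = (mode >> 6) % 8, (mode >> 3) % 8, mode % 8
    print(mode, (user, group, other))
# 420 (6, 4, 4)  -> rw-r--r--, passes _is_invalid_permission
# 644 (2, 0, 4)  -> -w----r--, user write without read, flagged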
@ -1,12 +1,14 @@
import re import re
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckFilterSeparation(RuleBase): class CheckFilterSeparation(StandardBase):
rid = "ANS116"
sid = "ANSIBLE0016"
description = "Jinja2 filters should be separated with spaces" description = "Jinja2 filters should be separated with spaces"
helptext = "no suitable numbers of spaces (required: 1)" helptext = "no suitable numbers of spaces (required: 1)"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -14,18 +16,16 @@ class CheckFilterSeparation(RuleBase):
matches = [] matches = []
braces = re.compile("{{(.*?)}}") braces = re.compile("{{(.*?)}}")
filters = re.compile(r"(?<=\|)((\s{2,})*\S+)|(\S+(\s{2,})*)(?=\|)") filters = re.compile(r"(?<=\|)([\s]{2,}[^\s}]+|[^\s]+)|([^\s{]+[\s]{2,}|[^\s]+)(?=\|)")
if not errors: if not errors:
for i, line in yamllines: for i, line in yamllines:
match = braces.findall(line) match = braces.findall(line)
if match: if match:
for item in match: for item in match:
# replace potential regex in filters
item = re.sub(r"\(.+\)", "(dummy)", item)
matches.append((i, item)) matches.append((i, item))
for i, item in matches: for i, line in matches:
if filters.findall(item): if filters.findall(line):
errors.append(self.Error(i, self.helptext)) errors.append(self.Error(i, self.helptext))
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
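A quick way to see what the tightened pattern above flags; note the rule additionally masks parenthesised filter arguments with `(dummy)` before matching:

import re

filters = re.compile(r"(?<=\|)((\s{2,})*\S+)|(\S+(\s{2,})*)(?=\|)")

for item in (" foo | bar ", " foo|bar ", " foo  |  bar "):
    print(repr(item), "->", "flagged" if filters.findall(item) else "ok")
# ' foo | bar '   -> ok
# ' foo|bar '     -> flagged (no space around the pipe)
# ' foo  |  bar ' -> flagged (more than one space)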
@ -17,14 +17,15 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE. # THE SOFTWARE.
from ansiblelater.standard import StandardBase
from ansiblelater.rule import RuleBase
class CheckGitHasVersion(RuleBase): class CheckGitHasVersion(StandardBase):
rid = "ANS120"
sid = "ANSIBLE0020"
description = "Git checkouts should use explicit version" description = "Git checkouts should use explicit version"
helptext = "git checkouts should point to an explicit commit or tag, not `latest`" helptext = "git checkouts should point to an explicit commit or tag, not `latest`"
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,41 +1,21 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckInstallUseLatest(RuleBase): class CheckInstallUseLatest(StandardBase):
rid = "ANS109"
sid = "ANSIBLE0009"
description = "Package installs should use present, not latest" description = "Package installs should use present, not latest"
helptext = "package installs should use `state=present` with or without a version" helptext = "package installs should use `state=present` with or without a version"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
tasks, errors = self.get_normalized_tasks(candidate, settings) tasks, errors = self.get_normalized_tasks(candidate, settings)
package_managers = [ package_managers = [
"yum", "yum", "apt", "dnf", "homebrew", "pacman", "openbsd_package", "pkg5", "portage",
"apt", "pkgutil", "slackpkg", "swdepot", "zypper", "bundler", "pip", "pear", "npm", "yarn",
"dnf", "gem", "easy_install", "bower", "package", "apk", "openbsd_pkg", "pkgng", "sorcery",
"homebrew", "xbps"
"pacman",
"openbsd_package",
"pkg5",
"portage",
"pkgutil",
"slackpkg",
"swdepot",
"zypper",
"bundler",
"pip",
"pear",
"npm",
"yarn",
"gem",
"easy_install",
"bower",
"package",
"apk",
"openbsd_pkg",
"pkgng",
"sorcery",
"xbps",
] ]
if not errors: if not errors:
@ -1,89 +0,0 @@
# Original code written by the authors of ansible-lint
import functools
from ansiblelater.rule import RuleBase
SORTER_TASKS = (
"name",
# "__module__",
# "action",
# "args",
None, # <-- None include all modules that not using action and *
# "when",
# "notify",
# "tags",
"block",
"rescue",
"always",
)
class CheckKeyOrder(RuleBase):
rid = "ANS129"
description = "Check for recommended key order"
helptext = "{type} key order can be improved to `{sorted_keys}`"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
errors = []
tasks, err = self.get_normalized_tasks(candidate, settings)
if err:
return self.Result(candidate.path, err)
for task in tasks:
is_sorted, keys = self._sort_keys(task.get("__raw_task__"))
if not is_sorted:
errors.append(
self.Error(
task["__line__"],
self.helptext.format(type="task", sorted_keys=", ".join(keys)),
)
)
if candidate.kind == "playbook":
tasks, err = self.get_tasks(candidate, settings)
if err:
return self.Result(candidate.path, err)
for task in tasks:
is_sorted, keys = self._sort_keys(task)
if not is_sorted:
errors.append(
self.Error(
task["__line__"],
self.helptext.format(type="play", sorted_keys=", ".join(keys)),
)
)
return self.Result(candidate.path, errors)
@staticmethod
def _sort_keys(task):
if not task:
return True, []
keys = [str(key) for key in task if not key.startswith("_")]
sorted_keys = sorted(keys, key=functools.cmp_to_key(_task_property_sorter))
return (keys == sorted_keys), sorted_keys
def _task_property_sorter(property1, property2):
"""Sort task properties based on SORTER."""
v_1 = _get_property_sort_index(property1)
v_2 = _get_property_sort_index(property2)
return (v_1 > v_2) - (v_1 < v_2)
def _get_property_sort_index(name):
"""Return the index of the property in the sorter."""
a_index = -1
for i, v in enumerate(SORTER_TASKS):
if v == name:
return i
if v is None:
a_index = i
return a_index
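For illustration, a condensed, self-contained restatement of the comparator above, showing the effective ordering it enforces (`name` first, `block`/`rescue`/`always` last, everything else kept stable in between):

import functools

SORTER_TASKS = ("name", None, "block", "rescue", "always")

def _index(key):
    return SORTER_TASKS.index(key) if key in SORTER_TASKS else SORTER_TASKS.index(None)

def _cmp(a, b):
    return (_index(a) > _index(b)) - (_index(a) < _index(b))

keys = ["when", "block", "name", "ansible.builtin.copy"]
print(sorted(keys, key=functools.cmp_to_key(_cmp)))
# ['name', 'when', 'ansible.builtin.copy', 'block']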
@ -1,12 +1,14 @@
import re import re
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckLiteralBoolFormat(RuleBase): class CheckLiteralBoolFormat(StandardBase):
rid = "ANS114"
sid = "ANSIBLE0014"
description = "Literal bools should be consistent" description = "Literal bools should be consistent"
helptext = "literal bools should be written as `{bools}`" helptext = "literal bools should be written as `{bools}`"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,12 +1,14 @@
# Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp> # Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp>
# Copyright (c) 2018, Ansible Project # Copyright (c) 2018, Ansible Project
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckLocalAction(RuleBase): class CheckLocalAction(StandardBase):
rid = "ANS124"
sid = "ANSIBLE0024"
description = "Don't use local_action" description = "Don't use local_action"
helptext = "`delegate_to: localhost` should be used instead of `local_action`" helptext = ("`delegate_to: localhost` should be used instead of `local_action`")
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,13 +1,15 @@
# Copyright (c) 2018, Ansible Project # Copyright (c) 2018, Ansible Project
from nested_lookup import nested_lookup from nested_lookup import nested_lookup
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckMetaChangeFromDefault(RuleBase): class CheckMetaChangeFromDefault(StandardBase):
rid = "ANS121"
sid = "ANSIBLE0021"
description = "Roles meta/main.yml default values should be changed" description = "Roles meta/main.yml default values should be changed"
helptext = "meta/main.yml default values should be changed for: `{field}`" helptext = "meta/main.yml default values should be changed for: `{field}`"
version = "0.2"
types = ["meta"] types = ["meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -22,7 +24,7 @@ class CheckMetaChangeFromDefault(RuleBase):
if not errors: if not errors:
for field, default in field_defaults: for field, default in field_defaults:
pair = f"{field}: {default}" pair = "{field}: {default}".format(field=field, default=default)
lookup = nested_lookup(field, content) lookup = nested_lookup(field, content)
if lookup and default in nested_lookup(field, content): if lookup and default in nested_lookup(field, content):
errors.append(self.Error(None, self.helptext.format(field=pair))) errors.append(self.Error(None, self.helptext.format(field=pair)))
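The `nested_lookup` call above returns every value found for a key anywhere in the parsed meta file; a small illustration with a made-up default pair:

from nested_lookup import nested_lookup

meta = {"galaxy_info": {"author": "your name", "license": "MIT"}}   # hypothetical meta/main.yml content
field, default = "author", "your name"                              # illustrative default pair

if default in nested_lookup(field, meta):
    print(f"meta/main.yml default values should be changed for: `{field}: {default}`")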
@ -1,12 +1,14 @@
from nested_lookup import nested_lookup from nested_lookup import nested_lookup
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckMetaMain(RuleBase): class CheckMetaMain(StandardBase):
rid = "ANS102"
sid = "ANSIBLE0002"
description = "Roles must contain suitable meta/main.yml" description = "Roles must contain suitable meta/main.yml"
helptext = "file should contain `{key}` key" helptext = "file should contain `{key}` key"
version = "0.1"
types = ["meta"] types = ["meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -14,8 +16,8 @@ class CheckMetaMain(RuleBase):
keys = ["author", "description", "min_ansible_version", "platforms"] keys = ["author", "description", "min_ansible_version", "platforms"]
if not errors: if not errors:
has_galaxy_info = isinstance(content, dict) and "galaxy_info" in content has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content.keys())
has_dependencies = isinstance(content, dict) and "dependencies" in content has_dependencies = (isinstance(content, dict) and "dependencies" in content.keys())
if not has_galaxy_info: if not has_galaxy_info:
errors.append(self.Error(None, self.helptext.format(key="galaxy_info"))) errors.append(self.Error(None, self.helptext.format(key="galaxy_info")))
@ -1,12 +1,14 @@
from collections import defaultdict from collections import defaultdict
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckNameFormat(RuleBase): class CheckNameFormat(StandardBase):
rid = "ANS107"
sid = "ANSIBLE0007"
description = "Name of tasks and handlers must be formatted" description = "Name of tasks and handlers must be formatted"
helptext = "name `{name}` should start with uppercase" helptext = "name '{name}' should start with uppercase"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -17,7 +19,7 @@ class CheckNameFormat(RuleBase):
for task in tasks: for task in tasks:
if "name" in task: if "name" in task:
namelines[task["name"]].append(task["__line__"]) namelines[task["name"]].append(task["__line__"])
for name, lines in namelines.items(): for (name, lines) in namelines.items():
if name and not name[0].isupper(): if name and not name[0].isupper():
errors.append(self.Error(lines[-1], self.helptext.format(name=name))) errors.append(self.Error(lines[-1], self.helptext.format(name=name)))
@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckNamedTask(RuleBase): class CheckNamedTask(StandardBase):
rid = "ANS106"
sid = "ANSIBLE0006"
description = "Tasks and handlers must be named" description = "Tasks and handlers must be named"
helptext = "module `{module}` used without or empty `name` attribute" helptext = "module '{module}' used without or empty `name` attribute"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckNativeYaml(RuleBase): class CheckNativeYaml(StandardBase):
rid = "YML108"
sid = "LINT0008"
description = "Use YAML format for tasks and handlers rather than key=value" description = "Use YAML format for tasks and handlers rather than key=value"
helptext = "task arguments appear to be in key value rather than YAML format" helptext = "task arguments appear to be in key value rather than YAML format"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# Author: Adrián Tóth <adtoth@redhat.com> # Author: Adrián Tóth <adtoth@redhat.com>
# #
# Copyright (c) 2020, Red Hat, Inc. # Copyright (c) 2020, Red Hat, Inc.
@ -21,16 +22,18 @@
# THE SOFTWARE. # THE SOFTWARE.
import re import re
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckNestedJinja(RuleBase): class CheckNestedJinja(StandardBase):
rid = "ANS123"
sid = "ANSIBLE0023"
description = "Don't use nested Jinja2 pattern" description = "Don't use nested Jinja2 pattern"
helptext = ( helptext = (
"there should not be any nested jinja pattern " "there should not be any nested jinja pattern "
"like `{{ list_one + {{ list_two | max }} }}`" "like `{{ list_one + {{ list_two | max }} }}`"
) )
version = "0.2"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -48,7 +51,7 @@ class CheckNestedJinja(RuleBase):
for item in match: for item in match:
matches.append((i, item)) matches.append((i, item))
for i, _ in matches: for i, line in matches:
errors.append(self.Error(i, self.helptext)) errors.append(self.Error(i, self.helptext))
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,12 +1,14 @@
# Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp> # Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp>
# Copyright (c) 2018, Ansible Project # Copyright (c) 2018, Ansible Project
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckRelativeRolePaths(RuleBase): class CheckRelativeRolePaths(StandardBase):
rid = "ANS125"
sid = "ANSIBLE0025"
description = "Don't use a relative path in a role" description = "Don't use a relative path in a role"
helptext = "`copy` and `template` modules don't need relative path for `src`" helptext = "`copy` and `template` modules don't need relative path for `src`"
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -24,7 +26,7 @@ class CheckRelativeRolePaths(RuleBase):
path_to_check = None path_to_check = None
if module in module_to_path_folder and "src" in task["action"]: if module in module_to_path_folder and "src" in task["action"]:
path_to_check = f"../{module_to_path_folder[module]}" path_to_check = "../{}".format(module_to_path_folder[module])
if path_to_check and path_to_check in task["action"]["src"]: if path_to_check and path_to_check in task["action"]["src"]:
errors.append(self.Error(task["__line__"], self.helptext)) errors.append(self.Error(task["__line__"], self.helptext))
@ -1,12 +1,14 @@
from ansible.parsing.yaml.objects import AnsibleMapping from ansible.parsing.yaml.objects import AnsibleMapping
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckScmInSrc(RuleBase): class CheckScmInSrc(StandardBase):
rid = "ANS105"
sid = "ANSIBLE0005"
description = "Use `scm:` key rather than `src: scm+url`" description = "Use `scm:` key rather than `src: scm+url`"
helptext = "usage of `src: scm+url` not recommended" helptext = "usage of `src: scm+url` not recommended"
version = "0.1"
types = ["rolesfile"] types = ["rolesfile"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -14,11 +16,8 @@ class CheckScmInSrc(RuleBase):
if not errors: if not errors:
for role in roles: for role in roles:
if ( if isinstance(role, AnsibleMapping):
isinstance(role, AnsibleMapping) if "+" in role.get("src"):
and bool(role.get("src"))
and "+" in role.get("src")
):
errors.append(self.Error(role["__line__"], self.helptext)) errors.append(self.Error(role["__line__"], self.helptext))
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
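The reworked condition above also tolerates entries without a `src` key, where the old code would raise a TypeError on `"+" in None`. A plain-dict sketch with made-up requirements entries:

roles = [
    {"src": "username.rolename", "__line__": 2},
    {"src": "git+https://example.com/ansible-role-demo.git", "__line__": 3},
    {"name": "local_role", "__line__": 4},   # no src key at all
]

for role in roles:
    if bool(role.get("src")) and "+" in role.get("src"):
        print(f"line {role['__line__']}: usage of `src: scm+url` not recommended")
# only line 3 is reported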
@ -1,10 +1,14 @@
from ansiblelater.rule import RuleBase import re
from ansiblelater.standard import StandardBase
class CheckShellInsteadCommand(RuleBase): class CheckShellInsteadCommand(StandardBase):
rid = "ANS110"
sid = "ANSIBLE0010"
description = "Shell should only be used when essential" description = "Shell should only be used when essential"
helptext = "shell should only be used when piping, redirecting or chaining commands" helptext = "shell should only be used when piping, redirecting or chaining commands"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -18,8 +22,13 @@ class CheckShellInsteadCommand(RuleBase):
if "executable" in task["action"]: if "executable" in task["action"]:
continue continue
cmd = self.get_safe_cmd(task) if "cmd" in task["action"]:
if not any(ch in cmd for ch in self.SHELL_PIPE_CHARS): cmd = task["action"].get("cmd", [])
else:
cmd = " ".join(task["action"].get("__ansible_arguments__", []))
unjinja = re.sub(r"\{\{[^\}]*\}\}", "JINJA_VAR", cmd)
if not any(ch in unjinja for ch in "&|<>;$\n*[]{}?"):
errors.append(self.Error(task["__line__"], self.helptext)) errors.append(self.Error(task["__line__"], self.helptext))
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
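Old and new variants of this rule hinge on the same test: does the command still contain shell control characters once Jinja expressions are masked out? A condensed sketch, following the masking approach of the inline version shown on the old side:

import re

SHELL_PIPE_CHARS = "&|<>;$\n*[]{}?"

def needs_shell(cmd):
    # mask Jinja expressions so "{{ var }}" braces do not count as shell syntax
    unjinja = re.sub(r"\{\{[^\}]*\}\}", "JINJA_VAR", cmd)
    return any(ch in unjinja for ch in SHELL_PIPE_CHARS)

print(needs_shell("systemctl restart nginx"))           # False -> plain command, rule fires
print(needs_shell("ps aux | grep {{ service_name }}"))  # True  -> shell is justified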
@ -1,13 +1,15 @@
import re import re
from collections import defaultdict from collections import defaultdict
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckTaskSeparation(RuleBase): class CheckTaskSeparation(StandardBase):
rid = "ANS101"
sid = "ANSIBLE0001"
description = "Single tasks should be separated by empty line" description = "Single tasks should be separated by empty line"
helptext = "missing task separation (required: 1 empty line)" helptext = "missing task separation (required: 1 empty line)"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,12 +1,14 @@
from collections import defaultdict from collections import defaultdict
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckUniqueNamedTask(RuleBase): class CheckUniqueNamedTask(StandardBase):
rid = "ANS103"
sid = "ANSIBLE0003"
description = "Tasks and handlers must be uniquely named within a single file" description = "Tasks and handlers must be uniquely named within a single file"
helptext = "name `{name}` appears multiple times" helptext = "name '{name}' appears multiple times"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -18,7 +20,7 @@ class CheckUniqueNamedTask(RuleBase):
for task in tasks: for task in tasks:
if "name" in task: if "name" in task:
namelines[task["name"]].append(task["__line__"]) namelines[task["name"]].append(task["__line__"])
for name, lines in namelines.items(): for (name, lines) in namelines.items():
if name and len(lines) > 1: if name and len(lines) > 1:
errors.append(self.Error(lines[-1], self.helptext.format(name=name))) errors.append(self.Error(lines[-1], self.helptext.format(name=name)))
@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckWhenFormat(RuleBase): class CheckWhenFormat(StandardBase):
rid = "ANS122"
sid = "ANSIBLE0022"
description = "Don't use Jinja2 in when" description = "Don't use Jinja2 in when"
helptext = ( helptext = (
"`when` is a raw Jinja2 expression, redundant `{{ }}` should be removed from variable(s)" "`when` is a raw Jinja2 expression, redundant {{ }} "
"should be removed from variable(s)"
) )
version = "0.2"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlColons(RuleBase): class CheckYamlColons(StandardBase):
rid = "YML105"
sid = "LINT0005"
description = "YAML should use consistent number of spaces around colons" description = "YAML should use consistent number of spaces around colons"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
options = f"rules: {{colons: {settings['yamllint']['colons']}}}" options = "rules: {{colons: {conf}}}".format(conf=settings["yamllint"]["colons"])
errors = self.run_yamllint(candidate, options) errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
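The f-string above (with doubled braces escaping the literal ones) produces an inline yamllint configuration; a sketch of what gets handed to `run_yamllint`, assuming the default colon settings:

from yamllint.config import YamlLintConfig

settings = {"yamllint": {"colons": {"max-spaces-before": 0, "max-spaces-after": 1}}}
options = f"rules: {{colons: {settings['yamllint']['colons']}}}"
print(options)   # rules: {colons: {'max-spaces-before': 0, 'max-spaces-after': 1}}
YamlLintConfig(options)   # parses as a valid inline yamllint config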
@ -1,13 +1,17 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlDocumentEnd(RuleBase): class CheckYamlDocumentEnd(StandardBase):
rid = "YML109"
description = "YAML document end marker should match configuration" sid = "LINT0009"
description = "YAML should contain document end marker"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
options = f"rules: {{document-end: {settings['yamllint']['document-end']}}}" options = "rules: {{document-end: {conf}}}".format(
conf=settings["yamllint"]["document-end"]
)
errors = self.run_yamllint(candidate, options) errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,13 +1,17 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlDocumentStart(RuleBase): class CheckYamlDocumentStart(StandardBase):
rid = "YML104"
description = "YAML document start marker should match configuration" sid = "LINT0004"
description = "YAML should contain document start marker"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
options = f"rules: {{document-start: {settings['yamllint']['document-start']}}}" options = "rules: {{document-start: {conf}}}".format(
conf=settings["yamllint"]["document-start"]
)
errors = self.run_yamllint(candidate, options) errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlEmptyLines(RuleBase): class CheckYamlEmptyLines(StandardBase):
rid = "YML101"
sid = "LINT0001"
description = "YAML should not contain unnecessarily empty lines" description = "YAML should not contain unnecessarily empty lines"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
options = f"rules: {{empty-lines: {settings['yamllint']['empty-lines']}}}" options = "rules: {{empty-lines: {conf}}}".format(conf=settings["yamllint"]["empty-lines"])
errors = self.run_yamllint(candidate, options) errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,12 +1,14 @@
import os import os
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlFile(RuleBase): class CheckYamlFile(StandardBase):
rid = "YML106"
sid = "LINT0006"
description = "Roles file should be in yaml format" description = "Roles file should be in yaml format"
helptext = "file does not have a .yml extension" helptext = "file does not have a .yml extension"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlHasContent(RuleBase): class CheckYamlHasContent(StandardBase):
rid = "YML107"
sid = "LINT0007"
description = "Files should contain useful content" description = "Files should contain useful content"
helptext = "the file appears to have no useful content" helptext = "the file appears to have no useful content"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "defaults", "meta"] types = ["playbook", "task", "handler", "rolevars", "defaults", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlHyphens(RuleBase): class CheckYamlHyphens(StandardBase):
rid = "YML103"
sid = "LINT0003"
description = "YAML should use consistent number of spaces after hyphens" description = "YAML should use consistent number of spaces after hyphens"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
options = f"rules: {{hyphens: {settings['yamllint']['hyphens']}}}" options = "rules: {{hyphens: {conf}}}".format(conf=settings["yamllint"]["hyphens"])
errors = self.run_yamllint(candidate, options) errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,13 +1,17 @@
from ansiblelater.rule import RuleBase from ansiblelater.standard import StandardBase
class CheckYamlIndent(RuleBase): class CheckYamlIndent(StandardBase):
rid = "YML102"
sid = "LINT0002"
description = "YAML should not contain unnecessarily empty lines" description = "YAML should not contain unnecessarily empty lines"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"] types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings): def check(self, candidate, settings):
options = f"rules: {{document-start: {settings['yamllint']['document-start']}}}" options = "rules: {{document-start: {conf}}}".format(
conf=settings["yamllint"]["document-start"]
)
errors = self.run_yamllint(candidate, options) errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors) return self.Result(candidate.path, errors)
@ -1,13 +0,0 @@
from ansiblelater.rule import RuleBase
class CheckYamlOctalValues(RuleBase):
rid = "YML110"
description = "YAML implicit/explicit octal value should match configuration"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{octal-values: {settings['yamllint']['octal-values']}}}"
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)
@ -1,6 +1,5 @@
"""Global settings object definition.""" """Global settings object definition."""
import importlib.resources
import os import os
import anyconfig import anyconfig
@ -8,6 +7,7 @@ import jsonschema.exceptions
import pathspec import pathspec
from appdirs import AppDirs from appdirs import AppDirs
from jsonschema._utils import format_as_index from jsonschema._utils import format_as_index
from pkg_resources import resource_filename
from ansiblelater import utils from ansiblelater import utils
@ -15,7 +15,7 @@ config_dir = AppDirs("ansible-later").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml") default_config_file = os.path.join(config_dir, "config.yml")
class Settings: class Settings(object):
""" """
Create an object with all necessary settings. Create an object with all necessary settings.
@ -25,13 +25,14 @@ class Settings:
- provides cli parameters - provides cli parameters
""" """
def __init__(self, args, config_file=default_config_file): def __init__(self, args={}, config_file=default_config_file):
""" """
Initialize a new settings class. Initialize a new settings class.
:param args: An optional dict of options, arguments and commands from the CLI. :param args: An optional dict of options, arguments and commands from the CLI.
:param config_file: An optional path to a yaml config file. :param config_file: An optional path to a yaml config file.
:returns: None :returns: None
""" """
self.config_file = config_file self.config_file = config_file
self.schema = None self.schema = None
@ -41,9 +42,6 @@ class Settings:
self._update_filelist() self._update_filelist()
def _set_args(self, args): def _set_args(self, args):
if args is None:
args = {}
defaults = self._get_defaults() defaults = self._get_defaults()
self.config_file = args.get("config_file") or default_config_file self.config_file = args.get("config_file") or default_config_file
@ -104,13 +102,13 @@ class Settings:
if f not in defaults["ansible"]["custom_modules"]: if f not in defaults["ansible"]["custom_modules"]:
defaults["ansible"]["custom_modules"].append(f) defaults["ansible"]["custom_modules"].append(f)
if defaults["rules"]["builtin"]: if defaults["rules"]["buildin"]:
ref = importlib.resources.files("ansiblelater") / "rules" defaults["rules"]["standards"].append(
with importlib.resources.as_file(ref) as path: os.path.join(resource_filename("ansiblelater", "rules"))
defaults["rules"]["dir"].append(path) )
defaults["rules"]["dir"] = [ defaults["rules"]["standards"] = [
os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["dir"] os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["standards"]
] ]
return defaults return defaults
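The switch from `pkg_resources.resource_filename` to `importlib.resources` drops the setuptools runtime dependency; a minimal sketch of the new lookup (Python 3.9+, requires ansible-later to be installed):

import importlib.resources

ref = importlib.resources.files("ansiblelater") / "rules"
with importlib.resources.as_file(ref) as path:
    print(path)   # e.g. <site-packages>/ansiblelater/rules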
@ -118,20 +116,18 @@ class Settings:
def _get_defaults(self): def _get_defaults(self):
defaults = { defaults = {
"rules": { "rules": {
"builtin": True, "buildin": True,
"dir": [], "standards": [],
"include_filter": [], "filter": [],
"exclude_filter": [], "exclude_filter": [],
"warning_filter": [ "warning_filter": ["ANSIBLE9999"],
"ANS128",
"ANS999",
],
"ignore_dotfiles": True, "ignore_dotfiles": True,
"exclude_files": [], "exclude_files": [],
"version": ""
}, },
"logging": { "logging": {
"level": "WARNING", "level": "WARNING",
"json": False, "json": False
}, },
"ansible": { "ansible": {
"custom_modules": [], "custom_modules": [],
@ -144,7 +140,7 @@ class Settings:
"exclude": [ "exclude": [
"meta", "meta",
"debug", "debug",
"block/always/rescue", "block",
"include_role", "include_role",
"import_role", "import_role",
"include_tasks", "include_tasks",
@ -168,21 +164,17 @@ class Settings:
"indent-sequences": True, "indent-sequences": True,
}, },
"hyphens": { "hyphens": {
"max-spaces-after": 1, "max-spaces-after": 1
}, },
"document-start": { "document-start": {
"present": True, "present": True
}, },
"document-end": { "document-end": {
"present": False, "present": True
}, },
"colons": { "colons": {
"max-spaces-before": 0, "max-spaces-before": 0,
"max-spaces-after": 1, "max-spaces-after": 1
},
"octal-values": {
"forbid-implicit-octal": True,
"forbid-explicit-octal": True,
}, },
}, },
} }
@ -196,16 +188,14 @@ class Settings:
anyconfig.validate(config, self.schema, ac_schema_safe=False) anyconfig.validate(config, self.schema, ac_schema_safe=False)
return True return True
except jsonschema.exceptions.ValidationError as e: except jsonschema.exceptions.ValidationError as e:
validator = e.validator schema_error = (
path = format_as_index(
next(iter(e.absolute_path)),
list(e.absolute_path)[1:],
)
msg = e.message
utils.sysexit_with_message(
"Error while loading configuration:\n" "Error while loading configuration:\n"
f"Failed validating '{validator}' at {path}: {msg}" "Failed validating '{validator}' in schema{schema}"
).format(
validator=e.validator, schema=format_as_index(list(e.relative_schema_path)[:-1])
)
utils.sysexit_with_message(
"{schema}: {msg}".format(schema=schema_error, msg=e.message)
) )
def _update_filelist(self): def _update_filelist(self):
@ -213,14 +203,13 @@ class Settings:
excludes = self.config["rules"]["exclude_files"] excludes = self.config["rules"]["exclude_files"]
ignore_dotfiles = self.config["rules"]["ignore_dotfiles"] ignore_dotfiles = self.config["rules"]["ignore_dotfiles"]
if ignore_dotfiles: if ignore_dotfiles and not self.args_files:
excludes.append(".*") excludes.append(".*")
else:
if self.args_files:
del excludes[:] del excludes[:]
filelist = [] filelist = []
for root, _dirs, files in os.walk("."): for root, dirs, files in os.walk("."):
for filename in files: for filename in files:
filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename)))) filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename))))


@ -1,90 +1,98 @@
"""Rule definition.""" """Standard definition."""
import codecs
import copy import copy
import importlib import importlib
import inspect import inspect
import os import os
import pathlib import pathlib
import re import re
from abc import ABCMeta, abstractmethod from abc import ABCMeta
from abc import abstractmethod
from collections import defaultdict from collections import defaultdict
from urllib.parse import urlparse
import toolz import toolz
import yaml import yaml
from yamllint import linter from yamllint import linter
from yamllint.config import YamlLintConfig from yamllint.config import YamlLintConfig
from ansiblelater.exceptions import LaterAnsibleError, LaterError from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.utils import Singleton, sysexit_with_message from ansiblelater.exceptions import LaterError
from ansiblelater.utils.yamlhelper import ( from ansiblelater.utils import Singleton
UnsafeTag, from ansiblelater.utils import sysexit_with_message
VaultTag, from ansiblelater.utils.yamlhelper import UnsafeTag
action_tasks, from ansiblelater.utils.yamlhelper import VaultTag
normalize_task, from ansiblelater.utils.yamlhelper import action_tasks
normalized_yaml, from ansiblelater.utils.yamlhelper import normalize_task
parse_yaml_linenumbers, from ansiblelater.utils.yamlhelper import normalized_yaml
) from ansiblelater.utils.yamlhelper import parse_yaml_linenumbers
class RuleMeta(type): class StandardMeta(type):
def __call__(cls, *args):
def __call__(cls, *args, **kwargs):
mcls = type.__call__(cls, *args) mcls = type.__call__(cls, *args)
mcls.rid = cls.rid setattr(mcls, "sid", cls.sid)
mcls.description = getattr(cls, "description", "__unknown__") setattr(mcls, "description", getattr(cls, "description", "__unknown__"))
mcls.helptext = getattr(cls, "helptext", "") setattr(mcls, "helptext", getattr(cls, "helptext", ""))
mcls.types = getattr(cls, "types", []) setattr(mcls, "version", getattr(cls, "version", None))
setattr(mcls, "types", getattr(cls, "types", []))
return mcls return mcls
class RuleExtendedMeta(RuleMeta, ABCMeta): class StandardExtendedMeta(StandardMeta, ABCMeta):
pass pass
class RuleBase(metaclass=RuleExtendedMeta): class StandardBase(object, metaclass=StandardExtendedMeta):
SHELL_PIPE_CHARS = "&|<>;$\n*[]{}?"
@property @property
@abstractmethod @abstractmethod
def rid(self): def sid(self):
pass pass
@abstractmethod @abstractmethod
def check(self, candidate, settings): def check(self, candidate, settings):
pass pass
def __repr__(self): def __repr__(self): # noqa
return f"Rule: {self.description} (types: {self.types})" return "Standard: {description} (version: {version}, types: {types})".format(
description=self.description, version=self.version, types=self.types
)
@staticmethod @staticmethod
def get_tasks(candidate, settings): # noqa def get_tasks(candidate, settings):
errors = [] errors = []
yamllines = [] yamllines = []
if not candidate.faulty: if not candidate.faulty:
try: try:
with open(candidate.path, encoding="utf-8") as f: with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path) yamllines = parse_yaml_linenumbers(f, candidate.path)
except LaterError as ex: except LaterError as ex:
e = ex.original e = ex.original
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True candidate.faulty = True
except LaterAnsibleError as e: except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}")) errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True candidate.faulty = True
return yamllines, errors return yamllines, errors
@staticmethod @staticmethod
def get_action_tasks(candidate, settings): # noqa def get_action_tasks(candidate, settings):
tasks = [] tasks = []
errors = [] errors = []
if not candidate.faulty: if not candidate.faulty:
try: try:
with open(candidate.path, encoding="utf-8") as f: with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path) yamllines = parse_yaml_linenumbers(f, candidate.path)
if yamllines: if yamllines:
@ -92,11 +100,13 @@ class RuleBase(metaclass=RuleExtendedMeta):
except LaterError as ex: except LaterError as ex:
e = ex.original e = ex.original
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True candidate.faulty = True
except LaterAnsibleError as e: except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}")) errors.append(StandardBase.Error(e.line, "syntax error: {}".format(e.message)))
candidate.faulty = True candidate.faulty = True
return tasks, errors return tasks, errors
@ -114,11 +124,15 @@ class RuleBase(metaclass=RuleExtendedMeta):
except LaterError as ex: except LaterError as ex:
e = ex.original e = ex.original
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True candidate.faulty = True
except LaterAnsibleError as e: except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}")) errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True candidate.faulty = True
return normalized, errors return normalized, errors
@ -130,7 +144,7 @@ class RuleBase(metaclass=RuleExtendedMeta):
if not candidate.faulty: if not candidate.faulty:
try: try:
with open(candidate.path, encoding="utf-8") as f: with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path) yamllines = parse_yaml_linenumbers(f, candidate.path)
if yamllines: if yamllines:
@ -149,27 +163,30 @@ class RuleBase(metaclass=RuleExtendedMeta):
# No need to normalize_task if we are skipping it. # No need to normalize_task if we are skipping it.
continue continue
normalized_task = normalize_task( normalized.append(
normalize_task(
task, candidate.path, settings["ansible"]["custom_modules"] task, candidate.path, settings["ansible"]["custom_modules"]
) )
normalized_task["__raw_task__"] = task )
normalized.append(normalized_task)
except LaterError as ex: except LaterError as ex:
e = ex.original e = ex.original
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True candidate.faulty = True
except LaterAnsibleError as e: except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}")) errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True candidate.faulty = True
return normalized, errors return normalized, errors
@staticmethod @staticmethod
def get_normalized_yaml(candidate, settings, options=None): # noqa def get_normalized_yaml(candidate, settings, options=None):
errors = [] errors = []
yamllines = [] yamllines = []
@ -184,23 +201,27 @@ class RuleBase(metaclass=RuleExtendedMeta):
except LaterError as ex: except LaterError as ex:
e = ex.original e = ex.original
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True candidate.faulty = True
except LaterAnsibleError as e: except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}")) errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True candidate.faulty = True
return yamllines, errors return yamllines, errors
@staticmethod @staticmethod
def get_raw_yaml(candidate, settings): # noqa def get_raw_yaml(candidate, settings):
content = None content = None
errors = [] errors = []
if not candidate.faulty: if not candidate.faulty:
try: try:
with open(candidate.path, encoding="utf-8") as f: with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yaml.add_constructor( yaml.add_constructor(
UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader
) )
@ -210,7 +231,9 @@ class RuleBase(metaclass=RuleExtendedMeta):
content = yaml.safe_load(f) content = yaml.safe_load(f)
except yaml.YAMLError as e: except yaml.YAMLError as e:
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True candidate.faulty = True
@ -222,16 +245,15 @@ class RuleBase(metaclass=RuleExtendedMeta):
if not candidate.faulty: if not candidate.faulty:
try: try:
with open(candidate.path, encoding="utf-8") as f: with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
for problem in linter.run(f, YamlLintConfig(options)): for problem in linter.run(f, YamlLintConfig(options)):
errors.append(RuleBase.Error(problem.line, problem.desc)) errors.append(StandardBase.Error(problem.line, problem.desc))
except yaml.YAMLError as e: except yaml.YAMLError as e:
errors.append( errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}") StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
) )
candidate.faulty = True
except (TypeError, ValueError) as e:
errors.append(RuleBase.Error(None, f"yamllint error: {e}"))
candidate.faulty = True candidate.faulty = True
return errors return errors
@ -247,26 +269,10 @@ class RuleBase(metaclass=RuleExtendedMeta):
return first_cmd_arg return first_cmd_arg
@staticmethod class Error(object):
def get_safe_cmd(task):
if "cmd" in task["action"]:
cmd = task["action"].get("cmd", "")
else:
cmd = " ".join(task["action"].get("__ansible_arguments__", []))
cmd = re.sub(r"{{.+?}}", "JINJA_EXPRESSION", cmd)
cmd = re.sub(r"{%.+?%}", "JINJA_STATEMENT", cmd)
cmd = re.sub(r"{#.+?#}", "JINJA_COMMENT", cmd)
parts = cmd.split()
parts = [p if not urlparse(p.strip('"').strip("'")).scheme else "URL" for p in parts]
return " ".join(parts)
class Error:
"""Default error object created if a rule failed.""" """Default error object created if a rule failed."""
def __init__(self, lineno, message, **kwargs): def __init__(self, lineno, message, error_type=None, **kwargs):
""" """
Initialize a new error object and returns None. Initialize a new error object and returns None.
@ -277,21 +283,22 @@ class RuleBase(metaclass=RuleExtendedMeta):
self.lineno = lineno self.lineno = lineno
self.message = message self.message = message
self.kwargs = kwargs self.kwargs = kwargs
for key, value in kwargs.items(): for (key, value) in kwargs.items():
setattr(self, key, value) setattr(self, key, value)
def __repr__(self): def __repr__(self): # noqa
if self.lineno: if self.lineno:
return f"{self.lineno}: {self.message}" return "{no}: {msg}".format(no=self.lineno, msg=self.message)
return f" {self.message}" else:
return " {msg}".format(msg=self.message)
def to_dict(self): def to_dict(self):
result = {"lineno": self.lineno, "message": self.message} result = dict(lineno=self.lineno, message=self.message)
for key, value in self.kwargs.items(): for (key, value) in self.kwargs.items():
result[key] = value result[key] = value
return result return result
class Result: class Result(object):
"""Generic result object.""" """Generic result object."""
def __init__(self, candidate, errors=None): def __init__(self, candidate, errors=None):
@ -299,10 +306,11 @@ class RuleBase(metaclass=RuleExtendedMeta):
self.errors = errors or [] self.errors = errors or []
def message(self): def message(self):
return "\n".join([f"{self.candidate}:{error}" for error in self.errors]) return "\n".join(["{0}:{1}".format(self.candidate, error) for error in self.errors])
class RulesLoader: class StandardLoader():
def __init__(self, source): def __init__(self, source):
self.rules = [] self.rules = []
@ -318,33 +326,37 @@ class RulesLoader:
try: try:
spec.loader.exec_module(module) spec.loader.exec_module(module)
except (ImportError, NameError) as e: except (ImportError, NameError) as e:
sysexit_with_message(f"Failed to load roles file {filename}: \n {e!s}") sysexit_with_message(
"Failed to load roles file {module}: \n {msg}".format(
msg=str(e), module=filename
)
)
try: try:
for _name, obj in inspect.getmembers(module): for name, obj in inspect.getmembers(module):
if self._is_plugin(obj): if self._is_plugin(obj):
self.rules.append(obj()) self.rules.append(obj())
except TypeError as e: except TypeError as e:
sysexit_with_message(f"Failed to load roles file: \n {e!s}") sysexit_with_message("Failed to load roles file: \n {msg}".format(msg=str(e)))
self.validate() self.validate()
def _is_plugin(self, obj): def _is_plugin(self, obj):
return ( return inspect.isclass(obj) and issubclass(
inspect.isclass(obj) and issubclass(obj, RuleBase) and obj is not RuleBase and not None obj, StandardBase
) ) and obj is not StandardBase and not None
def validate(self): def validate(self):
normalize_rule = list(toolz.remove(lambda x: x.rid == "", self.rules)) normalized_std = (list(toolz.remove(lambda x: x.sid == "", self.rules)))
unique_rule = len(list(toolz.unique(normalize_rule, key=lambda x: x.rid))) unique_std = len(list(toolz.unique(normalized_std, key=lambda x: x.sid)))
all_rules = len(normalize_rule) all_std = len(normalized_std)
if all_rules != unique_rule: if not all_std == unique_std:
sysexit_with_message( sysexit_with_message(
"Found duplicate tags in rules definition. Please use unique tags only." "Detect duplicate ID's in standards definition. Please use unique ID's only."
) )
class SingleRules(RulesLoader, metaclass=Singleton): class SingleStandards(StandardLoader, metaclass=Singleton):
"""Singleton config class.""" """Singleton config class."""
pass pass


@ -22,8 +22,10 @@ def test_critical(capsys, mocker):
_, stderr = capsys.readouterr() _, stderr = capsys.readouterr()
print( print(
f"{colorama.Fore.RED}{colorama.Style.BRIGHT}CRITICAL:{colorama.Style.NORMAL} foo\n" "{}{}CRITICAL:{} foo\n{}".format(
f"{colorama.Style.RESET_ALL}" colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
) )
x, _ = capsys.readouterr() x, _ = capsys.readouterr()
@ -36,8 +38,10 @@ def test_error(capsys, mocker):
_, stderr = capsys.readouterr() _, stderr = capsys.readouterr()
print( print(
f"{colorama.Fore.RED}{colorama.Style.BRIGHT}ERROR:{colorama.Style.NORMAL} foo\n" "{}{}ERROR:{} foo\n{}".format(
f"{colorama.Style.RESET_ALL}" colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
) )
x, _ = capsys.readouterr() x, _ = capsys.readouterr()
@ -50,8 +54,10 @@ def test_warn(capsys, mocker):
stdout, _ = capsys.readouterr() stdout, _ = capsys.readouterr()
print( print(
f"{colorama.Fore.YELLOW}{colorama.Style.BRIGHT}WARNING:{colorama.Style.NORMAL} foo\n" "{}{}WARNING:{} foo\n{}".format(
f"{colorama.Style.RESET_ALL}" colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
) )
x, _ = capsys.readouterr() x, _ = capsys.readouterr()
@ -64,8 +70,10 @@ def test_info(capsys, mocker):
stdout, _ = capsys.readouterr() stdout, _ = capsys.readouterr()
print( print(
f"{colorama.Fore.BLUE}{colorama.Style.BRIGHT}INFO:{colorama.Style.NORMAL} foo\n" "{}{}INFO:{} foo\n{}".format(
f"{colorama.Style.RESET_ALL}" colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
) )
x, _ = capsys.readouterr() x, _ = capsys.readouterr()


@ -1,13 +1,14 @@
"""Global utils collection.""" """Global utils collection."""
from __future__ import print_function
import contextlib import contextlib
import os
import re import re
import sys import sys
from contextlib import suppress from distutils.version import LooseVersion
from functools import lru_cache
import yaml import yaml
from ansible.plugins.loader import module_loader
from ansiblelater import logger from ansiblelater import logger
@ -23,17 +24,32 @@ def count_spaces(c_string):
leading_spaces = 0 leading_spaces = 0
trailing_spaces = 0 trailing_spaces = 0
for _i, e in enumerate(c_string): for i, e in enumerate(c_string):
if not e.isspace(): if not e.isspace():
break break
leading_spaces += 1 leading_spaces += 1
for _i, e in reversed(list(enumerate(c_string))): for i, e in reversed(list(enumerate(c_string))):
if not e.isspace(): if not e.isspace():
break break
trailing_spaces += 1 trailing_spaces += 1
return (leading_spaces, trailing_spaces) return ((leading_spaces, trailing_spaces))
def get_property(prop):
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
result = re.search(
r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
open(os.path.join(parentdir, "__init__.py")).read()
)
return result.group(1)
def standards_latest(standards):
return max([standard.version for standard in standards if standard.version] or ["0.1"],
key=LooseVersion)
def lines_ranges(lines_spec): def lines_ranges(lines_spec):
@ -58,8 +74,10 @@ def safe_load(string):
:returns: dict :returns: dict
""" """
with suppress(yaml.scanner.ScannerError): try:
return yaml.safe_load(string) or {} return yaml.safe_load(string) or {}
except yaml.scanner.ScannerError as e:
print(str(e))
@contextlib.contextmanager @contextlib.contextmanager
@ -78,24 +96,14 @@ def open_file(filename, mode="r"):
def add_dict_branch(tree, vector, value): def add_dict_branch(tree, vector, value):
key = vector[0] key = vector[0]
tree[key] = ( tree[key] = value \
value if len(vector) == 1 else add_dict_branch(tree.get(key, {}), vector[1:], value) if len(vector) == 1 \
) else add_dict_branch(tree[key] if key in tree else {},
vector[1:],
value)
return tree return tree
def has_jinja(value):
"""Return true if a string seems to contain jinja templating."""
re_has_jinja = re.compile(r"{[{%#].*[%#}]}", re.DOTALL)
return bool(isinstance(value, str) and re_has_jinja.search(value))
def has_glob(value):
"""Return true if a string looks like having a glob pattern."""
re_has_glob = re.compile("[][*?]")
return bool(isinstance(value, str) and re_has_glob.search(value))
def sysexit(code=1): def sysexit(code=1):
sys.exit(code) sys.exit(code)
@ -112,23 +120,5 @@ class Singleton(type):
def __call__(cls, *args, **kwargs): def __call__(cls, *args, **kwargs):
if cls not in cls._instances: if cls not in cls._instances:
cls._instances[cls] = super().__call__(*args, **kwargs) cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls] return cls._instances[cls]
@lru_cache
def load_plugin(name):
"""Return loaded ansible plugin/module."""
loaded_module = module_loader.find_plugin_with_context(
name,
ignore_deprecated=True,
check_aliases=True,
)
if not loaded_module.resolved and name.startswith("ansible.builtin."):
# fallback to core behavior of using legacy
loaded_module = module_loader.find_plugin_with_context(
name.replace("ansible.builtin.", "ansible.legacy."),
ignore_deprecated=True,
check_aliases=True,
)
return loaded_module


@ -21,13 +21,15 @@
# THE SOFTWARE. # THE SOFTWARE.
import codecs import codecs
import glob
import imp
import os import os
from contextlib import suppress
import ansible.parsing.mod_args import ansible.parsing.mod_args
import yaml import yaml
from ansible import constants from ansible import constants
from ansible.errors import AnsibleError, AnsibleParserError from ansible.errors import AnsibleError
from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader from ansible.parsing.dataloader import DataLoader
from ansible.parsing.mod_args import ModuleArgsParser from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor from ansible.parsing.yaml.constructor import AnsibleConstructor
@ -35,7 +37,8 @@ from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import Templar from ansible.template import Templar
from yaml.composer import Composer from yaml.composer import Composer
from ansiblelater.exceptions import LaterAnsibleError, LaterError from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
try: try:
# Try to import the Ansible 2 module first, it's the future-proof one # Try to import the Ansible 2 module first, it's the future-proof one
@ -65,9 +68,7 @@ def ansible_template(basedir, varname, templatevars, **kwargs):
try: try:
from ansible.plugins.loader import init_plugin_loader, module_loader from ansible.plugins import module_loader
init_plugin_loader()
except ImportError: except ImportError:
from ansible.plugins.loader import module_loader from ansible.plugins.loader import module_loader
@ -128,6 +129,24 @@ BLOCK_NAME_TO_ACTION_TYPE_MAP = {
} }
def load_plugins(directory):
result = []
fh = None
for pluginfile in glob.glob(os.path.join(directory, "[A-Za-z]*.py")):
pluginname = os.path.basename(pluginfile.replace(".py", ""))
try:
fh, filename, desc = imp.find_module(pluginname, [directory])
mod = imp.load_module(pluginname, fh, filename, desc)
obj = getattr(mod, pluginname)()
result.append(obj)
finally:
if fh:
fh.close()
return result
def tokenize(line): def tokenize(line):
tokens = line.lstrip().split(" ") tokens = line.lstrip().split(" ")
if tokens[0] == "-": if tokens[0] == "-":
@ -136,8 +155,8 @@ def tokenize(line):
tokens = tokens[1:] tokens = tokens[1:]
command = tokens[0].replace(":", "") command = tokens[0].replace(":", "")
args = [] args = list()
kwargs = {} kwargs = dict()
nonkvfound = False nonkvfound = False
for arg in tokens[1:]: for arg in tokens[1:]:
if "=" in arg and not nonkvfound: if "=" in arg and not nonkvfound:
@ -152,10 +171,9 @@ def tokenize(line):
def _playbook_items(pb_data): def _playbook_items(pb_data):
if isinstance(pb_data, dict): if isinstance(pb_data, dict):
return pb_data.items() return pb_data.items()
elif not pb_data:
if not pb_data:
return [] return []
else:
return [item for play in pb_data for item in play.items()] return [item for play in pb_data for item in play.items()]
@ -168,7 +186,7 @@ def find_children(playbook, playbook_dir):
try: try:
playbook_ds = parse_yaml_from_file(playbook[0]) playbook_ds = parse_yaml_from_file(playbook[0])
except AnsibleError as e: except AnsibleError as e:
raise SystemExit(str(e)) from e raise SystemExit(str(e))
results = [] results = []
basedir = os.path.dirname(playbook[0]) basedir = os.path.dirname(playbook[0])
items = _playbook_items(playbook_ds) items = _playbook_items(playbook_ds)
@ -176,7 +194,7 @@ def find_children(playbook, playbook_dir):
for child in play_children(basedir, item, playbook[1], playbook_dir): for child in play_children(basedir, item, playbook[1], playbook_dir):
if "$" in child["path"] or "{{" in child["path"]: if "$" in child["path"] or "{{" in child["path"]:
continue continue
valid_tokens = [] valid_tokens = list()
for token in split_args(child["path"]): for token in split_args(child["path"]):
if "=" in token: if "=" in token:
break break
@ -187,18 +205,20 @@ def find_children(playbook, playbook_dir):
def template(basedir, value, variables, fail_on_undefined=False, **kwargs): def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
try:
value = ansible_template(
os.path.abspath(basedir), value, variables,
**dict(kwargs, fail_on_undefined=fail_on_undefined)
)
# Hack to skip the following exception when using to_json filter on a variable. # Hack to skip the following exception when using to_json filter on a variable.
# I guess the filter doesn't like empty vars... # I guess the filter doesn't like empty vars...
with suppress(AnsibleError, ValueError): except (AnsibleError, ValueError):
return ansible_template( # templating failed, so just keep value as is.
os.path.abspath(basedir), pass
value, return value
variables,
**dict(kwargs, fail_on_undefined=fail_on_undefined),
)
def play_children(basedir, item, parent_type): def play_children(basedir, item, parent_type, playbook_dir):
delegate_map = { delegate_map = {
"tasks": _taskshandlers_children, "tasks": _taskshandlers_children,
"pre_tasks": _taskshandlers_children, "pre_tasks": _taskshandlers_children,
@ -214,12 +234,13 @@ def play_children(basedir, item, parent_type):
play_library = os.path.join(os.path.abspath(basedir), "library") play_library = os.path.join(os.path.abspath(basedir), "library")
_load_library_if_exists(play_library) _load_library_if_exists(play_library)
if k in delegate_map and v: if k in delegate_map:
if v:
v = template( v = template(
os.path.abspath(basedir), os.path.abspath(basedir),
v, v,
{"playbook_dir": os.path.abspath(basedir)}, dict(playbook_dir=os.path.abspath(basedir)),
fail_on_undefined=False, fail_on_undefined=False
) )
return delegate_map[k](basedir, k, v, parent_type) return delegate_map[k](basedir, k, v, parent_type)
return [] return []
@ -227,7 +248,7 @@ def play_children(basedir, item, parent_type):
def _include_children(basedir, k, v, parent_type): def _include_children(basedir, k, v, parent_type):
# handle include: filename.yml tags=blah # handle include: filename.yml tags=blah
(command, args, kwargs) = tokenize(f"{k}: {v}") (command, args, kwargs) = tokenize("{0}: {1}".format(k, v))
result = path_dwim(basedir, args[0]) result = path_dwim(basedir, args[0])
if not os.path.exists(result) and not basedir.endswith("tasks"): if not os.path.exists(result) and not basedir.endswith("tasks"):
@ -250,20 +271,18 @@ def _taskshandlers_children(basedir, k, v, parent_type):
results.extend( results.extend(
_roles_children( _roles_children(
basedir, basedir,
k, k, [th["import_role"].get("name")],
[th["import_role"].get("name")],
parent_type, parent_type,
main=th["import_role"].get("tasks_from", "main"), main=th["import_role"].get("tasks_from", "main")
) )
) )
elif "include_role" in th: elif "include_role" in th:
results.extend( results.extend(
_roles_children( _roles_children(
basedir, basedir,
k, k, [th["include_role"].get("name")],
[th["include_role"].get("name")],
parent_type, parent_type,
main=th["include_role"].get("tasks_from", "main"), main=th["include_role"].get("tasks_from", "main")
) )
) )
elif "block" in th: elif "block" in th:
@ -279,11 +298,14 @@ def append_children(taskhandler, basedir, k, parent_type, results):
# when taskshandlers_children is called for playbooks, the # when taskshandlers_children is called for playbooks, the
# actual type of the included tasks is the section containing the # actual type of the included tasks is the section containing the
# include, e.g. tasks, pre_tasks, or handlers. # include, e.g. tasks, pre_tasks, or handlers.
playbook_section = k if parent_type == "playbook" else parent_type if parent_type == "playbook":
playbook_section = k
else:
playbook_section = parent_type
results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section}) results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section})
def _roles_children(basedir, k, v, parent_type, main="main"): # noqa def _roles_children(basedir, k, v, parent_type, main="main"):
results = [] results = []
for role in v: for role in v:
if isinstance(role, dict): if isinstance(role, dict):
@ -295,7 +317,10 @@ def _roles_children(basedir, k, v, parent_type, main="main"): # noqa
) )
) )
else: else:
raise SystemExit(f"role dict {role} does not contain a 'role' or 'name' key") raise SystemExit(
"role dict {0} does not contain a 'role' "
"or 'name' key".format(role)
)
else: else:
results.extend(_look_for_role_files(basedir, role, main=main)) results.extend(_look_for_role_files(basedir, role, main=main))
return results return results
@ -315,7 +340,7 @@ def _rolepath(basedir, role):
path_dwim(basedir, role), path_dwim(basedir, role),
# if included from roles/[role]/meta/main.yml # if included from roles/[role]/meta/main.yml
path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)), path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)),
path_dwim(basedir, os.path.join("..", "..", role)), path_dwim(basedir, os.path.join("..", "..", role))
] ]
if constants.DEFAULT_ROLES_PATH: if constants.DEFAULT_ROLES_PATH:
@ -358,33 +383,32 @@ def rolename(filepath):
if idx < 0: if idx < 0:
return "" return ""
role = filepath[idx + 6:] role = filepath[idx + 6:]
return role[: role.find("/")] role = role[:role.find("/")]
return role
def _kv_to_dict(v): def _kv_to_dict(v):
(command, args, kwargs) = tokenize(v) (command, args, kwargs) = tokenize(v)
return dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs) return (dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs))
def normalize_task(task, filename, custom_modules=None): def normalize_task(task, filename, custom_modules=[]):
"""Ensure tasks have an action key and strings are converted to python objects.""" """Ensure tasks have an action key and strings are converted to python objects."""
ansible_action_type = task.get("__ansible_action_type__", "task")
if "__ansible_action_type__" in task:
del (task["__ansible_action_type__"])
def _normalize(task, custom_modules): # temp. extract metadata
if custom_modules is None: ansible_meta = dict()
custom_modules = [] for key in ["__line__", "__file__", "__ansible_action_meta__"]:
default = None
normalized = {} if key == "__ansible_action_meta__":
ansible_parsed_keys = ("action", "local_action", "args", "delegate_to") default = dict()
if is_nested_task(task): ansible_meta[key] = task.pop(key, default)
_extract_ansible_parsed_keys_from_task(normalized, task, ansible_parsed_keys)
# Add dummy action for block/always/rescue statements normalized = dict()
normalized["action"] = {
"__ansible_module__": "block/always/rescue",
"__ansible_module_original__": "block/always/rescue",
"__ansible_arguments__": "block/always/rescue",
}
return normalized
builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS) builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
builtin = list(set(builtin + custom_modules)) builtin = list(set(builtin + custom_modules))
@ -394,77 +418,57 @@ def normalize_task(task, filename, custom_modules=None):
try: try:
action, arguments, normalized["delegate_to"] = mod_arg_parser.parse() action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
except AnsibleParserError as e: except AnsibleParserError as e:
raise LaterAnsibleError(e) from e raise LaterAnsibleError("syntax error", e)
# denormalize shell -> command conversion # denormalize shell -> command conversion
if "_uses_shell" in arguments: if "_uses_shell" in arguments:
action = "shell" action = "shell"
del arguments["_uses_shell"] del (arguments["_uses_shell"])
for k, v in list(task.items()): for (k, v) in list(task.items()):
if k in ansible_parsed_keys or k == action: if k in ("action", "local_action", "args", "delegate_to") or k == action:
# we don"t want to re-assign these values, which were # we don"t want to re-assign these values, which were
# determined by the ModuleArgsParser() above # determined by the ModuleArgsParser() above
continue continue
else:
normalized[k] = v normalized[k] = v
# convert builtin fqn calls to short forms because most rules know only normalized["action"] = dict(__ansible_module__=action)
# about short calls
normalized["action"] = {
"__ansible_module__": action.removeprefix("ansible.builtin."),
"__ansible_module_original__": action,
}
if "_raw_params" in arguments: if "_raw_params" in arguments:
normalized["action"]["__ansible_arguments__"] = ( normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].strip().split()
arguments["_raw_params"].strip().split() del (arguments["_raw_params"])
)
del arguments["_raw_params"]
else: else:
normalized["action"]["__ansible_arguments__"] = [] normalized["action"]["__ansible_arguments__"] = list()
normalized["action"].update(arguments) normalized["action"].update(arguments)
return normalized
# temp. extract metadata
ansible_meta = {}
for key in ["__line__", "__file__", "__ansible_action_meta__"]:
default = None
if key == "__ansible_action_meta__":
default = {}
ansible_meta[key] = task.pop(key, default)
ansible_action_type = task.get("__ansible_action_type__", "task")
if "__ansible_action_type__" in task:
del task["__ansible_action_type__"]
normalized = _normalize(task, custom_modules)
normalized[FILENAME_KEY] = filename normalized[FILENAME_KEY] = filename
normalized["__ansible_action_type__"] = ansible_action_type normalized["__ansible_action_type__"] = ansible_action_type
# add back extracted metadata # add back extracted metadata
for k, v in ansible_meta.items(): for (k, v) in ansible_meta.items():
if v: if v:
normalized[k] = v normalized[k] = v
return normalized return normalized
def action_tasks(yaml, candidate): def action_tasks(yaml, file):
tasks = [] tasks = list()
if candidate.filemeta in ["tasks", "handlers"]: if file["filetype"] in ["tasks", "handlers"]:
tasks = add_action_type(yaml, candidate.filemeta) tasks = add_action_type(yaml, file["filetype"])
else: else:
tasks.extend(extract_from_list(yaml, ["tasks", "handlers", "pre_tasks", "post_tasks"])) tasks.extend(extract_from_list(yaml, ["tasks", "handlers", "pre_tasks", "post_tasks"]))
# Add sub-elements of block/rescue/always to tasks list # Add sub-elements of block/rescue/always to tasks list
tasks.extend(extract_from_list(tasks, ["block", "rescue", "always"])) tasks.extend(extract_from_list(tasks, ["block", "rescue", "always"]))
# Remove block/rescue/always elements from tasks list
block_rescue_always = ("block", "rescue", "always")
tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)]
return tasks allowed = ["include", "include_tasks", "import_playbook", "import_tasks"]
return [task for task in tasks if set(allowed).isdisjoint(task.keys())]
def task_to_str(task): def task_to_str(task):
@ -472,19 +476,16 @@ def task_to_str(task):
if name: if name:
return name return name
action = task.get("action") action = task.get("action")
args = " ".join( args = " ".join([
[ u"{0}={1}".format(k, v)
f"{k}={v}"
for (k, v) in action.items() for (k, v) in action.items()
if k not in ["__ansible_module__", "__ansible_arguments__"] if k not in ["__ansible_module__", "__ansible_arguments__"]
] ] + action.get("__ansible_arguments__"))
+ action.get("__ansible_arguments__") return u"{0} {1}".format(action["__ansible_module__"], args)
)
return "{} {}".format(action["__ansible_module__"], args)
def extract_from_list(blocks, candidates): def extract_from_list(blocks, candidates):
results = [] results = list()
for block in blocks: for block in blocks:
for candidate in candidates: for candidate in candidates:
delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"] delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]
@ -493,19 +494,18 @@ def extract_from_list(blocks, candidates):
meta_data = dict(block) meta_data = dict(block)
for key in delete_meta_keys: for key in delete_meta_keys:
meta_data.pop(key, None) meta_data.pop(key, None)
results.extend(add_action_type(block[candidate], candidate, meta_data))
actions = add_action_type(block[candidate], candidate, meta_data)
results.extend(actions)
elif block[candidate] is not None: elif block[candidate] is not None:
raise RuntimeError( raise RuntimeError(
f"Key '{candidate}' defined, but bad value: '{block[candidate]!s}'" "Key '{candidate}' defined, but bad value: '{block}'".format(
candidate=candidate, block=str(block[candidate])
)
) )
return results return results
def add_action_type(actions, action_type, action_meta=None): def add_action_type(actions, action_type, action_meta=None):
results = [] results = list()
for action in actions: for action in actions:
action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type] action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
if action_meta: if action_meta:
@ -533,7 +533,7 @@ def parse_yaml_linenumbers(data, filename):
try: try:
mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep) mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
except yaml.constructor.ConstructorError as e: except yaml.constructor.ConstructorError as e:
raise LaterError("syntax error", e) from e raise LaterError("syntax error", e)
if hasattr(node, "__line__"): if hasattr(node, "__line__"):
mapping[LINE_NUMBER_KEY] = node.__line__ mapping[LINE_NUMBER_KEY] = node.__line__
@ -548,15 +548,11 @@ def parse_yaml_linenumbers(data, filename):
loader.compose_node = compose_node loader.compose_node = compose_node
loader.construct_mapping = construct_mapping loader.construct_mapping = construct_mapping
data = loader.get_single_data() or [] data = loader.get_single_data() or []
except ( except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
yaml.parser.ParserError, raise LaterError("syntax error", e)
yaml.scanner.ScannerError, except (yaml.composer.ComposerError) as e:
yaml.constructor.ConstructorError, e.problem = "{} {}".format(e.context, e.problem)
) as e: raise LaterError("syntax error", e)
raise LaterError("syntax error", e) from e
except yaml.composer.ComposerError as e:
e.problem = f"{e.context} {e.problem}"
raise LaterError("syntax error", e) from e
return data return data
@ -581,34 +577,14 @@ def normalized_yaml(file, options):
for line in removes: for line in removes:
lines.remove(line) lines.remove(line)
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e: except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
raise LaterError("syntax error", e) from e raise LaterError("syntax error", e)
return lines return lines
def is_nested_task(task):
"""Check if task includes block/always/rescue."""
# Cannot really trust the input
if isinstance(task, str):
return False
return any(task.get(key) for key in ["block", "rescue", "always"])
def _extract_ansible_parsed_keys_from_task(result, task, keys):
"""Return a dict with existing key in task."""
for k, v in list(task.items()):
if k in keys:
# we don't want to re-assign these values, which were
# determined by the ModuleArgsParser() above
continue
result[k] = v
return result
class UnsafeTag: class UnsafeTag:
"""Handle custom yaml unsafe tag.""" """Handle custom yaml unsafe tag."""
yaml_tag = "!unsafe" yaml_tag = u"!unsafe"
def __init__(self, value): def __init__(self, value):
self.unsafe = value self.unsafe = value
@ -621,7 +597,7 @@ class UnsafeTag:
class VaultTag: class VaultTag:
"""Handle custom yaml vault tag.""" """Handle custom yaml vault tag."""
yaml_tag = "!vault" yaml_tag = u"!vault"
def __init__(self, value): def __init__(self, value):
self.unsafe = value self.unsafe = value


@ -1,4 +1,4 @@
FROM python:3.12-alpine@sha256:38e179a0f0436c97ecc76bcd378d7293ab3ee79e4b8c440fdc7113670cb6e204 FROM python:3.10-alpine@sha256:ba6cfcca463537621aac63ffda4f93cd73e1f3dea59a83287603fbebd02444e4
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>" LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>" LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"

26
docker/Dockerfile.arm Normal file

@ -0,0 +1,26 @@
FROM arm32v7/python:3.10-alpine@sha256:8042f9a87f0b40b9291884dbd24cdd0bfc353b07f1e779e780ec125787e2d22f
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-later"
LABEL org.opencontainers.image.url="https://ansible-later.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-later"
LABEL org.opencontainers.image.documentation="https://ansible-later.geekdocs.de/"
ENV PY_COLORS=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
ADD dist/ansible_later-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev musl-dev python3-dev cargo && \
apk --update add git && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
apk del .build-deps && \
rm -f ansible_later-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/
USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-later"]

26
docker/Dockerfile.arm64 Normal file

@ -0,0 +1,26 @@
FROM arm64v8/python:3.10-alpine@sha256:5e8298ed17e5ee5dbc54175603463e67a6d539424f3f522bed48982a8ae1796f
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-later"
LABEL org.opencontainers.image.url="https://ansible-later.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-later"
LABEL org.opencontainers.image.documentation="https://ansible-later.geekdocs.de/"
ENV PY_COLORS=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
ADD dist/ansible_later-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev musl-dev python3-dev cargo && \
apk --update add git && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
apk del .build-deps && \
rm -f ansible_later-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/
USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-later"]

24
docker/manifest-quay.tmpl Normal file

@ -0,0 +1,24 @@
image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
platform:
architecture: amd64
os: linux
- image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
platform:
architecture: arm
os: linux
variant: v7

24
docker/manifest.tmpl Normal file

@ -0,0 +1,24 @@
image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
platform:
architecture: amd64
os: linux
- image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
platform:
architecture: arm
os: linux
variant: v7


@ -2,12 +2,13 @@
title: Documentation title: Documentation
--- ---
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-later/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-later) [![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-later?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-later)
[![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later) [![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later)
[![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later) [![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later)
[![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/) [![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/) [![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/) [![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![Codecov](https://img.shields.io/codecov/c/github/thegeeklab/ansible-later)](https://codecov.io/gh/thegeeklab/ansible-later)
[![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors) [![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors)
[![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later) [![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later)
[![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE) [![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE)


@ -1,17 +1,18 @@
--- ---
title: Write a rule title: Minimal standard checks
--- ---
A typical rule check will look like: A typical standards check will look like:
<!-- prettier-ignore-start --> <!-- prettier-ignore-start -->
<!-- spellchecker-disable --> <!-- spellchecker-disable -->
{{< highlight Python "linenos=table" >}} {{< highlight Python "linenos=table" >}}
class CheckBecomeUser(RuleBase): class CheckBecomeUser(StandardBase):
rid = "ANS115" sid = "ANSIBLE0015"
description = "Become should be combined with become_user" description = "Become should be combined with become_user"
helptext = "the task has `become` enabled but `become_user` is missing" helptext = "the task has `become` enabled but `become_user` is missing"
version = "0.1"
types = ["playbook", "task", "handler"] types = ["playbook", "task", "handler"]
def check(self, candidate, settings): def check(self, candidate, settings):
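        # --- illustrative sketch (not part of the original file) ---------------
        # The comparison truncates the example at this point. A typical body,
        # built from the RuleBase helpers shown earlier in this diff, would look
        # roughly like the following. The helper name `get_normalized_tasks` and
        # the exact `Error`/`Result` call signatures are assumptions for this
        # sketch, not the project's actual implementation.
        tasks, errors = self.get_normalized_tasks(candidate, settings)

        if not errors:
            for task in tasks:
                # Flag tasks that enable `become` without setting `become_user`.
                if "become" in task and "become_user" not in task:
                    errors.append(self.Error(task.get("__line__"), self.helptext))

        return self.Result(candidate.path, errors)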


@ -13,4 +13,4 @@ Changes can be made in a YAML configuration file or via CLI options, which are p
Please note that YAML attributes are overwritten while YAML lists are merged in any configuration files. Please note that YAML attributes are overwritten while YAML lists are merged in any configuration files.
To simplify the linting of individual files, e.g. for debugging purposes, ansible-later ignores the `exclude_files` and `ignore_dotfiles` options when files are passed to the CLI. To simplify single file linting, e.g. for debugging purposes, ansible-later ignores the `exclude_files` and `ignore_dotfiles` options when only one file is passed to the CLI.
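To make the merge behavior concrete, here is a minimal, illustrative Python sketch of the semantics described above: scalar attributes from a later configuration source overwrite earlier ones, while lists are merged. It is not the project's actual implementation; `merge_config` and the sample data are invented for this example.

{{< highlight Python "linenos=table" >}}
def merge_config(base, override):
    """Illustrative merge: attributes are overwritten, lists are merged."""
    result = dict(base)
    for key, value in override.items():
        if isinstance(value, list) and isinstance(result.get(key), list):
            result[key] = result[key] + value  # lists are merged
        elif isinstance(value, dict) and isinstance(result.get(key), dict):
            result[key] = merge_config(result[key], value)
        else:
            result[key] = value  # attributes are overwritten
    return result


print(merge_config(
    {"rules": {"exclude_files": ["a.yml"], "builtin": True}},
    {"rules": {"exclude_files": ["b.yml"], "builtin": False}},
))
# {'rules': {'exclude_files': ['a.yml', 'b.yml'], 'builtin': False}}
{{< /highlight >}}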


@ -8,27 +8,28 @@ You can get all available CLI options by running `ansible-later --help`:
<!-- spellchecker-disable --> <!-- spellchecker-disable -->
{{< highlight Shell "linenos=table" >}} {{< highlight Shell "linenos=table" >}}
$ ansible-later --help $ ansible-later --help
usage: ansible-later [-h] [-c CONFIG] [-r DIR] [-B] [-i TAGS] [-x TAGS] [-v] [-q] [-V] [rules.files ...] usage: ansible-later [-h] [-c CONFIG_FILE] [-r RULES.STANDARDS]
[-s RULES.FILTER] [-v] [-q] [--version]
[rules.files [rules.files ...]]
Validate Ansible files against best practice guideline Validate Ansible files against best practice guideline
positional arguments: positional arguments:
rules.files rules.files
options: optional arguments:
-h, --help show this help message and exit -h, --help show this help message and exit
-c CONFIG, --config CONFIG -c CONFIG_FILE, --config CONFIG_FILE
path to configuration file location of configuration file
-r DIR, --rules-dir DIR -r RULES.STANDARDS, --rules RULES.STANDARDS
directory of rules location of standards rules
-B, --no-builtin disables built-in rules -s RULES.FILTER, --standards RULES.FILTER
-i TAGS, --include-rules TAGS limit standards to given ID's
limit rules to given id/tags -x RULES.EXCLUDE_FILTER, --exclude-standards RULES.EXCLUDE_FILTER
-x TAGS, --exclude-rules TAGS exclude standards by given ID's
exclude rules by given id/tags
-v increase log level -v increase log level
-q decrease log level -q decrease log level
-V, --version show program's version number and exit --version show program's version number and exit
{{< /highlight >}} {{< /highlight >}}
<!-- spellchecker-enable --> <!-- spellchecker-enable -->
<!-- prettier-ignore-end --> <!-- prettier-ignore-end -->


@ -11,37 +11,37 @@ The default configuration is used if no other value is specified. Each option ca
--- ---
ansible: ansible:
# Add the name of used custom Ansible modules. Otherwise ansible-later # Add the name of used custom Ansible modules. Otherwise ansible-later
# can't detect unknown modules and will throw an error. # can't detect unknown modules and will through an error.
# Modules which are bundled with the role and placed in a './library' # Modules which are bundled with the role and placed in a './library'
# directory will be auto-detected and don't need to be added to this list. # directory will be auto-detected and don't need to be added to this list.
custom_modules: [] custom_modules: []
# Settings for variable formatting rule (ANS104) # Settings for variable formatting rule (ANSIBLE0004)
double-braces: double-braces:
max-spaces-inside: 1 max-spaces-inside: 1
min-spaces-inside: 1 min-spaces-inside: 1
# List of allowed literal bools (ANS114) # List of allowed literal bools (ANSIBLE0014)
literal-bools: literal-bools:
- "True" - "True"
- "False" - "False"
- "yes" - "yes"
- "no" - "no"
# List of modules that don't need to be named (ANS106). # List of modules that don't need to be named (ANSIBLE0006).
# You must specify each individual module name, globs or wildcards do not work! # You must specify each individual module name, globs or wildcards do not work!
named-task: named-task:
exclude: exclude:
- "meta" - "meta"
- "debug" - "debug"
- "block/always/rescue" - "block"
- "include_role" - "include_role"
- "include_tasks" - "include_tasks"
- "include_vars" - "include_vars"
- "import_role" - "import_role"
- "import_tasks" - "import_tasks"
# List of modules that are allowed to use the key=value format instead of the native YAML format (YML108). # List of modules that are allowed to use the key=value format instead of the native YAML format (LINT0008).
# You must specify each individual module name, globs or wildcards do not work! # You must specify each individual module name, globs or wildcards do not work!
native-yaml: native-yaml:
exclude: [] exclude: []
@ -58,8 +58,8 @@ logging:
# Global settings for all defined rules # Global settings for all defined rules
rules: rules:
# Disable built-in rules if required # Disable build-in rules if required
builtin: True buildin: True
# List of files to exclude # List of files to exclude
exclude_files: [] exclude_files: []
@ -75,17 +75,21 @@ rules:
exclude_filter: [] exclude_filter: []
# List of rule ID's that should be displayed as a warning instead of an error. By default, # List of rule ID's that should be displayed as a warning instead of an error. By default,
# no rules are marked as warnings. This list allows to degrade errors to warnings for each rule. # only rules whose version is higher than the current default version are marked as warnings.
# This list allows to degrade errors to warnings for each rule.
warning_filter: warning_filter:
- "ANS128" - "ANSIBLE9999"
- "ANS999"
# All dotfiles (including hidden folders) are excluded by default. # All dotfiles (including hidden folders) are excluded by default.
# You can disable this setting and handle dotfiles by yourself with `exclude_files`. # You can disable this setting and handle dotfiles by yourself with `exclude_files`.
ignore_dotfiles: True ignore_dotfiles: True
# List of directories to load rules from (defaults to built-in) # List of directories to load standard rules from (defaults to build-in)
dir: [] standards: []
# Standard version to use. Standard version set in a roles meta file
# or playbook will takes precedence.
version:
# Block to control included yamllint rules. # Block to control included yamllint rules.
# See https://yamllint.readthedocs.io/en/stable/rules.html # See https://yamllint.readthedocs.io/en/stable/rules.html
@ -95,8 +99,6 @@ yamllint:
max-spaces-before: 0 max-spaces-before: 0
document-start: document-start:
present: True present: True
document-end:
present: True
empty-lines: empty-lines:
max: 1 max: 1
max-end: 1 max-end: 1


@ -1,21 +0,0 @@
---
title: Pre-Commit setup
---
To use `ansible-later` with the [pre-commit](https://pre-commit.com/) framework, add the following to the `.pre-commit-config.yaml` file in your local repository.
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<!-- spellchecker-disable -->
{{< highlight yaml "linenos=table" >}}
- repo: https://github.com/thegeeklab/ansible-later
# change ref to the latest release from https://github.com/thegeeklab/ansible-later/releases
rev: v3.0.2
hooks:
- id: ansible-later
{{< /highlight >}}
<!-- spellchecker-enable -->
<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->


@ -2,47 +2,43 @@
title: Included rules title: Included rules
--- ---
Reviews are useless without some rules to check against. `ansible-later` comes with a set of built-in checks, which are explained in the following table. Reviews are useless without some rules or standards to check against. ansible-later comes with a set of built-in checks, which are explained in the following table.
| Rule | ID | Description | Parameter | | Rule | ID | Description | Parameter |
| ----------------------------- | ------ | ----------------------------------------------------------------- | -------------------------------------------------------------------------- | | ----------------------------- | ----------- | ----------------------------------------------------------------- | ---------------------------------------------------------------------- |
| CheckYamlEmptyLines | YML101 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 1} | | CheckYamlEmptyLines | LINT0001 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 1} |
| CheckYamlIndent | YML102 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} | | CheckYamlIndent | LINT0002 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} |
| CheckYamlHyphens | YML103 | YAML should use consistent number of spaces after hyphens (-). | {max-spaces-after: 1} | | CheckYamlHyphens | LINT0003 | YAML should use consistent number of spaces after hyphens (-). | {max-spaces-after: 1} |
| CheckYamlDocumentStart | YML104 | YAML should contain document start marker. | {document-start: {present: true}} | | CheckYamlDocumentStart | LINT0004 | YAML should contain document start marker. | {document-start: {present: true}} |
| CheckYamlColons | YML105 | YAML should use consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} | | CheckYamlColons | LINT0005 | YAML should use consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} |
| CheckYamlFile | YML106 | Roles file should be in YAML format. | | | CheckYamlFile | LINT0006 | Roles file should be in YAML format. | |
| CheckYamlHasContent | YML107 | Files should contain useful content. | | | CheckYamlHasContent | LINT0007 | Files should contain useful content. | |
| CheckNativeYaml | YML108 | Use YAML format for tasks and handlers rather than key=value. | {native-yaml: {exclude: []}} | | CheckNativeYaml | LINT0008 | Use YAML format for tasks and handlers rather than key=value. | {native-yaml: {exclude: []}} |
| CheckYamlDocumentEnd | YML109 | YAML should contain document end marker. | {document-end: {present: true}} | | CheckYamlDocumentEnd | LINT0009 | YAML should contain document end marker. | {document-end: {present: true}} |
| CheckYamlOctalValues | YML110 | YAML should not use forbidden implicit or explicit octal value. | {octal-values: {forbid-implicit-octal: true, forbid-explicit-octal: true}} | | CheckTaskSeparation | ANSIBLE0001 | Single tasks should be separated by an empty line. | |
| CheckTaskSeparation | ANS101 | Single tasks should be separated by an empty line. | | | CheckMetaMain | ANSIBLE0002 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies |
| CheckMetaMain | ANS102 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies | | CheckUniqueNamedTask | ANSIBLE0003 | Tasks and handlers must be uniquely named within a file. | |
| CheckUniqueNamedTask | ANS103 | Tasks and handlers must be uniquely named within a file. | | | CheckBraces | ANSIBLE0004 | YAML should use consistent number of spaces around variables. | {double-braces: max-spaces-inside: 1, min-spaces-inside: 1} |
| CheckBraces | ANS104 | YAML should use consistent number of spaces around variables. | {double-braces: max-spaces-inside: 1, min-spaces-inside: 1} | | CheckScmInSrc | ANSIBLE0005 | Use SCM key rather than `src: scm+url` in requirements file. | |
| CheckScmInSrc | ANS105 | Use SCM key rather than `src: scm+url` in requirements file. | | | CheckNamedTask | ANSIBLE0006 | Tasks and handlers must be named. | {named-task: {exclude: [meta, debug, block, include\_\*, import\_\*]}} |
| CheckNamedTask | ANS106 | Tasks and handlers must be named. | {named-task: {exclude: [meta, debug, block, include\_\*, import\_\*]}} | | CheckNameFormat | ANSIBLE0007 | Name of tasks and handlers must be formatted. | formats: first letter capital |
| CheckNameFormat | ANS107 | Name of tasks and handlers must be formatted. | formats: first letter capital | | CheckCommandInsteadofModule | ANSIBLE0008 | Commands should not be used in place of modules. | |
| CheckCommandInsteadofModule | ANS108 | Commands should not be used in place of modules. | | | CheckInstallUseLatest | ANSIBLE0009 | Package managers should not install with state=latest. | |
| CheckInstallUseLatest | ANS109 | Package managers should not install with state=latest. | | | CheckShellInsteadCommand | ANSIBLE0010 | Use Shell only when piping, redirecting or chaining commands. | |
| CheckShellInsteadCommand | ANS110 | Use Shell only when piping, redirecting or chaining commands. | | | CheckCommandHasChanges | ANSIBLE0011 | Commands should be idempotent and only used with some checks. | |
| CheckCommandHasChanges | ANS111 | Commands should be idempotent and only used with some checks. | | | CheckCompareToEmptyString | ANSIBLE0012 | Don't compare to "" - use `when: var` or `when: not var`. | |
| CheckCompareToEmptyString | ANS112 | Don't compare to "" - use `when: var` or `when: not var`. | | | CheckCompareToLiteralBool | ANSIBLE0013 | Don't compare to True/False - use `when: var` or `when: not var`. | |
| CheckCompareToLiteralBool | ANS113 | Don't compare to True/False - use `when: var` or `when: not var`. | | | CheckLiteralBoolFormat | ANSIBLE0014 | Literal bools should be consistent. | {literal-bools: [True, False, yes, no]} |
| CheckLiteralBoolFormat | ANS114 | Literal bools should be consistent. | {literal-bools: [True, False, yes, no]} | | CheckBecomeUser | ANSIBLE0015 | Become should be combined with become_user. | |
| CheckBecomeUser | ANS115 | Become should be combined with become_user. | | | CheckFilterSeparation | ANSIBLE0016 | Jinja2 filters should be separated with spaces. | |
| CheckFilterSeparation | ANS116 | Jinja2 filters should be separated with spaces. | | | CheckCommandInsteadOfArgument | ANSIBLE0017 | Commands should not be used in place of module arguments. | |
| CheckCommandInsteadOfArgument | ANS117 | Commands should not be used in place of module arguments. | | | CheckFilePermissionMissing | ANSIBLE0018 | File permissions unset or incorrect. | |
| CheckFilePermissionMissing | ANS118 | File permissions unset or incorrect. | | | CheckFilePermissionOctal | ANSIBLE0019 | Octal file permissions must contain leading zero or be a string. | |
| CheckFilePermissionOctal | ANS119 | Octal file permissions must contain leading zero or be a string. | | | CheckGitHasVersion | ANSIBLE0020 | Git checkouts should use explicit version. | |
| CheckGitHasVersion | ANS120 | Git checkouts should use explicit version. | | | CheckMetaChangeFromDefault | ANSIBLE0021 | Roles meta/main.yml default values should be changed. | |
| CheckMetaChangeFromDefault | ANS121 | Roles meta/main.yml default values should be changed. | | | CheckWhenFormat | ANSIBLE0022 | Don't use Jinja2 in `when`. | |
| CheckWhenFormat | ANS122 | Don't use Jinja2 in `when`. | | | CheckNestedJinja | ANSIBLE0023 | Don't use nested Jinja2 pattern. | |
| CheckNestedJinja | ANS123 | Don't use nested Jinja2 pattern. | | | CheckLocalAction | ANSIBLE0024 | Don't use local_action. | |
| CheckLocalAction | ANS124 | Don't use local_action. | | | CheckRelativeRolePaths | ANSIBLE0025 | Don't use a relative path in a role. | |
| CheckRelativeRolePaths | ANS125 | Don't use a relative path in a role. | | | CheckChangedInWhen | ANSIBLE0026 | Use handlers instead of `when: changed`. | |
| CheckChangedInWhen | ANS126 | Use handlers instead of `when: changed`. | | | CheckDeprecated | ANSIBLE9999 | Deprecated features of `ansible-later` should not be used. | |
| CheckDeprecatedBareVars       | ANS127 | Deprecated bare variables in loops must not be used.              |                                                                              |
| CheckFQCNBuiltin | ANS128 | Module actions should use full qualified collection names. | |
| CheckKeyOrder                 | ANS129 | Check optimized playbook/tasks key order.                          |                                                                              |
| CheckDeprecated | ANS999 | Deprecated features of `ansible-later` should not be used. | |
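Most of the parameters in the last column can be adjusted in the `ansible-later` configuration file. The snippet below is a minimal sketch only; the top-level `rules` and `ansible` sections and the key names shown follow the documented defaults and should be checked against the current documentation before use.
{{< highlight yaml "linenos=table" >}}
---
rules:
  # skip selected checks entirely, referenced by the IDs listed in the table above
  exclude_filter:
    - ANS999
ansible:
  # restrict CheckLiteralBoolFormat (ANS114) to Python-style literals
  literal-bools:
    - "True"
    - "False"
{{< /highlight >}}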

View File

@ -23,5 +23,5 @@ main:
sub: sub:
- name: Candidates - name: Candidates
ref: "/build_rules/candidates" ref: "/build_rules/candidates"
- name: Rules - name: Standards checks
ref: "/build_rules/rule" ref: "/build_rules/standards_check"

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

After

Width:  |  Height:  |  Size: 36 KiB

1551
poetry.lock generated

File diff suppressed because it is too large

View File

@ -10,40 +10,64 @@ classifiers = [
"Natural Language :: English", "Natural Language :: English",
"Operating System :: POSIX", "Operating System :: POSIX",
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Utilities", "Topic :: Utilities",
"Topic :: Software Development", "Topic :: Software Development",
] ]
description = "Reviews ansible playbooks, roles and inventories and suggests improvements." description = "Reviews ansible playbooks, roles and inventories and suggests improvements."
documentation = "https://ansible-later.geekdocs.de/" documentation = "https://ansible-later.geekdocs.de/"
homepage = "https://ansible-later.geekdocs.de/" homepage = "https://ansible-later.geekdocs.de/"
include = ["LICENSE"] include = [
"LICENSE",
]
keywords = ["ansible", "code", "review"] keywords = ["ansible", "code", "review"]
license = "MIT" license = "MIT"
name = "ansible-later" name = "ansible-later"
packages = [{ include = "ansiblelater" }] packages = [
{include = "ansiblelater"},
]
readme = "README.md" readme = "README.md"
repository = "https://github.com/thegeeklab/ansible-later/" repository = "https://github.com/thegeeklab/ansible-later/"
version = "0.0.0" version = "0.0.0"
[tool.poetry.dependencies] [tool.poetry.dependencies]
PyYAML = "6.0.2" PyYAML = "6.0"
ansible-core = { version = "2.14.17", optional = true } ansible = {version = "6.1.0", optional = true}
ansible = { version = "7.7.0", optional = true } ansible-core = {version = "2.13.2", optional = true}
anyconfig = "0.14.0" anyconfig = "0.13.0"
appdirs = "1.4.4" appdirs = "1.4.4"
colorama = "0.4.6" colorama = "0.4.5"
jsonschema = "4.23.0" flake8 = "4.0.1"
jsonschema = "4.7.2"
nested-lookup = "0.2.25" nested-lookup = "0.2.25"
pathspec = "0.12.1" pathspec = "0.9.0"
python = "^3.9.0" python = "^3.8.0"
python-json-logger = "2.0.7" python-json-logger = "2.0.4"
toolz = "1.0.0" toolz = "0.12.0"
unidiff = "0.7.5" unidiff = "0.7.4"
yamllint = "1.35.1" yamllint = "1.27.1"
[tool.poetry.dev-dependencies]
bandit = "1.7.4"
flake8-blind-except = "0.2.1"
flake8-builtins = "1.5.3"
flake8-docstrings = "1.6.0"
flake8-eradicate = "1.2.1"
flake8-isort = "4.1.1"
flake8-logging-format = "0.6.0"
flake8-pep3101 = "1.3.0"
flake8-polyfill = "1.0.2"
flake8-quotes = "3.3.1"
pep8-naming = "0.13.1"
pydocstyle = "6.1.1"
pytest = "7.1.2"
pytest-cov = "3.0.0"
pytest-mock = "3.8.2"
tomli = "2.0.1"
yapf = "0.32.0"
toml = "0.10.2"
[tool.poetry.extras] [tool.poetry.extras]
ansible = ["ansible"] ansible = ["ansible"]
@ -52,23 +76,23 @@ ansible-core = ["ansible-core"]
[tool.poetry.scripts] [tool.poetry.scripts]
ansible-later = "ansiblelater.__main__:main" ansible-later = "ansiblelater.__main__:main"
[tool.poetry.group.dev.dependencies]
ruff = "0.7.2"
pytest = "8.3.3"
pytest-mock = "3.14.0"
pytest-cov = "6.0.0"
toml = "0.10.2"
[tool.poetry-dynamic-versioning] [tool.poetry-dynamic-versioning]
enable = true enable = true
style = "semver" style = "semver"
vcs = "git" vcs = "git"
[tool.isort]
default_section = "THIRDPARTY"
force_single_line = true
line_length = 99
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]
[tool.pytest.ini_options] [tool.pytest.ini_options]
addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --no-cov-on-fail" addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
filterwarnings = [ filterwarnings = [
"ignore::FutureWarning", "ignore::FutureWarning",
"ignore::DeprecationWarning", "ignore:.*collections.*:DeprecationWarning",
"ignore:.*pep8.*:FutureWarning", "ignore:.*pep8.*:FutureWarning",
] ]
@ -76,74 +100,5 @@ filterwarnings = [
omit = ["**/test/*"] omit = ["**/test/*"]
[build-system] [build-system]
build-backend = "poetry_dynamic_versioning.backend" build-backend = "poetry.core.masonry.api"
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
[tool.ruff]
exclude = [
".git",
"__pycache__",
"build",
"dist",
"test",
"*.pyc",
"*.egg-info",
".cache",
".eggs",
"env*",
]
line-length = 99
indent-width = 4
[tool.ruff.lint]
# Explanation of errors
#
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# D203: One blank line required before class docstring
# D212: Multi-line docstring summary should start at the first line
ignore = [
"D100",
"D101",
"D102",
"D103",
"D105",
"D107",
"D202",
"D203",
"D212",
"UP038",
"RUF012",
]
select = [
"D",
"E",
"F",
"Q",
"W",
"I",
"S",
"BLE",
"N",
"UP",
"B",
"A",
"C4",
"T20",
"SIM",
"RET",
"ARG",
"ERA",
"RUF",
]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
line-ending = "lf"

View File

@ -6,12 +6,6 @@
"description": "Ansible base dependencies", "description": "Ansible base dependencies",
"matchPackageNames": ["ansible", "ansible-core"], "matchPackageNames": ["ansible", "ansible-core"],
"separateMinorPatch": true "separateMinorPatch": true
},
{
"matchManagers": ["woodpecker"],
"matchFileNames": [".woodpecker/test.yml"],
"matchPackageNames": ["docker.io/library/python"],
"enabled": false
} }
] ]
} }

24
setup.cfg Normal file
View File

@ -0,0 +1,24 @@
[flake8]
# Explanation of errors
#
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# W503: Line break occurred before a binary operator
ignore = D100, D101, D102, D103, D105, D107, D202, W503
max-line-length = 99
inline-quotes = double
exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*
[yapf]
based_on_style = google
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true
indent_dictionary_value = true
allow_split_before_dict_value = false