Compare commits


No commits in common. "main" and "v2.0.1" have entirely different histories.
main ... v2.0.1

90 changed files with 3220 additions and 2427 deletions

.chglog/CHANGELOG.tpl.md Executable file

@ -0,0 +1,23 @@
# Changelog
{{ range .Versions -}}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})
{{ range .CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ (regexReplaceAll "(.*)/issues/(.*)" (regexReplaceAll "(Co-\\w*-by.*)" .Subject "") "${1}/pull/${2}") | trim }}
{{ end }}
{{- end -}}
{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}
{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}

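For context, the nested regexReplaceAll calls in the template above first strip "Co-authored-by" trailers from the commit subject and then rewrite ".../issues/..." references to ".../pull/..." links. A rough Python equivalent, illustrative only (the sample subject and issue number below are made up):

import re

# Illustrative sketch of the subject rewriting done in CHANGELOG.tpl.md above:
# drop "Co-authored-by" trailers, then turn ".../issues/..." into ".../pull/...".
def format_subject(subject):
    subject = re.sub(r"(Co-\w*-by.*)", "", subject)
    subject = re.sub(r"(.*)/issues/(.*)", r"\1/pull/\2", subject)
    return subject.strip()

# Hypothetical commit subject, for demonstration only.
print(format_subject("fix: skip vault files https://github.com/thegeeklab/ansible-later/issues/123"))
# -> fix: skip vault files https://github.com/thegeeklab/ansible-later/pull/123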
.chglog/config.yml Executable file

@ -0,0 +1,25 @@
style: github
template: CHANGELOG.tpl.md
info:
title: CHANGELOG
repository_url: https://github.com/thegeeklab/ansible-later
options:
commit_groups:
title_maps:
feat: Features
fix: Bug Fixes
perf: Performance Improvements
refactor: Code Refactoring
chore: Others
test: Testing
ci: CI Pipeline
docs: Documentation
header:
pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
pattern_maps:
- Type
- Scope
- Subject
notes:
keywords:
- BREAKING CHANGE

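As a quick illustration of how the header pattern in this config splits a conventional-commit subject into Type, Scope and Subject (a minimal sketch; the sample commit message is hypothetical):

import re

# The header pattern from .chglog/config.yml above, unchanged.
HEADER_PATTERN = r"^(\w*)(?:\(([\w\$\.\-\*\s]*)\))?\:\s(.*)$"

# Hypothetical commit subject, for demonstration only.
match = re.match(HEADER_PATTERN, "fix(candidate): handle binary files gracefully")
if match:
    commit_type, scope, subject = match.groups()
    # -> type='fix' (mapped to the "Bug Fixes" section), scope='candidate',
    #    subject='handle binary files gracefully'
    print(commit_type, scope, subject)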

@ -18,9 +18,8 @@ HostVars
Rolesfile
Makefile
Jinja2
ANS([0-9]{3})
YML([0-9]{3})
ANSIBLE([0-9]{4})
LINT([0-9]{4})
SCM
bools
Check[A-Z].+
(P|p)re-(C|c)ommit

.drone.jsonnet Normal file

@ -0,0 +1,508 @@
local PythonVersion(pyversion='3.8') = {
name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
image: 'python:' + pyversion,
environment: {
PY_COLORS: 1,
},
commands: [
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install -E ansible-core',
'poetry run pytest',
'poetry version',
'poetry run ansible-later --help',
],
depends_on: [
'fetch',
],
};
local PipelineLint = {
kind: 'pipeline',
name: 'lint',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'yapf',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install',
'poetry run yapf -dr ./ansiblelater',
],
},
{
name: 'flake8',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry install -E ansible-core',
'poetry run flake8 ./ansiblelater',
],
},
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineTest = {
kind: 'pipeline',
name: 'test',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'fetch',
image: 'python:3.10',
commands: [
'git fetch -tq',
],
},
PythonVersion(pyversion='3.8'),
PythonVersion(pyversion='3.9'),
PythonVersion(pyversion='3.10'),
{
name: 'codecov',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
CODECOV_TOKEN: { from_secret: 'codecov_token' },
},
commands: [
'pip install codecov -qq',
'codecov --required -X gcov',
],
depends_on: [
'python38-pytest',
'python39-pytest',
'python310-pytest',
],
},
],
depends_on: [
'lint',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineSecurity = {
kind: 'pipeline',
name: 'security',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'bandit',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry install -E ansible-core',
'poetry run bandit -r ./ansiblelater -x ./ansiblelater/test',
],
},
],
depends_on: [
'test',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineBuildPackage = {
kind: 'pipeline',
name: 'build-package',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'build',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
},
{
name: 'checksum',
image: 'alpine',
commands: [
'cd dist/ && sha256sum * > ../sha256sum.txt',
],
},
{
name: 'changelog-generate',
image: 'thegeeklab/git-chglog',
commands: [
'git fetch -tq',
'git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}',
],
},
{
name: 'changelog-format',
image: 'thegeeklab/alpine-tools',
commands: [
'prettier CHANGELOG.md',
'prettier -w CHANGELOG.md',
],
},
{
name: 'publish-github',
image: 'plugins/github-release',
settings: {
overwrite: true,
api_key: { from_secret: 'github_token' },
files: ['dist/*', 'sha256sum.txt'],
title: '${DRONE_TAG}',
note: 'CHANGELOG.md',
},
when: {
ref: ['refs/tags/**'],
},
},
{
name: 'publish-pypi',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry publish -n',
],
environment: {
POETRY_HTTP_BASIC_PYPI_USERNAME: { from_secret: 'pypi_username' },
POETRY_HTTP_BASIC_PYPI_PASSWORD: { from_secret: 'pypi_password' },
},
when: {
ref: ['refs/tags/**'],
},
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineBuildContainer(arch='amd64') = {
local build = if arch == 'arm' then [{
name: 'build',
image: 'python:3.10-alpine',
commands: [
'apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo',
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
environment: {
CARGO_NET_GIT_FETCH_WITH_CLI: true,
},
}] else [{
name: 'build',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
}],
kind: 'pipeline',
name: 'build-container-' + arch,
platform: {
os: 'linux',
arch: arch,
},
steps: build + [
{
name: 'dryrun',
image: 'thegeeklab/drone-docker:19',
settings: {
dry_run: true,
dockerfile: 'docker/Dockerfile.' + arch,
repo: 'thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
depends_on: ['build'],
when: {
ref: ['refs/pull/**'],
},
},
{
name: 'publish-dockerhub',
image: 'thegeeklab/drone-docker:19',
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'docker/Dockerfile.' + arch,
repo: 'thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
depends_on: ['dryrun'],
},
{
name: 'publish-quay',
image: 'thegeeklab/drone-docker:19',
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'docker/Dockerfile.' + arch,
registry: 'quay.io',
repo: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'quay_username' },
password: { from_secret: 'quay_password' },
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
depends_on: ['dryrun'],
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineDocs = {
kind: 'pipeline',
name: 'docs',
platform: {
os: 'linux',
arch: 'amd64',
},
concurrency: {
limit: 1,
},
steps: [
{
name: 'assets',
image: 'thegeeklab/alpine-tools',
commands: [
'make doc',
],
},
{
name: 'markdownlint',
image: 'thegeeklab/markdownlint-cli',
commands: [
"markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'",
],
},
{
name: 'spellcheck',
image: 'node:lts-alpine',
commands: [
'npm install -g spellchecker-cli',
"spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions",
],
environment: {
FORCE_COLOR: true,
NPM_CONFIG_LOGLEVEL: 'error',
},
},
{
name: 'testbuild',
image: 'thegeeklab/hugo:0.91.0',
commands: [
'hugo -s docs/ -b http://localhost/',
],
},
{
name: 'link-validation',
image: 'thegeeklab/link-validator',
commands: [
'link-validator -ro',
],
environment: {
LINK_VALIDATOR_BASE_DIR: 'docs/public',
},
},
{
name: 'build',
image: 'thegeeklab/hugo:0.91.0',
commands: [
'hugo -s docs/',
],
},
{
name: 'beautify',
image: 'node:lts-alpine',
commands: [
'npm install -g js-beautify',
"html-beautify -r -f 'docs/public/**/*.html'",
],
environment: {
FORCE_COLOR: true,
NPM_CONFIG_LOGLEVEL: 'error',
},
},
{
name: 'publish',
image: 'plugins/s3-sync',
settings: {
access_key: { from_secret: 's3_access_key' },
bucket: 'geekdocs',
delete: true,
endpoint: 'https://sp.rknet.org',
path_style: true,
secret_key: { from_secret: 's3_secret_access_key' },
source: 'docs/public/',
strip_prefix: 'docs/public/',
target: '/${DRONE_REPO_NAME}',
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
},
],
depends_on: [
'build-package',
'build-container-amd64',
'build-container-arm64',
'build-container-arm',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineNotifications = {
kind: 'pipeline',
name: 'notifications',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
image: 'plugins/manifest',
name: 'manifest-dockerhub',
settings: {
ignore_missing: true,
auto_tag: true,
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
spec: 'docker/manifest.tmpl',
},
when: {
status: ['success'],
},
},
{
image: 'plugins/manifest',
name: 'manifest-quay',
settings: {
ignore_missing: true,
auto_tag: true,
username: { from_secret: 'quay_username' },
password: { from_secret: 'quay_password' },
spec: 'docker/manifest-quay.tmpl',
},
when: {
status: ['success'],
},
},
{
name: 'pushrm-dockerhub',
pull: 'always',
image: 'chko/docker-pushrm:1',
environment: {
DOCKER_PASS: {
from_secret: 'docker_password',
},
DOCKER_USER: {
from_secret: 'docker_username',
},
PUSHRM_FILE: 'README.md',
PUSHRM_SHORT: 'Another best practice scanner for Ansible roles and playbooks',
PUSHRM_TARGET: 'thegeeklab/${DRONE_REPO_NAME}',
},
when: {
status: ['success'],
},
},
{
name: 'pushrm-quay',
pull: 'always',
image: 'chko/docker-pushrm:1',
environment: {
APIKEY__QUAY_IO: {
from_secret: 'quay_token',
},
PUSHRM_FILE: 'README.md',
PUSHRM_TARGET: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
},
when: {
status: ['success'],
},
},
{
name: 'matrix',
image: 'thegeeklab/drone-matrix',
settings: {
homeserver: { from_secret: 'matrix_homeserver' },
roomid: { from_secret: 'matrix_roomid' },
template: 'Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}',
username: { from_secret: 'matrix_username' },
password: { from_secret: 'matrix_password' },
},
when: {
status: ['success', 'failure'],
},
},
],
depends_on: [
'docs',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**'],
status: ['success', 'failure'],
},
};
[
PipelineLint,
PipelineTest,
PipelineSecurity,
PipelineBuildPackage,
PipelineBuildContainer(arch='amd64'),
PipelineBuildContainer(arch='arm64'),
PipelineBuildContainer(arch='arm'),
PipelineDocs,
PipelineNotifications,
]

.drone.yml Normal file

@ -0,0 +1,637 @@
---
kind: pipeline
name: lint
platform:
os: linux
arch: amd64
steps:
- name: yapf
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry run yapf -dr ./ansiblelater
environment:
PY_COLORS: 1
- name: flake8
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run flake8 ./ansiblelater
environment:
PY_COLORS: 1
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
---
kind: pipeline
name: test
platform:
os: linux
arch: amd64
steps:
- name: fetch
image: python:3.10
commands:
- git fetch -tq
- name: python38-pytest
image: python:3.8
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install -E ansible-core
- poetry run pytest
- poetry version
- poetry run ansible-later --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python39-pytest
image: python:3.9
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install -E ansible-core
- poetry run pytest
- poetry version
- poetry run ansible-later --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python310-pytest
image: python:3.10
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install -E ansible-core
- poetry run pytest
- poetry version
- poetry run ansible-later --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: codecov
image: python:3.10
commands:
- pip install codecov -qq
- codecov --required -X gcov
environment:
CODECOV_TOKEN:
from_secret: codecov_token
PY_COLORS: 1
depends_on:
- python38-pytest
- python39-pytest
- python310-pytest
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- lint
---
kind: pipeline
name: security
platform:
os: linux
arch: amd64
steps:
- name: bandit
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run bandit -r ./ansiblelater -x ./ansiblelater/test
environment:
PY_COLORS: 1
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- test
---
kind: pipeline
name: build-package
platform:
os: linux
arch: amd64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: checksum
image: alpine
commands:
- cd dist/ && sha256sum * > ../sha256sum.txt
- name: changelog-generate
image: thegeeklab/git-chglog
commands:
- git fetch -tq
- git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}
- name: changelog-format
image: thegeeklab/alpine-tools
commands:
- prettier CHANGELOG.md
- prettier -w CHANGELOG.md
- name: publish-github
image: plugins/github-release
settings:
api_key:
from_secret: github_token
files:
- dist/*
- sha256sum.txt
note: CHANGELOG.md
overwrite: true
title: ${DRONE_TAG}
when:
ref:
- refs/tags/**
- name: publish-pypi
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n
environment:
POETRY_HTTP_BASIC_PYPI_PASSWORD:
from_secret: pypi_password
POETRY_HTTP_BASIC_PYPI_USERNAME:
from_secret: pypi_username
when:
ref:
- refs/tags/**
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-amd64
platform:
os: linux
arch: amd64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.amd64
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: amd64
dockerfile: docker/Dockerfile.amd64
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: amd64
dockerfile: docker/Dockerfile.amd64
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-arm64
platform:
os: linux
arch: arm64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.arm64
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm64
dockerfile: docker/Dockerfile.arm64
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm64
dockerfile: docker/Dockerfile.arm64
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-arm
platform:
os: linux
arch: arm
steps:
- name: build
image: python:3.10-alpine
commands:
- apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
environment:
CARGO_NET_GIT_FETCH_WITH_CLI: true
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.arm
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm
dockerfile: docker/Dockerfile.arm
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm
dockerfile: docker/Dockerfile.arm
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: docs
platform:
os: linux
arch: amd64
concurrency:
limit: 1
steps:
- name: assets
image: thegeeklab/alpine-tools
commands:
- make doc
- name: markdownlint
image: thegeeklab/markdownlint-cli
commands:
- markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'
- name: spellcheck
image: node:lts-alpine
commands:
- npm install -g spellchecker-cli
- spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions
environment:
FORCE_COLOR: true
NPM_CONFIG_LOGLEVEL: error
- name: testbuild
image: thegeeklab/hugo:0.91.0
commands:
- hugo -s docs/ -b http://localhost/
- name: link-validation
image: thegeeklab/link-validator
commands:
- link-validator -ro
environment:
LINK_VALIDATOR_BASE_DIR: docs/public
- name: build
image: thegeeklab/hugo:0.91.0
commands:
- hugo -s docs/
- name: beautify
image: node:lts-alpine
commands:
- npm install -g js-beautify
- html-beautify -r -f 'docs/public/**/*.html'
environment:
FORCE_COLOR: true
NPM_CONFIG_LOGLEVEL: error
- name: publish
image: plugins/s3-sync
settings:
access_key:
from_secret: s3_access_key
bucket: geekdocs
delete: true
endpoint: https://sp.rknet.org
path_style: true
secret_key:
from_secret: s3_secret_access_key
source: docs/public/
strip_prefix: docs/public/
target: /${DRONE_REPO_NAME}
when:
ref:
- refs/heads/main
- refs/tags/**
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- build-package
- build-container-amd64
- build-container-arm64
- build-container-arm
---
kind: pipeline
name: notifications
platform:
os: linux
arch: amd64
steps:
- name: manifest-dockerhub
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
password:
from_secret: docker_password
spec: docker/manifest.tmpl
username:
from_secret: docker_username
when:
status:
- success
- name: manifest-quay
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
password:
from_secret: quay_password
spec: docker/manifest-quay.tmpl
username:
from_secret: quay_username
when:
status:
- success
- name: pushrm-dockerhub
pull: always
image: chko/docker-pushrm:1
environment:
DOCKER_PASS:
from_secret: docker_password
DOCKER_USER:
from_secret: docker_username
PUSHRM_FILE: README.md
PUSHRM_SHORT: Another best practice scanner for Ansible roles and playbooks
PUSHRM_TARGET: thegeeklab/${DRONE_REPO_NAME}
when:
status:
- success
- name: pushrm-quay
pull: always
image: chko/docker-pushrm:1
environment:
APIKEY__QUAY_IO:
from_secret: quay_token
PUSHRM_FILE: README.md
PUSHRM_TARGET: quay.io/thegeeklab/${DRONE_REPO_NAME}
when:
status:
- success
- name: matrix
image: thegeeklab/drone-matrix
settings:
homeserver:
from_secret: matrix_homeserver
password:
from_secret: matrix_password
roomid:
from_secret: matrix_roomid
template: "Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}"
username:
from_secret: matrix_username
when:
status:
- success
- failure
trigger:
ref:
- refs/heads/main
- refs/tags/**
status:
- success
- failure
depends_on:
- docs
---
kind: signature
hmac: 7ba0390cdab04c73a8c95d8b2362abfd7aa440e86c7b7e76421f845c312dc9bf
...


@ -52,11 +52,6 @@ branches:
required_status_checks:
strict: false
contexts:
- ci/woodpecker/pr/lint
- ci/woodpecker/pr/test
- ci/woodpecker/pr/build-package
- ci/woodpecker/pr/build-container
- ci/woodpecker/pr/docs
enforce_admins: false
required_linear_history: true
- continuous-integration/drone/pr
enforce_admins: null
restrictions: null

.gitignore vendored

@ -106,8 +106,6 @@ pip-wheel-metadata
docs/themes/
docs/public/
resources/_gen/
.hugo_build.lock
# Misc
CHANGELOG.md
.ruff_cache


@ -1,47 +0,0 @@
---
version: "1.1"
versioning:
update-major: []
update-minor: [feat]
update-patch: [fix, perf, refactor, chore, test, ci, docs]
tag:
pattern: "v%d.%d.%d"
release-notes:
sections:
- name: Features
commit-types: [feat]
section-type: commits
- name: Bug Fixes
commit-types: [fix]
section-type: commits
- name: Performance Improvements
commit-types: [perf]
section-type: commits
- name: Code Refactoring
commit-types: [refactor]
section-type: commits
- name: Others
commit-types: [chore]
section-type: commits
- name: Testing
commit-types: [test]
section-type: commits
- name: CI Pipeline
commit-types: [ci]
section-type: commits
- name: Documentation
commit-types: [docs]
section-type: commits
- name: Breaking Changes
section-type: breaking-changes
commit-message:
footer:
issue:
key: issue
add-value-prefix: "#"
issue:
regex: "#?[0-9]+"

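The removed .gitsv/config.yml above maps commit types to semver bumps (feat bumps minor; fix, perf, refactor, chore, test, ci and docs bump patch) with tags following v%d.%d.%d. A simplified sketch of that policy, not git-sv's actual implementation:

# Simplified sketch of the bump policy described above; not git-sv's implementation.
UPDATE_MINOR = {"feat"}
UPDATE_PATCH = {"fix", "perf", "refactor", "chore", "test", "ci", "docs"}

def next_version(current, commit_types):
    major, minor, patch = (int(p) for p in current.lstrip("v").split("."))
    if any(t in UPDATE_MINOR for t in commit_types):
        minor, patch = minor + 1, 0
    elif any(t in UPDATE_PATCH for t in commit_types):
        patch += 1
    return f"v{major}.{minor}.{patch}"  # tag pattern "v%d.%d.%d"

print(next_version("v2.0.1", ["fix", "docs"]))  # -> v2.0.2
print(next_version("v2.0.1", ["feat"]))         # -> v2.1.0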

@ -1 +0,0 @@
https://hub.docker.com/r/thegeeklab/*


@ -1,10 +0,0 @@
---
- id: ansible-later
name: ansible-later
description: Run ansible-later, a best-practice scanner for Ansible.
entry: ansible-later
language: python
pass_filenames: False
always_run: True
additional_dependencies:
- .[ansible-core]


@ -1,2 +1,3 @@
.drone.yml
*.tpl.md
LICENSE


@ -1,73 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: build
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: quay.io/thegeeklab/wp-docker-buildx:4
settings:
containerfile: Containerfile.multiarch
dry_run: true
platforms:
- linux/amd64
- linux/arm64
provenance: false
repo: ${CI_REPO}
when:
- event: [pull_request]
- name: publish-dockerhub
image: quay.io/thegeeklab/wp-docker-buildx:4
group: container
settings:
auto_tag: true
containerfile: Containerfile.multiarch
password:
from_secret: docker_password
platforms:
- linux/amd64
- linux/arm64
provenance: false
repo: ${CI_REPO}
username:
from_secret: docker_username
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
- name: publish-quay
image: quay.io/thegeeklab/wp-docker-buildx:4
group: container
settings:
auto_tag: true
containerfile: Containerfile.multiarch
password:
from_secret: quay_password
platforms:
- linux/amd64
- linux/arm64
provenance: false
registry: quay.io
repo: quay.io/${CI_REPO}
username:
from_secret: quay_username
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
depends_on:
- lint
- test


@ -1,56 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: build
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: checksum
image: quay.io/thegeeklab/alpine-tools
commands:
- cd dist/ && sha256sum * > ../sha256sum.txt
- name: changelog
image: quay.io/thegeeklab/git-sv
commands:
- git sv current-version
- git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
- cat CHANGELOG.md
- name: publish-github
image: docker.io/plugins/github-release
settings:
api_key:
from_secret: github_token
files:
- dist/*
- sha256sum.txt
note: CHANGELOG.md
overwrite: true
title: ${CI_COMMIT_TAG}
when:
- event: [tag]
- name: publish-pypi
image: docker.io/library/python:3.12
secrets:
- source: pypi_password
target: POETRY_HTTP_BASIC_PYPI_PASSWORD
- source: pypi_username
target: POETRY_HTTP_BASIC_PYPI_USERNAME
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n
when:
- event: [tag]
depends_on:
- lint
- test


@ -1,100 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: assets
image: quay.io/thegeeklab/alpine-tools
commands:
- make doc
- name: markdownlint
image: quay.io/thegeeklab/markdownlint-cli
group: test
commands:
- markdownlint 'README.md' 'CONTRIBUTING.md'
- name: spellcheck
image: quay.io/thegeeklab/alpine-tools
group: test
commands:
- spellchecker --files 'docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls
environment:
FORCE_COLOR: "true"
- name: link-validation
image: docker.io/lycheeverse/lychee
group: test
commands:
- lychee --no-progress --format detailed docs/content README.md
- name: build
image: quay.io/thegeeklab/hugo:0.127.0
commands:
- hugo --panicOnWarning -s docs/
- name: beautify
image: quay.io/thegeeklab/alpine-tools
commands:
- html-beautify -r -f 'docs/public/**/*.html'
environment:
FORCE_COLOR: "true"
- name: publish
image: quay.io/thegeeklab/wp-s3-action
settings:
access_key:
from_secret: s3_access_key
bucket: geekdocs
delete: true
endpoint:
from_secret: s3_endpoint
path_style: true
secret_key:
from_secret: s3_secret_access_key
source: docs/public/
strip_prefix: docs/public/
target: /${CI_REPO_NAME}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success, failure]
- name: pushrm-dockerhub
image: docker.io/chko/docker-pushrm:1
secrets:
- source: docker_password
target: DOCKER_PASS
- source: docker_username
target: DOCKER_USER
environment:
PUSHRM_FILE: README.md
PUSHRM_SHORT: Another best practice scanner for Ansible roles and playbooks
PUSHRM_TARGET: ${CI_REPO}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success]
- name: pushrm-quay
image: docker.io/chko/docker-pushrm:1
secrets:
- source: quay_token
target: APIKEY__QUAY_IO
environment:
PUSHRM_FILE: README.md
PUSHRM_TARGET: quay.io/${CI_REPO}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success]
depends_on:
- build-package
- build-container


@ -1,25 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: check-format
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install
- poetry run ruff format --check --diff ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"
- name: check-coding
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run ruff check ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"


@ -1,26 +0,0 @@
---
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
runs_on: [success, failure]
steps:
- name: matrix
image: quay.io/thegeeklab/wp-matrix
settings:
homeserver:
from_secret: matrix_homeserver
password:
from_secret: matrix_password
roomid:
from_secret: matrix_roomid
username:
from_secret: matrix_username
when:
- status: [success, failure]
depends_on:
- docs


@ -1,35 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
variables:
- &pytest_base
group: pytest
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run pytest --cov-append
- poetry version
- poetry run ${CI_REPO_NAME} --help
environment:
PY_COLORS: "1"
steps:
- name: python-312
image: docker.io/library/python:3.12
<<: *pytest_base
- name: python-311
image: docker.io/library/python:3.11
<<: *pytest_base
- name: python-310
image: docker.io/library/python:3.10
<<: *pytest_base
- name: python-39
image: docker.io/library/python:3.9
<<: *pytest_base


@ -3,7 +3,7 @@
## Security
If you think you have found a **security issue**, please do not mention it in this repository.
Instead, send an email to `security@thegeeklab.de` with as many details as possible so it can be handled confidential.
Instead, send an email to security@thegeeklab.de with as many details as possible so it can be handled confidential.
## Bug Reports and Feature Requests


@ -1,5 +1,5 @@
# renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc
THEME_VERSION := v0.46.0
THEME_VERSION := v0.22.2
THEME := hugo-geekdoc
BASEDIR := docs
THEMEDIR := $(BASEDIR)/themes


@ -2,12 +2,13 @@
Another best practice scanner for Ansible roles and playbooks
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-later/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-later)
[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-later?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-later)
[![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later)
[![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later)
[![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![Codecov](https://img.shields.io/codecov/c/github/thegeeklab/ansible-later)](https://codecov.io/gh/thegeeklab/ansible-later)
[![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors)
[![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later)
[![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE)
@ -18,9 +19,19 @@ ansible-later does **not** ensure that your role will work as expected. For depl
You can find the full documentation at [https://ansible-later.geekdocs.de](https://ansible-later.geekdocs.de/).
## Community
<!-- prettier-ignore-start -->
<!-- spellchecker-disable -->
- [GitHub Action](https://github.com/patrickjahns/ansible-later-action) by [@patrickjahns](https://github.com/patrickjahns)
<!-- spellchecker-enable -->
<!-- prettier-ignore-end -->
## Contributors
Special thanks to all [contributors](https://github.com/thegeeklab/ansible-later/graphs/contributors). If you would like to contribute,
Special thanks goes to all [contributors](https://github.com/thegeeklab/ansible-later/graphs/contributors). If you would like to contribute,
please see the [instructions](https://github.com/thegeeklab/ansible-later/blob/main/CONTRIBUTING.md).
ansible-later is a fork of Will Thames [ansible-review](https://github.com/willthames/ansible-review). Thanks for your work on ansible-review and ansible-lint.


@ -5,10 +5,12 @@ import argparse
import multiprocessing
import sys
from ansiblelater import LOG, __version__, logger
from ansiblelater import LOG
from ansiblelater import __version__
from ansiblelater import logger
from ansiblelater.candidate import Candidate
from ansiblelater.rule import SingleRules
from ansiblelater.settings import Settings
from ansiblelater.standard import SingleStandards
def main():
@ -22,33 +24,33 @@ def main():
parser.add_argument(
"-r",
"--rules-dir",
dest="rules.dir",
metavar="DIR",
dest="rules.standards",
metavar="RULES",
action="append",
help="directory of rules",
help="directory of standard rules"
)
parser.add_argument(
"-B",
"--no-builtin",
dest="rules.builtin",
"--no-buildin",
dest="rules.buildin",
action="store_false",
help="disables built-in rules",
help="disables build-in standard rules"
)
parser.add_argument(
"-i",
"--include-rules",
dest="rules.include_filter",
metavar="TAGS",
"-s",
"--standards",
dest="rules.filter",
metavar="FILTER",
action="append",
help="limit rules to given id/tags",
help="limit standards to given ID's"
)
parser.add_argument(
"-x",
"--exclude-rules",
"--exclude-standards",
dest="rules.exclude_filter",
metavar="TAGS",
metavar="EXCLUDE_FILTER",
action="append",
help="exclude rules by given it/tags",
help="exclude standards by given ID's"
)
parser.add_argument(
"-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
@ -57,7 +59,9 @@ def main():
"-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
)
parser.add_argument("rules.files", nargs="*")
parser.add_argument("-V", "--version", action="version", version=f"%(prog)s {__version__}")
parser.add_argument(
"-V", "--version", action="version", version="%(prog)s {}".format(__version__)
)
args = parser.parse_args().__dict__
@ -65,7 +69,7 @@ def main():
config = settings.config
logger.update_logger(LOG, config["logging"]["level"], config["logging"]["json"])
SingleRules(config["rules"]["dir"])
SingleStandards(config["rules"]["standards"]).rules
workers = max(multiprocessing.cpu_count() - 2, 2)
p = multiprocessing.Pool(workers)
@ -74,22 +78,25 @@ def main():
candidate = Candidate.classify(filename, settings)
if candidate:
if candidate.binary:
LOG.info(f"Not reviewing binary file {filename}")
LOG.info("Not reviewing binary file {name}".format(name=filename))
continue
if candidate.vault:
LOG.info(f"Not reviewing vault file {filename}")
LOG.info("Not reviewing vault file {name}".format(name=filename))
continue
LOG.info(f"Reviewing all of {candidate}")
tasks.append(candidate)
else:
LOG.info("Reviewing all of {candidate}".format(candidate=candidate))
tasks.append(candidate)
else:
LOG.info(f"Couldn't classify file {filename}")
LOG.info("Couldn't classify file {name}".format(name=filename))
errors = sum(p.map(_review_wrapper, tasks))
errors = (sum(p.map(_review_wrapper, tasks)))
p.close()
p.join()
return_code = 1 if errors != 0 else 0
if not errors == 0:
return_code = 1
else:
return_code = 0
sys.exit(return_code)


@ -3,15 +3,19 @@
import codecs
import copy
import os
import re
from distutils.version import LooseVersion
from ansible.plugins.loader import module_loader
from ansiblelater import LOG
from ansiblelater import utils
from ansiblelater.logger import flag_extra
from ansiblelater.rule import RuleBase, SingleRules
from ansiblelater.standard import SingleStandards
from ansiblelater.standard import StandardBase
class Candidate:
class Candidate(object):
"""
Meta object for all files which later has to process.
@ -19,12 +23,12 @@ class Candidate:
bundled with necessary meta informations for rule processing.
"""
def __init__(self, filename, settings={}, rules=[]): # noqa
def __init__(self, filename, settings={}, standards=[]):
self.path = filename
self.binary = False
self.vault = False
self.filemeta = type(self).__name__.lower()
self.kind = type(self).__name__.lower()
self.filetype = type(self).__name__.lower()
self.expected_version = True
self.faulty = False
self.config = settings.config
self.settings = settings
@ -36,127 +40,204 @@ class Candidate:
except UnicodeDecodeError:
self.binary = True
def _filter_rules(self):
target_rules = []
includes = self.config["rules"]["include_filter"]
def _get_version(self):
path = self.path
version = None
config_version = self.config["rules"]["version"].strip()
if config_version:
version_config_re = re.compile(r"([\d.]+)")
match = version_config_re.match(config_version)
if match:
version = match.group(1)
if not self.binary:
if isinstance(self, RoleFile):
parentdir = os.path.dirname(os.path.abspath(self.path))
while parentdir != os.path.dirname(parentdir):
meta_file = os.path.join(parentdir, "meta", "main.yml")
if os.path.exists(meta_file):
path = meta_file
break
parentdir = os.path.dirname(parentdir)
version_file_re = re.compile(r"^# Standards:\s*([\d.]+)")
with codecs.open(path, mode="rb", encoding="utf-8") as f:
for line in f:
match = version_file_re.match(line)
if match:
version = match.group(1)
if not version:
version = utils.standards_latest(self.standards)
if self.expected_version:
if isinstance(self, RoleFile):
LOG.warning(
"{name} {path} is in a role that contains a meta/main.yml without a "
"declared standards version. "
"Using latest standards version {version}".format(
name=type(self).__name__, path=self.path, version=version
)
)
else:
LOG.warning(
"{name} {path} does not present standards version. "
"Using latest standards version {version}".format(
name=type(self).__name__, path=self.path, version=version
)
)
else:
LOG.info(
"{name} {path} declares standards version {version}".format(
name=type(self).__name__, path=self.path, version=version
)
)
return version
def _filter_standards(self):
target_standards = []
includes = self.config["rules"]["filter"]
excludes = self.config["rules"]["exclude_filter"]
if len(includes) == 0:
includes = [s.rid for s in self.rules]
includes = [s.sid for s in self.standards]
for rule in self.rules:
if rule.rid in includes and rule.rid not in excludes:
target_rules.append(rule)
for standard in self.standards:
if standard.sid in includes and standard.sid not in excludes:
target_standards.append(standard)
return target_rules
return target_standards
def review(self):
def review(self, lines=None):
errors = 0
self.rules = SingleRules(self.config["rules"]["dir"]).rules
self.standards = SingleStandards(self.config["rules"]["standards"]).rules
self.version = self._get_version()
for rule in self._filter_rules():
if self.kind not in rule.types:
for standard in self._filter_standards():
if type(self).__name__.lower() not in standard.types:
continue
result = rule.check(self, self.config)
result = standard.check(self, self.config)
if not result:
LOG.error(f"rule '{rule.rid}' returns an empty result object. Check failed!")
LOG.error(
"Standard '{id}' returns an empty result object. Check failed!".format(
id=standard.sid
)
)
continue
labels = {
"tag": "review",
"rule": rule.description,
"standard": standard.description,
"file": self.path,
"passed": True,
"passed": True
}
if rule.rid and rule.rid.strip():
labels["rid"] = rule.rid
if standard.sid and standard.sid.strip():
labels["sid"] = standard.sid
for err in result.errors:
err_labels = copy.copy(labels)
err_labels["passed"] = False
rid = self._format_id(rule.rid)
path = self.path
description = rule.description
if isinstance(err, RuleBase.Error):
if isinstance(err, StandardBase.Error):
err_labels.update(err.to_dict())
msg = f"{rid}rule '{description}' not met:\n{path}:{err}"
if rule.rid not in self.config["rules"]["warning_filter"]:
LOG.error(msg, extra=flag_extra(err_labels))
errors = errors + 1
if not standard.version:
LOG.warning(
"{sid}Best practice '{description}' not met:\n{path}:{error}".format(
sid=self._format_id(standard.sid),
description=standard.description,
path=self.path,
error=err
),
extra=flag_extra(err_labels)
)
elif LooseVersion(standard.version) > LooseVersion(self.version):
LOG.warning(
"{sid}Future standard '{description}' not met:\n{path}:{error}".format(
sid=self._format_id(standard.sid),
description=standard.description,
path=self.path,
error=err
),
extra=flag_extra(err_labels)
)
else:
LOG.warning(msg, extra=flag_extra(err_labels))
msg = "{sid}Standard '{description}' not met:\n{path}:{error}".format(
sid=self._format_id(standard.sid),
description=standard.description,
path=self.path,
error=err
)
if standard.sid not in self.config["rules"]["warning_filter"]:
LOG.error(msg, extra=flag_extra(err_labels))
errors = errors + 1
else:
LOG.warning(msg, extra=flag_extra(err_labels))
return errors
@staticmethod
def classify(filename, settings={}, rules=[]): # noqa
def classify(filename, settings={}, standards=[]):
parentdir = os.path.basename(os.path.dirname(filename))
basename = os.path.basename(filename)
ext = os.path.splitext(filename)[1][1:]
if parentdir in ["tasks"]:
return Task(filename, settings, rules)
return Task(filename, settings, standards)
if parentdir in ["handlers"]:
return Handler(filename, settings, rules)
return Handler(filename, settings, standards)
if parentdir in ["vars", "defaults"]:
return RoleVars(filename, settings, rules)
return RoleVars(filename, settings, standards)
if "group_vars" in filename.split(os.sep):
return GroupVars(filename, settings, rules)
return GroupVars(filename, settings, standards)
if "host_vars" in filename.split(os.sep):
return HostVars(filename, settings, rules)
return HostVars(filename, settings, standards)
if parentdir in ["meta"] and "main" in basename:
return Meta(filename, settings, rules)
return Meta(filename, settings, standards)
if parentdir in ["meta"] and "argument_specs" in basename:
return ArgumentSpecs(filename, settings, rules)
if parentdir in [
"library",
"lookup_plugins",
"callback_plugins",
"filter_plugins",
] or filename.endswith(".py"):
return Code(filename, settings, rules)
return ArgumentSpecs(filename, settings, standards)
if (
parentdir in ["library", "lookup_plugins", "callback_plugins", "filter_plugins"]
or filename.endswith(".py")
):
return Code(filename, settings, standards)
if basename == "inventory" or basename == "hosts" or parentdir in ["inventories"]:
return Inventory(filename, settings, rules)
if "rolesfile" in basename or ("requirements" in basename and ext in ["yaml", "yml"]):
return Rolesfile(filename, settings, rules)
return Inventory(filename, settings, standards)
if "rolesfile" in basename or "requirements" in basename:
return Rolesfile(filename, settings, standards)
if "Makefile" in basename:
return Makefile(filename, settings, rules)
return Makefile(filename, settings, standards)
if "templates" in filename.split(os.sep) or basename.endswith(".j2"):
return Template(filename, settings, rules)
return Template(filename, settings, standards)
if "files" in filename.split(os.sep):
return File(filename, settings, rules)
return File(filename, settings, standards)
if basename.endswith(".yml") or basename.endswith(".yaml"):
return Playbook(filename, settings, rules)
return Playbook(filename, settings, standards)
if "README" in basename:
return Doc(filename, settings, rules)
return Doc(filename, settings, standards)
return None
def _format_id(self, rule_id):
rid = rule_id.strip()
if rid:
rule_id = f"[{rid}] "
def _format_id(self, standard_id):
if standard_id and standard_id.strip():
standard_id = "[{id}] ".format(id=standard_id.strip())
return rule_id
return standard_id
def __repr__(self):
return f"{self.kind} ({self.path})"
def __repr__(self): # noqa
return "{name} ({path})".format(name=type(self).__name__, path=self.path)
def __getitem__(self, item):
def __getitem__(self, item): # noqa
return self.__dict__.get(item)
class RoleFile(Candidate):
"""Object classified as Ansible role file."""
def __init__(self, filename, settings={}, rules=[]): # noqa
super().__init__(filename, settings, rules)
def __init__(self, filename, settings={}, standards=[]):
super(RoleFile, self).__init__(filename, settings, standards)
parentdir = os.path.dirname(os.path.abspath(filename))
while parentdir != os.path.dirname(parentdir):
@ -176,17 +257,17 @@ class Playbook(Candidate):
class Task(RoleFile):
"""Object classified as Ansible task file."""
def __init__(self, filename, settings={}, rules=[]): # noqa
super().__init__(filename, settings, rules)
self.filemeta = "tasks"
def __init__(self, filename, settings={}, standards=[]):
super(Task, self).__init__(filename, settings, standards)
self.filetype = "tasks"
class Handler(RoleFile):
"""Object classified as Ansible handler file."""
def __init__(self, filename, settings={}, rules=[]): # noqa
super().__init__(filename, settings, rules)
self.filemeta = "handlers"
def __init__(self, filename, settings={}, standards=[]):
super(Handler, self).__init__(filename, settings, standards)
self.filetype = "handlers"
class Vars(Candidate):
@ -195,7 +276,15 @@ class Vars(Candidate):
pass
class InventoryVars(Candidate):
class Unversioned(Candidate):
"""Object classified as unversioned file."""
def __init__(self, filename, settings={}, standards=[]):
super(Unversioned, self).__init__(filename, settings, standards)
self.expected_version = False
class InventoryVars(Unversioned):
"""Object classified as Ansible inventory vars."""
pass
@ -231,13 +320,13 @@ class ArgumentSpecs(RoleFile):
pass
class Inventory(Candidate):
class Inventory(Unversioned):
"""Object classified as Ansible inventory file."""
pass
class Code(Candidate):
class Code(Unversioned):
"""Object classified as code file."""
pass
@ -249,13 +338,13 @@ class Template(RoleFile):
pass
class Doc(Candidate):
class Doc(Unversioned):
"""Object classified as documentation file."""
pass
class Makefile(Candidate):
class Makefile(Unversioned):
"""Object classified as makefile."""
pass
@ -267,7 +356,7 @@ class File(RoleFile):
pass
class Rolesfile(Candidate):
class Rolesfile(Unversioned):
"""Object classified as Ansible roles file."""
pass


@ -8,14 +8,14 @@ class LaterError(Exception):
def __init__(self, msg, original):
"""Initialize new exception."""
super().__init__(f"{msg}: {original}")
super(LaterError, self).__init__("{msg}: {org}".format(msg=msg, org=original))
self.original = original
class LaterAnsibleError(Exception):
"""Wrapper for ansible syntax errors."""
def __init__(self, original):
def __init__(self, msg, original):
lines = original.message.splitlines()
line_no = re.search("line(.*?),", lines[2])


@ -3,6 +3,7 @@
import logging
import os
import sys
from distutils.util import strtobool
import colorama
from pythonjsonlogger import jsonlogger
@ -11,35 +12,12 @@ CONSOLE_FORMAT = "{}%(levelname)s:{} %(message)s"
JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"
def strtobool(value):
"""Convert a string representation of truth to true or false."""
_map = {
"y": True,
"yes": True,
"t": True,
"true": True,
"on": True,
"1": True,
"n": False,
"no": False,
"f": False,
"false": False,
"off": False,
"0": False,
}
try:
return _map[str(value).lower()]
except KeyError as err:
raise ValueError(f'"{value}" is not a valid bool value') from err
def to_bool(string):
return bool(strtobool(str(string)))
def _should_do_markup():
py_colors = os.environ.get("PY_COLORS", None)
if py_colors is not None:
return to_bool(py_colors)
@ -52,7 +30,7 @@ colorama.init(autoreset=True, strip=(not _should_do_markup()))
def flag_extra(extra):
"""Ensure extra args are prefixed."""
flagged = {}
flagged = dict()
if isinstance(extra, dict):
for key, value in extra.items():
@ -61,7 +39,7 @@ def flag_extra(extra):
return flagged
class LogFilter:
class LogFilter(object):
"""A custom log filter which excludes log messages above the logged level."""
def __init__(self, level):
@ -82,8 +60,8 @@ class LogFilter:
class MultilineFormatter(logging.Formatter):
"""Logging Formatter to reset color after newline characters."""
def format(self, record):
record.msg = record.msg.replace("\n", f"\n{colorama.Style.RESET_ALL}... ")
def format(self, record): # noqa
record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
record.msg = record.msg + "\n"
return logging.Formatter.format(self, record)
@ -91,7 +69,7 @@ class MultilineFormatter(logging.Formatter):
class MultilineJsonFormatter(jsonlogger.JsonFormatter):
"""Logging Formatter to remove newline characters."""
def format(self, record):
def format(self, record): # noqa
record.msg = record.msg.replace("\n", " ")
return jsonlogger.JsonFormatter.format(self, record)
@ -207,4 +185,4 @@ def color_text(color, msg):
"""
msg = msg.format(colorama.Style.BRIGHT, colorama.Style.NORMAL)
return f"{color}{msg}{colorama.Style.RESET_ALL}"
return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)


@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckBecomeUser(RuleBase):
rid = "ANS115"
class CheckBecomeUser(StandardBase):
sid = "ANSIBLE0015"
description = "Become should be combined with become_user"
helptext = "the task has `become` enabled but `become_user` is missing"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -14,7 +16,7 @@ class CheckBecomeUser(RuleBase):
if not errors:
gen = (task for task in tasks if "become" in task)
for task in gen:
if task["become"] in true_value and "become_user" not in task:
if task["become"] in true_value and "become_user" not in task.keys():
errors.append(self.Error(task["__line__"], self.helptext))
return self.Result(candidate.path, errors)


@ -1,13 +1,15 @@
import re
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
from ansiblelater.utils import count_spaces
class CheckBracesSpaces(RuleBase):
rid = "ANS104"
class CheckBracesSpaces(StandardBase):
sid = "ANSIBLE0004"
description = "YAML should use consistent number of spaces around variables"
helptext = "no suitable numbers of spaces (min: {min} max: {max})"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
@ -39,7 +41,7 @@ class CheckBracesSpaces(RuleBase):
i,
self.helptext.format(
min=conf["min-spaces-inside"], max=conf["max-spaces-inside"]
),
)
)
)
return self.Result(candidate.path, errors)


@ -17,14 +17,15 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckChangedInWhen(RuleBase):
rid = "ANS126"
class CheckChangedInWhen(StandardBase):
sid = "ANSIBLE0026"
description = "Use handlers instead of `when: changed`"
helptext = "tasks using `when: result.changed` setting are effectively acting as a handler"
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -34,7 +35,7 @@ class CheckChangedInWhen(RuleBase):
for task in tasks:
when = None
if task["__ansible_action_type__"] in ["task", "meta"]:
if task["__ansible_action_type__"] == "task":
when = task.get("when")
if isinstance(when, str):
@ -52,16 +53,6 @@ class CheckChangedInWhen(RuleBase):
if not isinstance(item, str):
return False
if not {"and", "or", "not"}.isdisjoint(item.split()):
return False
return any(
changed in item
for changed in [
".changed",
"|changed",
'["changed"]',
"['changed']",
"is changed",
]
changed in item for changed in [".changed", "|changed", '["changed"]', "['changed']"]
)


@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckCommandHasChanges(RuleBase):
rid = "ANS111"
class CheckCommandHasChanges(StandardBase):
sid = "ANSIBLE0011"
description = "Commands should be idempotent"
helptext = (
"commands should only read while using `changed_when` or try to be "
"idempotent while using controls like `creates`, `removes` or `when`"
)
version = "0.1"
types = ["playbook", "task"]
def check(self, candidate, settings):
@ -16,13 +18,12 @@ class CheckCommandHasChanges(RuleBase):
if not errors:
for task in tasks:
if task["action"]["__ansible_module__"] in commands and (
"changed_when" not in task
and "when" not in task
and "when" not in task.get("__ansible_action_meta__", [])
and "creates" not in task["action"]
and "removes" not in task["action"]
):
errors.append(self.Error(task["__line__"], self.helptext))
if task["action"]["__ansible_module__"] in commands:
if (
"changed_when" not in task and "when" not in task
and "when" not in task.get("__ansible_action_meta__", [])
and "creates" not in task["action"] and "removes" not in task["action"]
):
errors.append(self.Error(task["__line__"], self.helptext))
return self.Result(candidate.path, errors)


@ -20,13 +20,15 @@
import os
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckCommandInsteadOfArgument(RuleBase):
rid = "ANS117"
class CheckCommandInsteadOfArgument(StandardBase):
sid = "ANSIBLE0017"
description = "Commands should not be used in place of module arguments"
helptext = "{exec} used in place of file modules argument {arg}"
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -39,7 +41,7 @@ class CheckCommandInsteadOfArgument(RuleBase):
"ln": "state=link",
"mkdir": "state=directory",
"rmdir": "state=absent",
"rm": "state=absent",
"rm": "state=absent"
}
if not errors:
@ -49,14 +51,13 @@ class CheckCommandInsteadOfArgument(RuleBase):
executable = os.path.basename(first_cmd_arg)
if (
first_cmd_arg
and executable in arguments
first_cmd_arg and executable in arguments
and task["action"].get("warn", True)
):
errors.append(
self.Error(
task["__line__"],
self.helptext.format(exec=executable, arg=arguments[executable]),
self.helptext.format(exec=executable, arg=arguments[executable])
)
)


@ -1,12 +1,14 @@
import os
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckCommandInsteadOfModule(RuleBase):
rid = "ANS108"
class CheckCommandInsteadOfModule(StandardBase):
sid = "ANSIBLE0008"
description = "Commands should not be used in place of modules"
helptext = "{exec} command used in place of {module} module"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -29,7 +31,7 @@ class CheckCommandInsteadOfModule(RuleBase):
"rsync": "synchronize",
"supervisorctl": "supervisorctl",
"systemctl": "systemd",
"sed": "template or lineinfile",
"sed": "template or lineinfile"
}
if not errors:
@ -37,19 +39,14 @@ class CheckCommandInsteadOfModule(RuleBase):
if task["action"]["__ansible_module__"] in commands:
first_cmd_arg = self.get_first_cmd_arg(task)
executable = os.path.basename(first_cmd_arg)
cmd = self.get_safe_cmd(task)
if (
first_cmd_arg
and executable in modules
and task["action"].get("warn", True)
and "register" not in task
and not any(ch in cmd for ch in self.SHELL_PIPE_CHARS)
first_cmd_arg and executable in modules
and task["action"].get("warn", True) and "register" not in task
):
errors.append(
self.Error(
task["__line__"],
self.helptext.format(exec=executable, module=modules[executable]),
self.helptext.format(exec=executable, module=modules[executable])
)
)


@ -1,13 +1,15 @@
import re
from ansiblelater.candidate import Template
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckCompareToEmptyString(RuleBase):
rid = "ANS112"
description = 'Don\'t compare to empty string ""'
helptext = "use `when: var` rather than `when: var !=` (or conversely `when: not var`)"
class CheckCompareToEmptyString(StandardBase):
sid = "ANSIBLE0012"
description = "Don't compare to empty string \"\""
helptext = ("use `when: var` rather than `when: var !=` (or conversely `when: not var`)")
version = "0.1"
types = ["playbook", "task", "handler", "template"]
def check(self, candidate, settings):


@ -1,13 +1,15 @@
import re
from ansiblelater.candidate import Template
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckCompareToLiteralBool(RuleBase):
rid = "ANS113"
class CheckCompareToLiteralBool(StandardBase):
sid = "ANSIBLE0013"
description = "Don't compare to True or False"
helptext = "use `when: var` rather than `when: var == True` (or conversely `when: not var`)"
helptext = ("use `when: var` rather than `when: var == True` (or conversely `when: not var`)")
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckDeprecated(RuleBase):
rid = "ANS999"
class CheckDeprecated(StandardBase):
sid = "ANSIBLE9999"
description = "Deprecated features should not be used"
helptext = "`{old}` is deprecated and should not be used anymore. Use `{new}` instead."
helptext = "'{old}' is deprecated and should not be used anymore. Use '{new}' instead."
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -18,7 +20,7 @@ class CheckDeprecated(RuleBase):
task["__line__"],
self.helptext.format(
old="skip_ansible_lint", new="skip_ansible_later"
),
)
)
)
return self.Result(candidate.path, errors)

View File

@ -1,87 +0,0 @@
# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
from ansiblelater.rule import RuleBase
from ansiblelater.utils import has_glob, has_jinja
class CheckDeprecatedBareVars(RuleBase):
rid = "ANS127"
description = "Deprecated bare variables in loops must not be used"
helptext = (
"bare var '{barevar}' in '{loop_type}' must use full var syntax '{{{{ {barevar} }}}}' "
"or be converted to a list"
)
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
tasks, self.errors = self.get_normalized_tasks(candidate, settings)
if not self.errors:
for task in tasks:
loop_type = next((key for key in task if key.startswith("with_")), None)
if not loop_type:
continue
if loop_type in [
"with_nested",
"with_together",
"with_flattened",
"with_filetree",
"with_community.general.filetree",
]:
# These loops can either take a list defined directly in the task
# or a variable that is a list itself. When a single variable is used
# we just need to check that one variable, and not iterate over it like
# it's a list. Otherwise, loop through and check all items.
items = task[loop_type]
if not isinstance(items, (list, tuple)):
items = [items]
for var in items:
self._matchvar(var, task, loop_type)
elif loop_type == "with_subelements":
self._matchvar(task[loop_type][0], task, loop_type)
elif loop_type in ["with_sequence", "with_ini", "with_inventory_hostnames"]:
pass
else:
self._matchvar(task[loop_type], task, loop_type)
return self.Result(candidate.path, self.errors)
def _matchvar(self, varstring, task, loop_type):
if isinstance(varstring, str) and not has_jinja(varstring):
valid = loop_type == "with_fileglob" and bool(
has_jinja(varstring) or has_glob(varstring),
)
valid |= loop_type == "with_filetree" and bool(
has_jinja(varstring) or varstring.endswith(os.sep),
)
if not valid:
self.errors.append(
self.Error(
task["__line__"],
self.helptext.format(barevar=varstring, loop_type=loop_type),
)
)
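
A rough illustration of the bare-variable test used by the removed rule above. The two helpers are re-implemented inline with the same regexes that appear in the utils hunk further down; the sample loop values are invented.

import re

def has_jinja(value):
    """Return True if a string seems to contain Jinja templating."""
    return bool(isinstance(value, str) and re.search(r"{[{%#].*[%#}]}", value, re.DOTALL))

def has_glob(value):
    """Return True if a string looks like a glob pattern."""
    return bool(isinstance(value, str) and re.search(r"[][*?]", value))

print(has_jinja("my_list"))            # False -> bare var, would be flagged
print(has_jinja("{{ my_list }}"))      # True  -> full var syntax, passes
print(has_glob("/etc/conf.d/*.conf"))  # True  -> acceptable for with_fileglob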

View File

@ -1,132 +0,0 @@
# Original code written by the authors of ansible-lint
from ansiblelater.rule import RuleBase
from ansiblelater.utils import load_plugin
class CheckFQCNBuiltin(RuleBase):
rid = "ANS128"
helptext = "use FQCN `{module_alias}` for module action `{module}`"
description = "Module actions should use full qualified collection names"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
module_aliases = {"block/always/rescue": "block/always/rescue"}
def check(self, candidate, settings):
tasks, errors = self.get_normalized_tasks(candidate, settings)
_builtins = [
"add_host",
"apt",
"apt_key",
"apt_repository",
"assemble",
"assert",
"async_status",
"blockinfile",
"command",
"copy",
"cron",
"debconf",
"debug",
"dnf",
"dpkg_selections",
"expect",
"fail",
"fetch",
"file",
"find",
"gather_facts",
"get_url",
"getent",
"git",
"group",
"group_by",
"hostname",
"import_playbook",
"import_role",
"import_tasks",
"include",
"include_role",
"include_tasks",
"include_vars",
"iptables",
"known_hosts",
"lineinfile",
"meta",
"package",
"package_facts",
"pause",
"ping",
"pip",
"raw",
"reboot",
"replace",
"rpm_key",
"script",
"service",
"service_facts",
"set_fact",
"set_stats",
"setup",
"shell",
"slurp",
"stat",
"subversion",
"systemd",
"sysvinit",
"tempfile",
"template",
"unarchive",
"uri",
"user",
"wait_for",
"wait_for_connection",
"yum",
"yum_repository",
]
if errors:
return self.Result(candidate.path, errors)
for task in tasks:
module = task["action"]["__ansible_module_original__"]
if module not in self.module_aliases:
loaded_module = load_plugin(module)
target = loaded_module.resolved_fqcn
self.module_aliases[module] = target
if target is None:
self.module_aliases[module] = module
continue
if target not in self.module_aliases:
self.module_aliases[target] = target
if module != self.module_aliases[module]:
module_alias = self.module_aliases[module]
if module_alias.startswith("ansible.builtin"):
legacy_module = module_alias.replace(
"ansible.builtin.",
"ansible.legacy.",
1,
)
if module != legacy_module:
helptext = self.helptext.format(module_alias=module_alias, module=module)
if module == "ansible.builtin.include":
helptext = (
"`ansible.builtin.include_tasks` or `ansible.builtin.import_tasks` "
f"should be used instead of the deprecated `{module}`"
)
errors.append(self.Error(task["__line__"], helptext))
else:
if module.count(".") < 2:
errors.append(
self.Error(
task["__line__"],
self.helptext.format(module_alias=module_alias, module=module),
)
)
return self.Result(candidate.path, errors)
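
A simplified sketch of the alias cache used above. resolve_fqcn is a stand-in for load_plugin(...).resolved_fqcn, which needs a full Ansible installation; the small builtins mapping and module names are assumptions for the demo.

def resolve_fqcn(module):
    # Stand-in resolver: short builtin names map to ansible.builtin.*, dotted names resolve to themselves.
    builtins = {"copy": "ansible.builtin.copy", "template": "ansible.builtin.template"}
    return builtins.get(module, module if "." in module else None)

module_aliases = {"block/always/rescue": "block/always/rescue"}

for module in ["copy", "ansible.builtin.copy", "community.general.ufw", "copy"]:
    if module not in module_aliases:           # second "copy" is served from the cache
        target = resolve_fqcn(module)
        module_aliases[module] = target or module
    alias = module_aliases[module]
    if module != alias:
        print(f"use FQCN `{alias}` for module action `{module}`")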

View File

@ -19,16 +19,18 @@
# THE SOFTWARE.
import re
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckFilePermissionMissing(RuleBase):
rid = "ANS118"
class CheckFilePermissionMissing(StandardBase):
sid = "ANSIBLE0018"
description = "File permissions unset or incorrect"
helptext = (
"`mode` parameter should set permissions explicitly (e.g. `mode: 0644`) "
"to avoid unexpected file permissions"
)
version = "0.2"
types = ["playbook", "task", "handler"]
_modules = {
@ -65,7 +67,8 @@ class CheckFilePermissionMissing(RuleBase):
mode = task["action"].get("mode", None)
state = task["action"].get("state", "file")
if module not in self._modules and module not in self._create_modules:
if module not in self._modules and \
module not in self._create_modules:
return False
if mode == "preserve" and module not in self._preserve_modules:

View File

@ -17,28 +17,22 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckFilePermissionOctal(RuleBase):
rid = "ANS119"
description = "Numeric file permissions without a leading zero can behave unexpectedly"
helptext = '`mode: {mode}` should be strings with a leading zero `mode: "0{mode}"`'
class CheckFilePermissionOctal(StandardBase):
sid = "ANSIBLE0019"
description = "Octal file permissions must contain leading zero or be a string"
helptext = "numeric file permissions without leading zero can behave in unexpected ways"
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
tasks, errors = self.get_normalized_tasks(candidate, settings)
modules = [
"assemble",
"copy",
"file",
"ini_file",
"lineinfile",
"replace",
"synchronize",
"template",
"unarchive",
"assemble", "copy", "file", "ini_file", "lineinfile", "replace", "synchronize",
"template", "unarchive"
]
if not errors:
@ -46,33 +40,28 @@ class CheckFilePermissionOctal(RuleBase):
if task["action"]["__ansible_module__"] in modules:
mode = task["action"].get("mode", None)
if isinstance(mode, int) and self._is_invalid_permission(mode):
errors.append(
self.Error(task["__line__"], self.helptext.format(mode=mode))
)
if isinstance(mode, int):
if self._is_invalid_permission(mode):
errors.append(self.Error(task["__line__"], self.helptext))
return self.Result(candidate.path, errors)
@staticmethod
def _is_invalid_permission(mode):
other_write_without_read = (
mode % 8 and mode % 8 < 4 and not (mode % 8 == 1 and (mode >> 6) % 2 == 1)
)
group_write_without_read = (
(mode >> 3) % 8
and (mode >> 3) % 8 < 4
and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1)
)
user_write_without_read = (mode >> 6) % 8 and (mode >> 6) % 8 < 4 and (mode >> 6) % 8 != 1
group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4
and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))
user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4
and not (mode >> 6) % 8 == 1)
other_more_generous_than_group = mode % 8 > (mode >> 3) % 8
other_more_generous_than_user = mode % 8 > (mode >> 6) % 8
group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8
return bool(
other_write_without_read
or group_write_without_read
or user_write_without_read
or other_more_generous_than_group
or other_more_generous_than_user
other_write_without_read or group_write_without_read or user_write_without_read
or other_more_generous_than_group or other_more_generous_than_user
or group_more_generous_than_user
)
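
A worked example of the bit arithmetic above. An unquoted mode: 0644 is typically read as octal by the YAML 1.1 resolver Ansible uses, while mode: 644 arrives as the decimal integer 644, whose user/group/other triplets no longer mean rw-r--r--; the quoted string "0644" recommended by the rule avoids the ambiguity entirely.

def fields(mode):
    # (user, group, other) permission digits of an integer mode
    return (mode >> 6) % 8, (mode >> 3) % 8, mode % 8

print(fields(0o644))  # (6, 4, 4) -> rw-r--r--, fine
print(fields(644))    # (2, 0, 4) -> "other" ends up more generous than user/group

user, group, other = fields(644)
print(other > group or other > user)  # True -> _is_invalid_permission() flags it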

View File

@ -1,12 +1,14 @@
import re
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckFilterSeparation(RuleBase):
rid = "ANS116"
class CheckFilterSeparation(StandardBase):
sid = "ANSIBLE0016"
description = "Jinja2 filters should be separated with spaces"
helptext = "no suitable numbers of spaces (required: 1)"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
def check(self, candidate, settings):
@ -14,18 +16,16 @@ class CheckFilterSeparation(RuleBase):
matches = []
braces = re.compile("{{(.*?)}}")
filters = re.compile(r"(?<=\|)((\s{2,})*\S+)|(\S+(\s{2,})*)(?=\|)")
filters = re.compile(r"(?<=\|)([\s]{2,}[^\s}]+|[^\s]+)|([^\s{]+[\s]{2,}|[^\s]+)(?=\|)")
if not errors:
for i, line in yamllines:
match = braces.findall(line)
if match:
for item in match:
# replace potential regex in filters
item = re.sub(r"\(.+\)", "(dummy)", item)
matches.append((i, item))
for i, item in matches:
if filters.findall(item):
for i, line in matches:
if filters.findall(line):
errors.append(self.Error(i, self.helptext))
return self.Result(candidate.path, errors)
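
A quick demonstration, with invented sample expressions, of the spacing regex from the main-branch side of this hunk: an expression is flagged whenever the pipe is not surrounded by exactly one space.

import re

braces = re.compile("{{(.*?)}}")
filters = re.compile(r"(?<=\|)((\s{2,})*\S+)|(\S+(\s{2,})*)(?=\|)")

samples = ["{{ var | lower }}", "{{ var| lower }}", "{{ var |  lower }}"]
for raw in samples:
    item = braces.search(raw).group(1)
    print(f"{raw!r}: {'flagged' if filters.findall(item) else 'ok'}")
# only "{{ var | lower }}" passes; the other two are flagged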

View File

@ -17,14 +17,15 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckGitHasVersion(RuleBase):
rid = "ANS120"
class CheckGitHasVersion(StandardBase):
sid = "ANSIBLE0020"
description = "Git checkouts should use explicit version"
helptext = "git checkouts should point to an explicit commit or tag, not `latest`"
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,41 +1,21 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckInstallUseLatest(RuleBase):
rid = "ANS109"
class CheckInstallUseLatest(StandardBase):
sid = "ANSIBLE0009"
description = "Package installs should use present, not latest"
helptext = "package installs should use `state=present` with or without a version"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
tasks, errors = self.get_normalized_tasks(candidate, settings)
package_managers = [
"yum",
"apt",
"dnf",
"homebrew",
"pacman",
"openbsd_package",
"pkg5",
"portage",
"pkgutil",
"slackpkg",
"swdepot",
"zypper",
"bundler",
"pip",
"pear",
"npm",
"yarn",
"gem",
"easy_install",
"bower",
"package",
"apk",
"openbsd_pkg",
"pkgng",
"sorcery",
"xbps",
"yum", "apt", "dnf", "homebrew", "pacman", "openbsd_package", "pkg5", "portage",
"pkgutil", "slackpkg", "swdepot", "zypper", "bundler", "pip", "pear", "npm", "yarn",
"gem", "easy_install", "bower", "package", "apk", "openbsd_pkg", "pkgng", "sorcery",
"xbps"
]
if not errors:

View File

@ -1,89 +0,0 @@
# Original code written by the authors of ansible-lint
import functools
from ansiblelater.rule import RuleBase
SORTER_TASKS = (
"name",
# "__module__",
# "action",
# "args",
None,  # <-- None includes all modules that are not using action and *
# "when",
# "notify",
# "tags",
"block",
"rescue",
"always",
)
class CheckKeyOrder(RuleBase):
rid = "ANS129"
description = "Check for recommended key order"
helptext = "{type} key order can be improved to `{sorted_keys}`"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
errors = []
tasks, err = self.get_normalized_tasks(candidate, settings)
if err:
return self.Result(candidate.path, err)
for task in tasks:
is_sorted, keys = self._sort_keys(task.get("__raw_task__"))
if not is_sorted:
errors.append(
self.Error(
task["__line__"],
self.helptext.format(type="task", sorted_keys=", ".join(keys)),
)
)
if candidate.kind == "playbook":
tasks, err = self.get_tasks(candidate, settings)
if err:
return self.Result(candidate.path, err)
for task in tasks:
is_sorted, keys = self._sort_keys(task)
if not is_sorted:
errors.append(
self.Error(
task["__line__"],
self.helptext.format(type="play", sorted_keys=", ".join(keys)),
)
)
return self.Result(candidate.path, errors)
@staticmethod
def _sort_keys(task):
if not task:
return True, []
keys = [str(key) for key in task if not key.startswith("_")]
sorted_keys = sorted(keys, key=functools.cmp_to_key(_task_property_sorter))
return (keys == sorted_keys), sorted_keys
def _task_property_sorter(property1, property2):
"""Sort task properties based on SORTER."""
v_1 = _get_property_sort_index(property1)
v_2 = _get_property_sort_index(property2)
return (v_1 > v_2) - (v_1 < v_2)
def _get_property_sort_index(name):
"""Return the index of the property in the sorter."""
a_index = -1
for i, v in enumerate(SORTER_TASKS):
if v == name:
return i
if v is None:
a_index = i
return a_index
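
A small demonstration of the comparator defined in the removed file above. Keys not listed in SORTER_TASKS fall into the None slot and keep their relative order (Python's sort is stable), so only name and block/rescue/always are pulled to fixed positions; SORTER_TASKS is reduced here to the uncommented entries.

import functools

SORTER_TASKS = ("name", None, "block", "rescue", "always")

def _index(key):
    # None acts as the catch-all slot for every other key
    return SORTER_TASKS.index(key) if key in SORTER_TASKS else SORTER_TASKS.index(None)

def _cmp(a, b):
    return (_index(a) > _index(b)) - (_index(a) < _index(b))

keys = ["when", "name", "ansible.builtin.debug", "tags"]
print(sorted(keys, key=functools.cmp_to_key(_cmp)))
# ['name', 'when', 'ansible.builtin.debug', 'tags'] -> the order the rule suggests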

View File

@ -1,12 +1,14 @@
import re
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckLiteralBoolFormat(RuleBase):
rid = "ANS114"
class CheckLiteralBoolFormat(StandardBase):
sid = "ANSIBLE0014"
description = "Literal bools should be consistent"
helptext = "literal bools should be written as `{bools}`"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
def check(self, candidate, settings):

View File

@ -1,12 +1,14 @@
# Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp>
# Copyright (c) 2018, Ansible Project
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckLocalAction(RuleBase):
rid = "ANS124"
class CheckLocalAction(StandardBase):
sid = "ANSIBLE0024"
description = "Don't use local_action"
helptext = "`delegate_to: localhost` should be used instead of `local_action`"
helptext = ("`delegate_to: localhost` should be used instead of `local_action`")
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,13 +1,15 @@
# Copyright (c) 2018, Ansible Project
from nested_lookup import nested_lookup
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckMetaChangeFromDefault(RuleBase):
rid = "ANS121"
class CheckMetaChangeFromDefault(StandardBase):
sid = "ANSIBLE0021"
description = "Roles meta/main.yml default values should be changed"
helptext = "meta/main.yml default values should be changed for: `{field}`"
version = "0.2"
types = ["meta"]
def check(self, candidate, settings):
@ -22,7 +24,7 @@ class CheckMetaChangeFromDefault(RuleBase):
if not errors:
for field, default in field_defaults:
pair = f"{field}: {default}"
pair = "{field}: {default}".format(field=field, default=default)
lookup = nested_lookup(field, content)
if lookup and default in nested_lookup(field, content):
errors.append(self.Error(None, self.helptext.format(field=pair)))

View File

@ -1,12 +1,14 @@
from nested_lookup import nested_lookup
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckMetaMain(RuleBase):
rid = "ANS102"
class CheckMetaMain(StandardBase):
sid = "ANSIBLE0002"
description = "Roles must contain suitable meta/main.yml"
helptext = "file should contain `{key}` key"
version = "0.1"
types = ["meta"]
def check(self, candidate, settings):
@ -14,8 +16,8 @@ class CheckMetaMain(RuleBase):
keys = ["author", "description", "min_ansible_version", "platforms"]
if not errors:
has_galaxy_info = isinstance(content, dict) and "galaxy_info" in content
has_dependencies = isinstance(content, dict) and "dependencies" in content
has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content.keys())
has_dependencies = (isinstance(content, dict) and "dependencies" in content.keys())
if not has_galaxy_info:
errors.append(self.Error(None, self.helptext.format(key="galaxy_info")))

View File

@ -1,12 +1,14 @@
from collections import defaultdict
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckNameFormat(RuleBase):
rid = "ANS107"
class CheckNameFormat(StandardBase):
sid = "ANSIBLE0007"
description = "Name of tasks and handlers must be formatted"
helptext = "name `{name}` should start with uppercase"
helptext = "name '{name}' should start with uppercase"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -17,7 +19,7 @@ class CheckNameFormat(RuleBase):
for task in tasks:
if "name" in task:
namelines[task["name"]].append(task["__line__"])
for name, lines in namelines.items():
for (name, lines) in namelines.items():
if name and not name[0].isupper():
errors.append(self.Error(lines[-1], self.helptext.format(name=name)))

View File

@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckNamedTask(RuleBase):
rid = "ANS106"
class CheckNamedTask(StandardBase):
sid = "ANSIBLE0006"
description = "Tasks and handlers must be named"
helptext = "module `{module}` used without or empty `name` attribute"
helptext = "module '{module}' used without or empty `name` attribute"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckNativeYaml(RuleBase):
rid = "YML108"
class CheckNativeYaml(StandardBase):
sid = "LINT0008"
description = "Use YAML format for tasks and handlers rather than key=value"
helptext = "task arguments appear to be in key value rather than YAML format"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# Author: Adrián Tóth <adtoth@redhat.com>
#
# Copyright (c) 2020, Red Hat, Inc.
@ -21,16 +22,18 @@
# THE SOFTWARE.
import re
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckNestedJinja(RuleBase):
rid = "ANS123"
class CheckNestedJinja(StandardBase):
sid = "ANSIBLE0023"
description = "Don't use nested Jinja2 pattern"
helptext = (
"there should not be any nested jinja pattern "
"like `{{ list_one + {{ list_two | max }} }}`"
)
version = "0.2"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
def check(self, candidate, settings):
@ -48,7 +51,7 @@ class CheckNestedJinja(RuleBase):
for item in match:
matches.append((i, item))
for i, _ in matches:
for i, line in matches:
errors.append(self.Error(i, self.helptext))
return self.Result(candidate.path, errors)

View File

@ -1,12 +1,14 @@
# Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp>
# Copyright (c) 2018, Ansible Project
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckRelativeRolePaths(RuleBase):
rid = "ANS125"
class CheckRelativeRolePaths(StandardBase):
sid = "ANSIBLE0025"
description = "Don't use a relative path in a role"
helptext = "`copy` and `template` modules don't need relative path for `src`"
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -24,7 +26,7 @@ class CheckRelativeRolePaths(RuleBase):
path_to_check = None
if module in module_to_path_folder and "src" in task["action"]:
path_to_check = f"../{module_to_path_folder[module]}"
path_to_check = "../{}".format(module_to_path_folder[module])
if path_to_check and path_to_check in task["action"]["src"]:
errors.append(self.Error(task["__line__"], self.helptext))

View File

@ -1,12 +1,14 @@
from ansible.parsing.yaml.objects import AnsibleMapping
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckScmInSrc(RuleBase):
rid = "ANS105"
class CheckScmInSrc(StandardBase):
sid = "ANSIBLE0005"
description = "Use `scm:` key rather than `src: scm+url`"
helptext = "usage of `src: scm+url` not recommended"
version = "0.1"
types = ["rolesfile"]
def check(self, candidate, settings):
@ -14,11 +16,8 @@ class CheckScmInSrc(RuleBase):
if not errors:
for role in roles:
if (
isinstance(role, AnsibleMapping)
and bool(role.get("src"))
and "+" in role.get("src")
):
errors.append(self.Error(role["__line__"], self.helptext))
if isinstance(role, AnsibleMapping):
if "+" in role.get("src"):
errors.append(self.Error(role["__line__"], self.helptext))
return self.Result(candidate.path, errors)

View File

@ -1,10 +1,14 @@
from ansiblelater.rule import RuleBase
import re
from ansiblelater.standard import StandardBase
class CheckShellInsteadCommand(RuleBase):
rid = "ANS110"
class CheckShellInsteadCommand(StandardBase):
sid = "ANSIBLE0010"
description = "Shell should only be used when essential"
helptext = "shell should only be used when piping, redirecting or chaining commands"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -18,8 +22,13 @@ class CheckShellInsteadCommand(RuleBase):
if "executable" in task["action"]:
continue
cmd = self.get_safe_cmd(task)
if not any(ch in cmd for ch in self.SHELL_PIPE_CHARS):
if "cmd" in task["action"]:
cmd = task["action"].get("cmd", [])
else:
cmd = " ".join(task["action"].get("__ansible_arguments__", []))
unjinja = re.sub(r"\{\{[^\}]*\}\}", "JINJA_VAR", cmd)
if not any(ch in unjinja for ch in "&|<>;$\n*[]{}?"):
errors.append(self.Error(task["__line__"], self.helptext))
return self.Result(candidate.path, errors)
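
The heuristic from the v2.0.1 side of this hunk, condensed into a standalone sketch with invented commands; Jinja expressions are masked first so template variables do not count as shell metacharacters.

import re

def needs_shell(cmd):
    unjinja = re.sub(r"\{\{[^\}]*\}\}", "JINJA_VAR", cmd)
    return any(ch in unjinja for ch in "&|<>;$\n*[]{}?")

print(needs_shell("cat /etc/passwd"))                  # False -> plain command is enough
print(needs_shell("cat /etc/passwd | grep root"))      # True  -> piping justifies shell
print(needs_shell("systemctl restart {{ service }}"))  # False -> braces were masked away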

View File

@ -1,13 +1,15 @@
import re
from collections import defaultdict
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckTaskSeparation(RuleBase):
rid = "ANS101"
class CheckTaskSeparation(StandardBase):
sid = "ANSIBLE0001"
description = "Single tasks should be separated by empty line"
helptext = "missing task separation (required: 1 empty line)"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,12 +1,14 @@
from collections import defaultdict
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckUniqueNamedTask(RuleBase):
rid = "ANS103"
class CheckUniqueNamedTask(StandardBase):
sid = "ANSIBLE0003"
description = "Tasks and handlers must be uniquely named within a single file"
helptext = "name `{name}` appears multiple times"
helptext = "name '{name}' appears multiple times"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):
@ -18,7 +20,7 @@ class CheckUniqueNamedTask(RuleBase):
for task in tasks:
if "name" in task:
namelines[task["name"]].append(task["__line__"])
for name, lines in namelines.items():
for (name, lines) in namelines.items():
if name and len(lines) > 1:
errors.append(self.Error(lines[-1], self.helptext.format(name=name)))

View File

@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckWhenFormat(RuleBase):
rid = "ANS122"
class CheckWhenFormat(StandardBase):
sid = "ANSIBLE0022"
description = "Don't use Jinja2 in when"
helptext = (
"`when` is a raw Jinja2 expression, redundant `{{ }}` should be removed from variable(s)"
"`when` is a raw Jinja2 expression, redundant {{ }} "
"should be removed from variable(s)"
)
version = "0.2"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlColons(RuleBase):
rid = "YML105"
class CheckYamlColons(StandardBase):
sid = "LINT0005"
description = "YAML should use consistent number of spaces around colons"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{colons: {settings['yamllint']['colons']}}}"
options = "rules: {{colons: {conf}}}".format(conf=settings["yamllint"]["colons"])
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)
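
A sketch of how these yamllint-backed rules turn a settings fragment into an inline yamllint configuration, assuming yamllint is installed; the settings dict and the sample document are invented.

from yamllint import linter
from yamllint.config import YamlLintConfig

settings = {"yamllint": {"colons": {"max-spaces-before": 0, "max-spaces-after": 1}}}
options = "rules: {{colons: {conf}}}".format(conf=settings["yamllint"]["colons"])

document = "---\nkey : value\n"
for problem in linter.run(document, YamlLintConfig(options)):
    print(problem.line, problem.desc)  # reports the extra space before the colon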

View File

@ -1,13 +1,17 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlDocumentEnd(RuleBase):
rid = "YML109"
description = "YAML document end marker should match configuration"
class CheckYamlDocumentEnd(StandardBase):
sid = "LINT0009"
description = "YAML should contain document end marker"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{document-end: {settings['yamllint']['document-end']}}}"
options = "rules: {{document-end: {conf}}}".format(
conf=settings["yamllint"]["document-end"]
)
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)

View File

@ -1,13 +1,17 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlDocumentStart(RuleBase):
rid = "YML104"
description = "YAML document start marker should match configuration"
class CheckYamlDocumentStart(StandardBase):
sid = "LINT0004"
description = "YAML should contain document start marker"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{document-start: {settings['yamllint']['document-start']}}}"
options = "rules: {{document-start: {conf}}}".format(
conf=settings["yamllint"]["document-start"]
)
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)

View File

@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlEmptyLines(RuleBase):
rid = "YML101"
class CheckYamlEmptyLines(StandardBase):
sid = "LINT0001"
description = "YAML should not contain unnecessarily empty lines"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{empty-lines: {settings['yamllint']['empty-lines']}}}"
options = "rules: {{empty-lines: {conf}}}".format(conf=settings["yamllint"]["empty-lines"])
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)

View File

@ -1,12 +1,14 @@
import os
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlFile(RuleBase):
rid = "YML106"
class CheckYamlFile(StandardBase):
sid = "LINT0006"
description = "Roles file should be in yaml format"
helptext = "file does not have a .yml extension"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -1,10 +1,12 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlHasContent(RuleBase):
rid = "YML107"
class CheckYamlHasContent(StandardBase):
sid = "LINT0007"
description = "Files should contain useful content"
helptext = "the file appears to have no useful content"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "defaults", "meta"]
def check(self, candidate, settings):

View File

@ -1,13 +1,15 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlHyphens(RuleBase):
rid = "YML103"
class CheckYamlHyphens(StandardBase):
sid = "LINT0003"
description = "YAML should use consistent number of spaces after hyphens"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{hyphens: {settings['yamllint']['hyphens']}}}"
options = "rules: {{hyphens: {conf}}}".format(conf=settings["yamllint"]["hyphens"])
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)

View File

@ -1,13 +1,17 @@
from ansiblelater.rule import RuleBase
from ansiblelater.standard import StandardBase
class CheckYamlIndent(RuleBase):
rid = "YML102"
class CheckYamlIndent(StandardBase):
sid = "LINT0002"
description = "YAML should be correctly indented"
version = "0.1"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{document-start: {settings['yamllint']['document-start']}}}"
options = "rules: {{document-start: {conf}}}".format(
conf=settings["yamllint"]["document-start"]
)
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)

View File

@ -1,13 +0,0 @@
from ansiblelater.rule import RuleBase
class CheckYamlOctalValues(RuleBase):
rid = "YML110"
description = "YAML implicit/explicit octal value should match configuration"
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
def check(self, candidate, settings):
options = f"rules: {{octal-values: {settings['yamllint']['octal-values']}}}"
errors = self.run_yamllint(candidate, options)
return self.Result(candidate.path, errors)

View File

@ -1,6 +1,5 @@
"""Global settings object definition."""
import importlib.resources
import os
import anyconfig
@ -8,6 +7,7 @@ import jsonschema.exceptions
import pathspec
from appdirs import AppDirs
from jsonschema._utils import format_as_index
from pkg_resources import resource_filename
from ansiblelater import utils
@ -15,7 +15,7 @@ config_dir = AppDirs("ansible-later").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")
class Settings:
class Settings(object):
"""
Create an object with all necessary settings.
@ -25,13 +25,14 @@ class Settings:
- provides cli parameters
"""
def __init__(self, args, config_file=default_config_file):
def __init__(self, args={}, config_file=default_config_file):
"""
Initialize a new settings class.
:param args: An optional dict of options, arguments and commands from the CLI.
:param config_file: An optional path to a yaml config file.
:returns: None
"""
self.config_file = config_file
self.schema = None
@ -41,9 +42,6 @@ class Settings:
self._update_filelist()
def _set_args(self, args):
if args is None:
args = {}
defaults = self._get_defaults()
self.config_file = args.get("config_file") or default_config_file
@ -104,13 +102,13 @@ class Settings:
if f not in defaults["ansible"]["custom_modules"]:
defaults["ansible"]["custom_modules"].append(f)
if defaults["rules"]["builtin"]:
ref = importlib.resources.files("ansiblelater") / "rules"
with importlib.resources.as_file(ref) as path:
defaults["rules"]["dir"].append(path)
if defaults["rules"]["buildin"]:
defaults["rules"]["standards"].append(
os.path.join(resource_filename("ansiblelater", "rules"))
)
defaults["rules"]["dir"] = [
os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["dir"]
defaults["rules"]["standards"] = [
os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["standards"]
]
return defaults
@ -118,20 +116,18 @@ class Settings:
def _get_defaults(self):
defaults = {
"rules": {
"builtin": True,
"dir": [],
"include_filter": [],
"buildin": True,
"standards": [],
"filter": [],
"exclude_filter": [],
"warning_filter": [
"ANS128",
"ANS999",
],
"warning_filter": ["ANSIBLE9999"],
"ignore_dotfiles": True,
"exclude_files": [],
"version": ""
},
"logging": {
"level": "WARNING",
"json": False,
"json": False
},
"ansible": {
"custom_modules": [],
@ -144,7 +140,7 @@ class Settings:
"exclude": [
"meta",
"debug",
"block/always/rescue",
"block",
"include_role",
"import_role",
"include_tasks",
@ -168,21 +164,17 @@ class Settings:
"indent-sequences": True,
},
"hyphens": {
"max-spaces-after": 1,
"max-spaces-after": 1
},
"document-start": {
"present": True,
"present": True
},
"document-end": {
"present": False,
"present": True
},
"colons": {
"max-spaces-before": 0,
"max-spaces-after": 1,
},
"octal-values": {
"forbid-implicit-octal": True,
"forbid-explicit-octal": True,
"max-spaces-after": 1
},
},
}
@ -196,16 +188,14 @@ class Settings:
anyconfig.validate(config, self.schema, ac_schema_safe=False)
return True
except jsonschema.exceptions.ValidationError as e:
validator = e.validator
path = format_as_index(
next(iter(e.absolute_path)),
list(e.absolute_path)[1:],
)
msg = e.message
utils.sysexit_with_message(
schema_error = (
"Error while loading configuration:\n"
f"Failed validating '{validator}' at {path}: {msg}"
"Failed validating '{validator}' in schema{schema}"
).format(
validator=e.validator, schema=format_as_index(list(e.relative_schema_path)[:-1])
)
utils.sysexit_with_message(
"{schema}: {msg}".format(schema=schema_error, msg=e.message)
)
def _update_filelist(self):
@ -213,14 +203,13 @@ class Settings:
excludes = self.config["rules"]["exclude_files"]
ignore_dotfiles = self.config["rules"]["ignore_dotfiles"]
if ignore_dotfiles:
if ignore_dotfiles and not self.args_files:
excludes.append(".*")
if self.args_files:
else:
del excludes[:]
filelist = []
for root, _dirs, files in os.walk("."):
for root, dirs, files in os.walk("."):
for filename in files:
filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename))))
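
A minimal sketch of how the pathspec import at the top of this module is typically used to apply the exclude_files/ignore_dotfiles settings when building the file list; the patterns and paths here are invented.

import pathspec

excludes = [".*", "*.j2"]  # hypothetical exclude patterns: dotfiles and Jinja2 templates
spec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)

candidates = ["tasks/main.yml", ".drone.jsonnet", "templates/app.conf.j2"]
print([f for f in candidates if not spec.match_file(f)])  # ['tasks/main.yml']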

View File

@ -1,90 +1,98 @@
"""Rule definition."""
"""Standard definition."""
import codecs
import copy
import importlib
import inspect
import os
import pathlib
import re
from abc import ABCMeta, abstractmethod
from abc import ABCMeta
from abc import abstractmethod
from collections import defaultdict
from urllib.parse import urlparse
import toolz
import yaml
from yamllint import linter
from yamllint.config import YamlLintConfig
from ansiblelater.exceptions import LaterAnsibleError, LaterError
from ansiblelater.utils import Singleton, sysexit_with_message
from ansiblelater.utils.yamlhelper import (
UnsafeTag,
VaultTag,
action_tasks,
normalize_task,
normalized_yaml,
parse_yaml_linenumbers,
)
from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
from ansiblelater.utils import Singleton
from ansiblelater.utils import sysexit_with_message
from ansiblelater.utils.yamlhelper import UnsafeTag
from ansiblelater.utils.yamlhelper import VaultTag
from ansiblelater.utils.yamlhelper import action_tasks
from ansiblelater.utils.yamlhelper import normalize_task
from ansiblelater.utils.yamlhelper import normalized_yaml
from ansiblelater.utils.yamlhelper import parse_yaml_linenumbers
class RuleMeta(type):
def __call__(cls, *args):
class StandardMeta(type):
def __call__(cls, *args, **kwargs):
mcls = type.__call__(cls, *args)
mcls.rid = cls.rid
mcls.description = getattr(cls, "description", "__unknown__")
mcls.helptext = getattr(cls, "helptext", "")
mcls.types = getattr(cls, "types", [])
setattr(mcls, "sid", cls.sid)
setattr(mcls, "description", getattr(cls, "description", "__unknown__"))
setattr(mcls, "helptext", getattr(cls, "helptext", ""))
setattr(mcls, "version", getattr(cls, "version", None))
setattr(mcls, "types", getattr(cls, "types", []))
return mcls
class RuleExtendedMeta(RuleMeta, ABCMeta):
class StandardExtendedMeta(StandardMeta, ABCMeta):
pass
class RuleBase(metaclass=RuleExtendedMeta):
SHELL_PIPE_CHARS = "&|<>;$\n*[]{}?"
class StandardBase(object, metaclass=StandardExtendedMeta):
@property
@abstractmethod
def rid(self):
def sid(self):
pass
@abstractmethod
def check(self, candidate, settings):
pass
def __repr__(self):
return f"Rule: {self.description} (types: {self.types})"
def __repr__(self): # noqa
return "Standard: {description} (version: {version}, types: {types})".format(
description=self.description, version=self.version, types=self.types
)
@staticmethod
def get_tasks(candidate, settings): # noqa
def get_tasks(candidate, settings):
errors = []
yamllines = []
if not candidate.faulty:
try:
with open(candidate.path, encoding="utf-8") as f:
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path)
except LaterError as ex:
e = ex.original
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True
return yamllines, errors
@staticmethod
def get_action_tasks(candidate, settings): # noqa
def get_action_tasks(candidate, settings):
tasks = []
errors = []
if not candidate.faulty:
try:
with open(candidate.path, encoding="utf-8") as f:
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path)
if yamllines:
@ -92,11 +100,13 @@ class RuleBase(metaclass=RuleExtendedMeta):
except LaterError as ex:
e = ex.original
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
errors.append(StandardBase.Error(e.line, "syntax error: {}".format(e.message)))
candidate.faulty = True
return tasks, errors
@ -114,11 +124,15 @@ class RuleBase(metaclass=RuleExtendedMeta):
except LaterError as ex:
e = ex.original
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True
return normalized, errors
@ -130,7 +144,7 @@ class RuleBase(metaclass=RuleExtendedMeta):
if not candidate.faulty:
try:
with open(candidate.path, encoding="utf-8") as f:
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path)
if yamllines:
@ -149,27 +163,30 @@ class RuleBase(metaclass=RuleExtendedMeta):
# No need to normalize_task if we are skipping it.
continue
normalized_task = normalize_task(
task, candidate.path, settings["ansible"]["custom_modules"]
normalized.append(
normalize_task(
task, candidate.path, settings["ansible"]["custom_modules"]
)
)
normalized_task["__raw_task__"] = task
normalized.append(normalized_task)
except LaterError as ex:
e = ex.original
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True
return normalized, errors
@staticmethod
def get_normalized_yaml(candidate, settings, options=None): # noqa
def get_normalized_yaml(candidate, settings, options=None):
errors = []
yamllines = []
@ -184,23 +201,27 @@ class RuleBase(metaclass=RuleExtendedMeta):
except LaterError as ex:
e = ex.original
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
except LaterAnsibleError as e:
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
errors.append(
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
)
candidate.faulty = True
return yamllines, errors
@staticmethod
def get_raw_yaml(candidate, settings): # noqa
def get_raw_yaml(candidate, settings):
content = None
errors = []
if not candidate.faulty:
try:
with open(candidate.path, encoding="utf-8") as f:
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
yaml.add_constructor(
UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader
)
@ -210,7 +231,9 @@ class RuleBase(metaclass=RuleExtendedMeta):
content = yaml.safe_load(f)
except yaml.YAMLError as e:
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
@ -222,17 +245,16 @@ class RuleBase(metaclass=RuleExtendedMeta):
if not candidate.faulty:
try:
with open(candidate.path, encoding="utf-8") as f:
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
for problem in linter.run(f, YamlLintConfig(options)):
errors.append(RuleBase.Error(problem.line, problem.desc))
errors.append(StandardBase.Error(problem.line, problem.desc))
except yaml.YAMLError as e:
errors.append(
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
StandardBase.Error(
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
)
)
candidate.faulty = True
except (TypeError, ValueError) as e:
errors.append(RuleBase.Error(None, f"yamllint error: {e}"))
candidate.faulty = True
return errors
@ -247,26 +269,10 @@ class RuleBase(metaclass=RuleExtendedMeta):
return first_cmd_arg
@staticmethod
def get_safe_cmd(task):
if "cmd" in task["action"]:
cmd = task["action"].get("cmd", "")
else:
cmd = " ".join(task["action"].get("__ansible_arguments__", []))
cmd = re.sub(r"{{.+?}}", "JINJA_EXPRESSION", cmd)
cmd = re.sub(r"{%.+?%}", "JINJA_STATEMENT", cmd)
cmd = re.sub(r"{#.+?#}", "JINJA_COMMENT", cmd)
parts = cmd.split()
parts = [p if not urlparse(p.strip('"').strip("'")).scheme else "URL" for p in parts]
return " ".join(parts)
class Error:
class Error(object):
"""Default error object created if a rule failed."""
def __init__(self, lineno, message, **kwargs):
def __init__(self, lineno, message, error_type=None, **kwargs):
"""
Initialize a new error object and returns None.
@ -277,21 +283,22 @@ class RuleBase(metaclass=RuleExtendedMeta):
self.lineno = lineno
self.message = message
self.kwargs = kwargs
for key, value in kwargs.items():
for (key, value) in kwargs.items():
setattr(self, key, value)
def __repr__(self):
def __repr__(self): # noqa
if self.lineno:
return f"{self.lineno}: {self.message}"
return f" {self.message}"
return "{no}: {msg}".format(no=self.lineno, msg=self.message)
else:
return " {msg}".format(msg=self.message)
def to_dict(self):
result = {"lineno": self.lineno, "message": self.message}
for key, value in self.kwargs.items():
result = dict(lineno=self.lineno, message=self.message)
for (key, value) in self.kwargs.items():
result[key] = value
return result
class Result:
class Result(object):
"""Generic result object."""
def __init__(self, candidate, errors=None):
@ -299,10 +306,11 @@ class RuleBase(metaclass=RuleExtendedMeta):
self.errors = errors or []
def message(self):
return "\n".join([f"{self.candidate}:{error}" for error in self.errors])
return "\n".join(["{0}:{1}".format(self.candidate, error) for error in self.errors])
class RulesLoader:
class StandardLoader():
def __init__(self, source):
self.rules = []
@ -318,33 +326,37 @@ class RulesLoader:
try:
spec.loader.exec_module(module)
except (ImportError, NameError) as e:
sysexit_with_message(f"Failed to load roles file {filename}: \n {e!s}")
sysexit_with_message(
"Failed to load roles file {module}: \n {msg}".format(
msg=str(e), module=filename
)
)
try:
for _name, obj in inspect.getmembers(module):
for name, obj in inspect.getmembers(module):
if self._is_plugin(obj):
self.rules.append(obj())
except TypeError as e:
sysexit_with_message(f"Failed to load roles file: \n {e!s}")
sysexit_with_message("Failed to load roles file: \n {msg}".format(msg=str(e)))
self.validate()
def _is_plugin(self, obj):
return (
inspect.isclass(obj) and issubclass(obj, RuleBase) and obj is not RuleBase and not None
)
return inspect.isclass(obj) and issubclass(
obj, StandardBase
) and obj is not StandardBase and not None
def validate(self):
normalize_rule = list(toolz.remove(lambda x: x.rid == "", self.rules))
unique_rule = len(list(toolz.unique(normalize_rule, key=lambda x: x.rid)))
all_rules = len(normalize_rule)
if all_rules != unique_rule:
normalized_std = (list(toolz.remove(lambda x: x.sid == "", self.rules)))
unique_std = len(list(toolz.unique(normalized_std, key=lambda x: x.sid)))
all_std = len(normalized_std)
if not all_std == unique_std:
sysexit_with_message(
"Found duplicate tags in rules definition. Please use unique tags only."
"Detect duplicate ID's in standards definition. Please use unique ID's only."
)
class SingleRules(RulesLoader, metaclass=Singleton):
class SingleStandards(StandardLoader, metaclass=Singleton):
"""Singleton config class."""
pass
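
A condensed, standalone version of the loader validation shown above (using the v2.0.1 naming); FakeStandard is a minimal stand-in for real rule objects.

import toolz

class FakeStandard:
    def __init__(self, sid):
        self.sid = sid

rules = [FakeStandard("ANSIBLE0001"), FakeStandard("ANSIBLE0002"),
         FakeStandard(""), FakeStandard("ANSIBLE0001")]

named = list(toolz.remove(lambda x: x.sid == "", rules))   # ignore rules without an ID
unique = len(list(toolz.unique(named, key=lambda x: x.sid)))

if len(named) != unique:
    print("Detected duplicate IDs in standards definition. Please use unique IDs only.")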

View File

@ -22,8 +22,10 @@ def test_critical(capsys, mocker):
_, stderr = capsys.readouterr()
print(
f"{colorama.Fore.RED}{colorama.Style.BRIGHT}CRITICAL:{colorama.Style.NORMAL} foo\n"
f"{colorama.Style.RESET_ALL}"
"{}{}CRITICAL:{} foo\n{}".format(
colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
)
x, _ = capsys.readouterr()
@ -36,8 +38,10 @@ def test_error(capsys, mocker):
_, stderr = capsys.readouterr()
print(
f"{colorama.Fore.RED}{colorama.Style.BRIGHT}ERROR:{colorama.Style.NORMAL} foo\n"
f"{colorama.Style.RESET_ALL}"
"{}{}ERROR:{} foo\n{}".format(
colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
)
x, _ = capsys.readouterr()
@ -50,8 +54,10 @@ def test_warn(capsys, mocker):
stdout, _ = capsys.readouterr()
print(
f"{colorama.Fore.YELLOW}{colorama.Style.BRIGHT}WARNING:{colorama.Style.NORMAL} foo\n"
f"{colorama.Style.RESET_ALL}"
"{}{}WARNING:{} foo\n{}".format(
colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
)
x, _ = capsys.readouterr()
@ -64,8 +70,10 @@ def test_info(capsys, mocker):
stdout, _ = capsys.readouterr()
print(
f"{colorama.Fore.BLUE}{colorama.Style.BRIGHT}INFO:{colorama.Style.NORMAL} foo\n"
f"{colorama.Style.RESET_ALL}"
"{}{}INFO:{} foo\n{}".format(
colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.NORMAL,
colorama.Style.RESET_ALL
)
)
x, _ = capsys.readouterr()

View File

@ -1,13 +1,14 @@
"""Global utils collection."""
from __future__ import print_function
import contextlib
import os
import re
import sys
from contextlib import suppress
from functools import lru_cache
from distutils.version import LooseVersion
import yaml
from ansible.plugins.loader import module_loader
from ansiblelater import logger
@ -23,17 +24,32 @@ def count_spaces(c_string):
leading_spaces = 0
trailing_spaces = 0
for _i, e in enumerate(c_string):
for i, e in enumerate(c_string):
if not e.isspace():
break
leading_spaces += 1
for _i, e in reversed(list(enumerate(c_string))):
for i, e in reversed(list(enumerate(c_string))):
if not e.isspace():
break
trailing_spaces += 1
return (leading_spaces, trailing_spaces)
return ((leading_spaces, trailing_spaces))
def get_property(prop):
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
result = re.search(
r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
open(os.path.join(parentdir, "__init__.py")).read()
)
return result.group(1)
def standards_latest(standards):
return max([standard.version for standard in standards if standard.version] or ["0.1"],
key=LooseVersion)
def lines_ranges(lines_spec):
@ -58,8 +74,10 @@ def safe_load(string):
:returns: dict
"""
with suppress(yaml.scanner.ScannerError):
try:
return yaml.safe_load(string) or {}
except yaml.scanner.ScannerError as e:
print(str(e))
@contextlib.contextmanager
@ -78,24 +96,14 @@ def open_file(filename, mode="r"):
def add_dict_branch(tree, vector, value):
key = vector[0]
tree[key] = (
value if len(vector) == 1 else add_dict_branch(tree.get(key, {}), vector[1:], value)
)
tree[key] = value \
if len(vector) == 1 \
else add_dict_branch(tree[key] if key in tree else {},
vector[1:],
value)
return tree
def has_jinja(value):
"""Return true if a string seems to contain jinja templating."""
re_has_jinja = re.compile(r"{[{%#].*[%#}]}", re.DOTALL)
return bool(isinstance(value, str) and re_has_jinja.search(value))
def has_glob(value):
"""Return true if a string looks like having a glob pattern."""
re_has_glob = re.compile("[][*?]")
return bool(isinstance(value, str) and re_has_glob.search(value))
def sysexit(code=1):
sys.exit(code)
@ -112,23 +120,5 @@ class Singleton(type):
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super().__call__(*args, **kwargs)
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
@lru_cache
def load_plugin(name):
"""Return loaded ansible plugin/module."""
loaded_module = module_loader.find_plugin_with_context(
name,
ignore_deprecated=True,
check_aliases=True,
)
if not loaded_module.resolved and name.startswith("ansible.builtin."):
# fallback to core behavior of using legacy
loaded_module = module_loader.find_plugin_with_context(
name.replace("ansible.builtin.", "ansible.legacy."),
ignore_deprecated=True,
check_aliases=True,
)
return loaded_module
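
A usage example for the add_dict_branch helper reformatted in this hunk, showing how a dotted option path could be folded into the nested settings dict; the option names are made up.

def add_dict_branch(tree, vector, value):
    key = vector[0]
    tree[key] = (
        value if len(vector) == 1 else add_dict_branch(tree.get(key, {}), vector[1:], value)
    )
    return tree

config = {}
add_dict_branch(config, ["rules", "warning_filter"], ["ANS999"])
add_dict_branch(config, ["rules", "builtin"], True)
print(config)  # {'rules': {'warning_filter': ['ANS999'], 'builtin': True}}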

View File

@ -21,13 +21,15 @@
# THE SOFTWARE.
import codecs
import glob
import imp
import os
from contextlib import suppress
import ansible.parsing.mod_args
import yaml
from ansible import constants
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.errors import AnsibleError
from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor
@ -35,7 +37,8 @@ from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import Templar
from yaml.composer import Composer
from ansiblelater.exceptions import LaterAnsibleError, LaterError
from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
try:
# Try to import the Ansible 2 module first, it's the future-proof one
@ -65,9 +68,7 @@ def ansible_template(basedir, varname, templatevars, **kwargs):
try:
from ansible.plugins.loader import init_plugin_loader, module_loader
init_plugin_loader()
from ansible.plugins import module_loader
except ImportError:
from ansible.plugins.loader import module_loader
@ -128,6 +129,24 @@ BLOCK_NAME_TO_ACTION_TYPE_MAP = {
}
def load_plugins(directory):
result = []
fh = None
for pluginfile in glob.glob(os.path.join(directory, "[A-Za-z]*.py")):
pluginname = os.path.basename(pluginfile.replace(".py", ""))
try:
fh, filename, desc = imp.find_module(pluginname, [directory])
mod = imp.load_module(pluginname, fh, filename, desc)
obj = getattr(mod, pluginname)()
result.append(obj)
finally:
if fh:
fh.close()
return result
def tokenize(line):
tokens = line.lstrip().split(" ")
if tokens[0] == "-":
@ -136,8 +155,8 @@ def tokenize(line):
tokens = tokens[1:]
command = tokens[0].replace(":", "")
args = []
kwargs = {}
args = list()
kwargs = dict()
nonkvfound = False
for arg in tokens[1:]:
if "=" in arg and not nonkvfound:
@ -152,11 +171,10 @@ def tokenize(line):
def _playbook_items(pb_data):
if isinstance(pb_data, dict):
return pb_data.items()
if not pb_data:
elif not pb_data:
return []
return [item for play in pb_data for item in play.items()]
else:
return [item for play in pb_data for item in play.items()]
def find_children(playbook, playbook_dir):
@ -168,7 +186,7 @@ def find_children(playbook, playbook_dir):
try:
playbook_ds = parse_yaml_from_file(playbook[0])
except AnsibleError as e:
raise SystemExit(str(e)) from e
raise SystemExit(str(e))
results = []
basedir = os.path.dirname(playbook[0])
items = _playbook_items(playbook_ds)
@ -176,7 +194,7 @@ def find_children(playbook, playbook_dir):
for child in play_children(basedir, item, playbook[1], playbook_dir):
if "$" in child["path"] or "{{" in child["path"]:
continue
valid_tokens = []
valid_tokens = list()
for token in split_args(child["path"]):
if "=" in token:
break
@ -187,18 +205,20 @@ def find_children(playbook, playbook_dir):
def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
# Hack to skip the following exception when using to_json filter on a variable.
# I guess the filter doesn't like empty vars...
with suppress(AnsibleError, ValueError):
return ansible_template(
os.path.abspath(basedir),
value,
variables,
**dict(kwargs, fail_on_undefined=fail_on_undefined),
try:
value = ansible_template(
os.path.abspath(basedir), value, variables,
**dict(kwargs, fail_on_undefined=fail_on_undefined)
)
# Hack to skip the following exception when using to_json filter on a variable.
# I guess the filter doesn't like empty vars...
except (AnsibleError, ValueError):
# templating failed, so just keep value as is.
pass
return value
def play_children(basedir, item, parent_type):
def play_children(basedir, item, parent_type, playbook_dir):
delegate_map = {
"tasks": _taskshandlers_children,
"pre_tasks": _taskshandlers_children,
@ -214,20 +234,21 @@ def play_children(basedir, item, parent_type):
play_library = os.path.join(os.path.abspath(basedir), "library")
_load_library_if_exists(play_library)
if k in delegate_map and v:
v = template(
os.path.abspath(basedir),
v,
{"playbook_dir": os.path.abspath(basedir)},
fail_on_undefined=False,
)
return delegate_map[k](basedir, k, v, parent_type)
if k in delegate_map:
if v:
v = template(
os.path.abspath(basedir),
v,
dict(playbook_dir=os.path.abspath(basedir)),
fail_on_undefined=False
)
return delegate_map[k](basedir, k, v, parent_type)
return []
def _include_children(basedir, k, v, parent_type):
# handle include: filename.yml tags=blah
(command, args, kwargs) = tokenize(f"{k}: {v}")
(command, args, kwargs) = tokenize("{0}: {1}".format(k, v))
result = path_dwim(basedir, args[0])
if not os.path.exists(result) and not basedir.endswith("tasks"):
@ -250,20 +271,18 @@ def _taskshandlers_children(basedir, k, v, parent_type):
results.extend(
_roles_children(
basedir,
k,
[th["import_role"].get("name")],
k, [th["import_role"].get("name")],
parent_type,
main=th["import_role"].get("tasks_from", "main"),
main=th["import_role"].get("tasks_from", "main")
)
)
elif "include_role" in th:
results.extend(
_roles_children(
basedir,
k,
[th["include_role"].get("name")],
k, [th["include_role"].get("name")],
parent_type,
main=th["include_role"].get("tasks_from", "main"),
main=th["include_role"].get("tasks_from", "main")
)
)
elif "block" in th:
@ -279,11 +298,14 @@ def append_children(taskhandler, basedir, k, parent_type, results):
# when taskshandlers_children is called for playbooks, the
# actual type of the included tasks is the section containing the
# include, e.g. tasks, pre_tasks, or handlers.
playbook_section = k if parent_type == "playbook" else parent_type
if parent_type == "playbook":
playbook_section = k
else:
playbook_section = parent_type
results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section})
def _roles_children(basedir, k, v, parent_type, main="main"): # noqa
def _roles_children(basedir, k, v, parent_type, main="main"):
results = []
for role in v:
if isinstance(role, dict):
@ -295,7 +317,10 @@ def _roles_children(basedir, k, v, parent_type, main="main"): # noqa
)
)
else:
raise SystemExit(f"role dict {role} does not contain a 'role' or 'name' key")
raise SystemExit(
"role dict {0} does not contain a 'role' "
"or 'name' key".format(role)
)
else:
results.extend(_look_for_role_files(basedir, role, main=main))
return results
@ -315,7 +340,7 @@ def _rolepath(basedir, role):
path_dwim(basedir, role),
# if included from roles/[role]/meta/main.yml
path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)),
path_dwim(basedir, os.path.join("..", "..", role)),
path_dwim(basedir, os.path.join("..", "..", role))
]
if constants.DEFAULT_ROLES_PATH:
@ -357,114 +382,93 @@ def rolename(filepath):
idx = filepath.find("roles/")
if idx < 0:
return ""
role = filepath[idx + 6 :]
return role[: role.find("/")]
role = filepath[idx + 6:]
role = role[:role.find("/")]
return role
def _kv_to_dict(v):
(command, args, kwargs) = tokenize(v)
return dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs)
return (dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs))
def normalize_task(task, filename, custom_modules=None):
def normalize_task(task, filename, custom_modules=[]):
"""Ensure tasks have an action key and strings are converted to python objects."""
def _normalize(task, custom_modules):
if custom_modules is None:
custom_modules = []
normalized = {}
ansible_parsed_keys = ("action", "local_action", "args", "delegate_to")
if is_nested_task(task):
_extract_ansible_parsed_keys_from_task(normalized, task, ansible_parsed_keys)
# Add dummy action for block/always/rescue statements
normalized["action"] = {
"__ansible_module__": "block/always/rescue",
"__ansible_module_original__": "block/always/rescue",
"__ansible_arguments__": "block/always/rescue",
}
return normalized
builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
builtin = list(set(builtin + custom_modules))
ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
mod_arg_parser = ModuleArgsParser(task)
try:
action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
except AnsibleParserError as e:
raise LaterAnsibleError(e) from e
# denormalize shell -> command conversion
if "_uses_shell" in arguments:
action = "shell"
del arguments["_uses_shell"]
for k, v in list(task.items()):
if k in ansible_parsed_keys or k == action:
# we don"t want to re-assign these values, which were
# determined by the ModuleArgsParser() above
continue
normalized[k] = v
# convert builtin fqn calls to short forms because most rules know only
# about short calls
normalized["action"] = {
"__ansible_module__": action.removeprefix("ansible.builtin."),
"__ansible_module_original__": action,
}
if "_raw_params" in arguments:
normalized["action"]["__ansible_arguments__"] = (
arguments["_raw_params"].strip().split()
)
del arguments["_raw_params"]
else:
normalized["action"]["__ansible_arguments__"] = []
normalized["action"].update(arguments)
return normalized
ansible_action_type = task.get("__ansible_action_type__", "task")
if "__ansible_action_type__" in task:
del (task["__ansible_action_type__"])
# temp. extract metadata
ansible_meta = {}
ansible_meta = dict()
for key in ["__line__", "__file__", "__ansible_action_meta__"]:
default = None
if key == "__ansible_action_meta__":
default = {}
default = dict()
ansible_meta[key] = task.pop(key, default)
ansible_action_type = task.get("__ansible_action_type__", "task")
if "__ansible_action_type__" in task:
del task["__ansible_action_type__"]
normalized = dict()
normalized = _normalize(task, custom_modules)
builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
builtin = list(set(builtin + custom_modules))
ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
mod_arg_parser = ModuleArgsParser(task)
try:
action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
except AnsibleParserError as e:
raise LaterAnsibleError("syntax error", e)
# denormalize shell -> command conversion
if "_uses_shell" in arguments:
action = "shell"
del (arguments["_uses_shell"])
for (k, v) in list(task.items()):
if k in ("action", "local_action", "args", "delegate_to") or k == action:
# we don"t want to re-assign these values, which were
# determined by the ModuleArgsParser() above
continue
else:
normalized[k] = v
normalized["action"] = dict(__ansible_module__=action)
if "_raw_params" in arguments:
normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].strip().split()
del (arguments["_raw_params"])
else:
normalized["action"]["__ansible_arguments__"] = list()
normalized["action"].update(arguments)
normalized[FILENAME_KEY] = filename
normalized["__ansible_action_type__"] = ansible_action_type
# add back extracted metadata
for k, v in ansible_meta.items():
for (k, v) in ansible_meta.items():
if v:
normalized[k] = v
return normalized
def action_tasks(yaml, candidate):
tasks = []
if candidate.filemeta in ["tasks", "handlers"]:
tasks = add_action_type(yaml, candidate.filemeta)
def action_tasks(yaml, file):
tasks = list()
if file["filetype"] in ["tasks", "handlers"]:
tasks = add_action_type(yaml, file["filetype"])
else:
tasks.extend(extract_from_list(yaml, ["tasks", "handlers", "pre_tasks", "post_tasks"]))
# Add sub-elements of block/rescue/always to tasks list
tasks.extend(extract_from_list(tasks, ["block", "rescue", "always"]))
# Remove block/rescue/always elements from tasks list
block_rescue_always = ("block", "rescue", "always")
tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)]
return tasks
allowed = ["include", "include_tasks", "import_playbook", "import_tasks"]
return [task for task in tasks if set(allowed).isdisjoint(task.keys())]
def task_to_str(task):
@ -472,19 +476,16 @@ def task_to_str(task):
if name:
return name
action = task.get("action")
args = " ".join(
[
f"{k}={v}"
for (k, v) in action.items()
if k not in ["__ansible_module__", "__ansible_arguments__"]
]
+ action.get("__ansible_arguments__")
)
return "{} {}".format(action["__ansible_module__"], args)
args = " ".join([
u"{0}={1}".format(k, v)
for (k, v) in action.items()
if k not in ["__ansible_module__", "__ansible_arguments__"]
] + action.get("__ansible_arguments__"))
return u"{0} {1}".format(action["__ansible_module__"], args)
def extract_from_list(blocks, candidates):
results = []
results = list()
for block in blocks:
for candidate in candidates:
delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]
@ -493,19 +494,18 @@ def extract_from_list(blocks, candidates):
meta_data = dict(block)
for key in delete_meta_keys:
meta_data.pop(key, None)
actions = add_action_type(block[candidate], candidate, meta_data)
results.extend(actions)
results.extend(add_action_type(block[candidate], candidate, meta_data))
elif block[candidate] is not None:
raise RuntimeError(
f"Key '{candidate}' defined, but bad value: '{block[candidate]!s}'"
"Key '{candidate}' defined, but bad value: '{block}'".format(
candidate=candidate, block=str(block[candidate])
)
)
return results
def add_action_type(actions, action_type, action_meta=None):
results = []
results = list()
for action in actions:
action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
if action_meta:
@ -533,7 +533,7 @@ def parse_yaml_linenumbers(data, filename):
try:
mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
except yaml.constructor.ConstructorError as e:
raise LaterError("syntax error", e) from e
raise LaterError("syntax error", e)
if hasattr(node, "__line__"):
mapping[LINE_NUMBER_KEY] = node.__line__
@ -548,15 +548,11 @@ def parse_yaml_linenumbers(data, filename):
loader.compose_node = compose_node
loader.construct_mapping = construct_mapping
data = loader.get_single_data() or []
except (
yaml.parser.ParserError,
yaml.scanner.ScannerError,
yaml.constructor.ConstructorError,
) as e:
raise LaterError("syntax error", e) from e
except yaml.composer.ComposerError as e:
e.problem = f"{e.context} {e.problem}"
raise LaterError("syntax error", e) from e
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
raise LaterError("syntax error", e)
except (yaml.composer.ComposerError) as e:
e.problem = "{} {}".format(e.context, e.problem)
raise LaterError("syntax error", e)
return data
@ -581,34 +577,14 @@ def normalized_yaml(file, options):
for line in removes:
lines.remove(line)
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
raise LaterError("syntax error", e) from e
raise LaterError("syntax error", e)
return lines
def is_nested_task(task):
"""Check if task includes block/always/rescue."""
# Cannot really trust the input
if isinstance(task, str):
return False
return any(task.get(key) for key in ["block", "rescue", "always"])
def _extract_ansible_parsed_keys_from_task(result, task, keys):
"""Return a dict with existing key in task."""
for k, v in list(task.items()):
if k in keys:
# we don't want to re-assign these values, which were
# determined by the ModuleArgsParser() above
continue
result[k] = v
return result
class UnsafeTag:
"""Handle custom yaml unsafe tag."""
yaml_tag = "!unsafe"
yaml_tag = u"!unsafe"
def __init__(self, value):
self.unsafe = value
@ -621,7 +597,7 @@ class UnsafeTag:
class VaultTag:
"""Handle custom yaml vault tag."""
yaml_tag = "!vault"
yaml_tag = u"!vault"
def __init__(self, value):
self.unsafe = value

21
codecov.yml Normal file
View File

@ -0,0 +1,21 @@
codecov:
require_ci_to_pass: true
coverage:
status:
project:
default:
target: auto
threshold: 5%
branches:
- main
if_ci_failed: error
informational: false
only_pulls: false
patch:
default:
target: auto
threshold: 5%
branches:
- main
if_ci_failed: error
only_pulls: false

View File

@ -1,4 +1,4 @@
FROM python:3.12-alpine@sha256:a982997504b8ec596f553d78f4de4b961bbdf5254e0177f6e99bb34f4ef16f95
FROM python:3.10-alpine@sha256:bed0a5fe5b7d996b0678abf4fb2f0d5f3d5078cda951ccdc068ffbddb555e727
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"

26
docker/Dockerfile.arm Normal file
View File

@ -0,0 +1,26 @@
FROM arm32v7/python:3.10-alpine@sha256:6ee764ac3084eb137c2c0a16b89a5a93606633bf22a46aea61ee45f8eab1db3a
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-later"
LABEL org.opencontainers.image.url="https://ansible-later.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-later"
LABEL org.opencontainers.image.documentation="https://ansible-later.geekdocs.de/"
ENV PY_COLORS=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
ADD dist/ansible_later-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev musl-dev python3-dev cargo && \
apk --update add git && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
apk del .build-deps && \
rm -f ansible_later-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/
USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-later"]

26
docker/Dockerfile.arm64 Normal file
View File

@ -0,0 +1,26 @@
FROM arm64v8/python:3.10-alpine@sha256:2c3e3fbd7e36aca4eee1e95e0d7c12022e580053dc61f7a86117bc666203686e
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-later"
LABEL org.opencontainers.image.url="https://ansible-later.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-later"
LABEL org.opencontainers.image.documentation="https://ansible-later.geekdocs.de/"
ENV PY_COLORS=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
ADD dist/ansible_later-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev musl-dev python3-dev cargo && \
apk --update add git && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
apk del .build-deps && \
rm -f ansible_later-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/
USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-later"]

24
docker/manifest-quay.tmpl Normal file
View File

@ -0,0 +1,24 @@
image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
platform:
architecture: amd64
os: linux
- image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
platform:
architecture: arm
os: linux
variant: v7

24
docker/manifest.tmpl Normal file
View File

@ -0,0 +1,24 @@
image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
platform:
architecture: amd64
os: linux
- image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
platform:
architecture: arm
os: linux
variant: v7

View File

@ -18,17 +18,11 @@ markup:
startLevel: 1
params:
description: >
ansible-later is a fast and extensible best practice scanner and linting tool for Ansible resources
to enforce a coding or best practice guideline.
images:
- "socialmedia2.png"
geekdocMenuBundle: true
geekdocToC: 3
geekdocRepo: https://github.com/thegeeklab/ansible-later
geekdocEditPath: edit/main/docs
geekdocEditPath: edit/main/docs/content
geekdocDateFormat: "Jan 2, 2006"
geekdocSearch: true

View File

@ -2,12 +2,13 @@
title: Documentation
---
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-later/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-later)
[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-later?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-later)
[![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later)
[![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later)
[![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/)
[![Codecov](https://img.shields.io/codecov/c/github/thegeeklab/ansible-later)](https://codecov.io/gh/thegeeklab/ansible-later)
[![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors)
[![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later)
[![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE)

View File

@ -1,17 +1,18 @@
---
title: Write a rule
title: Minimal standard checks
---
A typical rule check will look like:
A typical standards check will look like:
<!-- prettier-ignore-start -->
<!-- spellchecker-disable -->
{{< highlight Python "linenos=table" >}}
class CheckBecomeUser(RuleBase):
class CheckBecomeUser(StandardBase):
rid = "ANS115"
sid = "ANSIBLE0015"
description = "Become should be combined with become_user"
helptext = "the task has `become` enabled but `become_user` is missing"
version = "0.1"
types = ["playbook", "task", "handler"]
def check(self, candidate, settings):

View File

@ -13,4 +13,4 @@ Changes can be made in a YAML configuration file or via CLI options, which are p
Please note that YAML attributes are overwritten while YAML lists are merged in any configuration files.
To simplify the linting of individual files, e.g. for debugging purposes, ansible-later ignores the `exclude_files` and `ignore_dotfiles` options when files are passed to the CLI.
To simplify single file linting, e.g. for debugging purposes, ansible-later ignores the `exclude_files` and `ignore_dotfiles` options when only one file is passed to the CLI.
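
To illustrate the merge behaviour described above, here is a minimal sketch combining two hypothetical configuration sources. The key names (`rules.ignore_dotfiles`, `rules.exclude_files`) are taken from the default configuration reproduced later in this compare view; the split into a shared and a project-local file is only an assumption for the example, and which source wins for plain attributes follows the precedence rules referenced above.

{{< highlight yaml >}}
# Hypothetical first source, e.g. a shared configuration file.
rules:
  ignore_dotfiles: True
  exclude_files:
    - "molecule/"
---
# Hypothetical second source, e.g. a project-local override.
rules:
  # Plain attribute: overwritten, so only one of the two values takes effect.
  ignore_dotfiles: False
  # List: entries from both sources are kept (merged).
  exclude_files:
    - "files/"
{{< /highlight >}}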

View File

@ -8,27 +8,28 @@ You can get all available CLI options by running `ansible-later --help`:
<!-- spellchecker-disable -->
{{< highlight Shell "linenos=table" >}}
$ ansible-later --help
usage: ansible-later [-h] [-c CONFIG] [-r DIR] [-B] [-i TAGS] [-x TAGS] [-v] [-q] [-V] [rules.files ...]
usage: ansible-later [-h] [-c CONFIG_FILE] [-r RULES.STANDARDS]
[-s RULES.FILTER] [-v] [-q] [--version]
[rules.files [rules.files ...]]
Validate Ansible files against best practice guideline
positional arguments:
rules.files
options:
optional arguments:
-h, --help show this help message and exit
-c CONFIG, --config CONFIG
path to configuration file
-r DIR, --rules-dir DIR
directory of rules
-B, --no-builtin disables built-in rules
-i TAGS, --include-rules TAGS
limit rules to given id/tags
-x TAGS, --exclude-rules TAGS
exclude rules by given it/tags
-c CONFIG_FILE, --config CONFIG_FILE
location of configuration file
-r RULES.STANDARDS, --rules RULES.STANDARDS
location of standards rules
-s RULES.FILTER, --standards RULES.FILTER
limit standards to given ID's
-x RULES.EXCLUDE_FILTER, --exclude-standards RULES.EXCLUDE_FILTER
exclude standards by given ID's
-v increase log level
-q decrease log level
-V, --version show program's version number and exit
--version show program's version number and exit
{{< /highlight >}}
<!-- spellchecker-enable -->
<!-- prettier-ignore-end -->

View File

@ -11,37 +11,37 @@ The default configuration is used if no other value is specified. Each option ca
---
ansible:
# Add the name of used custom Ansible modules. Otherwise ansible-later
# can't detect unknown modules and will throw an error.
# can't detect unknown modules and will through an error.
# Modules which are bundled with the role and placed in a './library'
# directory will be auto-detected and don't need to be added to this list.
custom_modules: []
# Settings for variable formatting rule (ANS104)
# Settings for variable formatting rule (ANSIBLE0004)
double-braces:
max-spaces-inside: 1
min-spaces-inside: 1
# List of allowed literal bools (ANS114)
# List of allowed literal bools (ANSIBLE0014)
literal-bools:
- "True"
- "False"
- "yes"
- "no"
# List of modules that don't need to be named (ANS106).
# List of modules that don't need to be named (ANSIBLE0006).
# You must specify each individual module name, globs or wildcards do not work!
named-task:
exclude:
- "meta"
- "debug"
- "block/always/rescue"
- "block"
- "include_role"
- "include_tasks"
- "include_vars"
- "import_role"
- "import_tasks"
# List of modules that are allowed to use the key=value format instead of the native YAML format (YML108).
# List of modules that are allowed to use the key=value format instead of the native YAML format (LINT0008).
# You must specify each individual module name, globs or wildcards do not work!
native-yaml:
exclude: []
@ -58,8 +58,8 @@ logging:
# Global settings for all defined rules
rules:
# Disable built-in rules if required
builtin: True
# Disable build-in rules if required
buildin: True
# List of files to exclude
exclude_files: []
@ -75,17 +75,21 @@ rules:
exclude_filter: []
# List of rule ID's that should be displayed as a warning instead of an error. By default,
# no rules are marked as warnings. This list allows to degrade errors to warnings for each rule.
# only rules whose version is higher than the current default version are marked as warnings.
# This list allows to degrade errors to warnings for each rule.
warning_filter:
- "ANS128"
- "ANS999"
- "ANSIBLE9999"
# All dotfiles (including hidden folders) are excluded by default.
# You can disable this setting and handle dotfiles by yourself with `exclude_files`.
ignore_dotfiles: True
# List of directories to load rules from (defaults to built-in)
dir: []
# List of directories to load standard rules from (defaults to build-in)
standards: []
# Standard version to use. Standard version set in a roles meta file
# or playbook will takes precedence.
version:
# Block to control included yamllint rules.
# See https://yamllint.readthedocs.io/en/stable/rules.html
@ -95,8 +99,6 @@ yamllint:
max-spaces-before: 0
document-start:
present: True
document-end:
present: True
empty-lines:
max: 1
max-end: 1

View File

@ -1,21 +0,0 @@
---
title: Pre-Commit setup
---
To use `ansible-later` with the [pre-commit](https://pre-commit.com/) framework, add the following to the `.pre-commit-config.yaml` file in your local repository.
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<!-- spellchecker-disable -->
{{< highlight yaml "linenos=table" >}}
- repo: https://github.com/thegeeklab/ansible-later
# change ref to the latest release from https://github.com/thegeeklab/ansible-later/releases
rev: v3.0.2
hooks:
- id: ansible-later
{{< /highlight >}}
<!-- spellchecker-enable -->
<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->

View File

@ -2,47 +2,43 @@
title: Included rules
---
Reviews are useless without some rules to check against. `ansible-later` comes with a set of built-in checks, which are explained in the following table.
Reviews are useless without some rules or standards to check against. ansible-later comes with a set of built-in checks, which are explained in the following table.
| Rule | ID | Description | Parameter |
| ----------------------------- | ------ | ----------------------------------------------------------------- | -------------------------------------------------------------------------- |
| CheckYamlEmptyLines | YML101 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 1} |
| CheckYamlIndent | YML102 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} |
| CheckYamlHyphens | YML103 | YAML should use consistent number of spaces after hyphens (-). | {max-spaces-after: 1} |
| CheckYamlDocumentStart | YML104 | YAML should contain document start marker. | {document-start: {present: true}} |
| CheckYamlColons | YML105 | YAML should use consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} |
| CheckYamlFile | YML106 | Roles file should be in YAML format. | |
| CheckYamlHasContent | YML107 | Files should contain useful content. | |
| CheckNativeYaml | YML108 | Use YAML format for tasks and handlers rather than key=value. | {native-yaml: {exclude: []}} |
| CheckYamlDocumentEnd | YML109 | YAML should contain document end marker. | {document-end: {present: true}} |
| CheckYamlOctalValues | YML110 | YAML should not use forbidden implicit or explicit octal value. | {octal-values: {forbid-implicit-octal: true, forbid-explicit-octal: true}} |
| CheckTaskSeparation | ANS101 | Single tasks should be separated by an empty line. | |
| CheckMetaMain | ANS102 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies |
| CheckUniqueNamedTask | ANS103 | Tasks and handlers must be uniquely named within a file. | |
| CheckBraces | ANS104 | YAML should use consistent number of spaces around variables. | {double-braces: max-spaces-inside: 1, min-spaces-inside: 1} |
| CheckScmInSrc | ANS105 | Use SCM key rather than `src: scm+url` in requirements file. | |
| CheckNamedTask | ANS106 | Tasks and handlers must be named. | {named-task: {exclude: [meta, debug, block, include\_\*, import\_\*]}} |
| CheckNameFormat | ANS107 | Name of tasks and handlers must be formatted. | formats: first letter capital |
| CheckCommandInsteadofModule | ANS108 | Commands should not be used in place of modules. | |
| CheckInstallUseLatest | ANS109 | Package managers should not install with state=latest. | |
| CheckShellInsteadCommand | ANS110 | Use Shell only when piping, redirecting or chaining commands. | |
| CheckCommandHasChanges | ANS111 | Commands should be idempotent and only used with some checks. | |
| CheckCompareToEmptyString | ANS112 | Don't compare to "" - use `when: var` or `when: not var`. | |
| CheckCompareToLiteralBool | ANS113 | Don't compare to True/False - use `when: var` or `when: not var`. | |
| CheckLiteralBoolFormat | ANS114 | Literal bools should be consistent. | {literal-bools: [True, False, yes, no]} |
| CheckBecomeUser | ANS115 | Become should be combined with become_user. | |
| CheckFilterSeparation | ANS116 | Jinja2 filters should be separated with spaces. | |
| CheckCommandInsteadOfArgument | ANS117 | Commands should not be used in place of module arguments. | |
| CheckFilePermissionMissing | ANS118 | File permissions unset or incorrect. | |
| CheckFilePermissionOctal | ANS119 | Octal file permissions must contain leading zero or be a string. | |
| CheckGitHasVersion | ANS120 | Git checkouts should use explicit version. | |
| CheckMetaChangeFromDefault | ANS121 | Roles meta/main.yml default values should be changed. | |
| CheckWhenFormat | ANS122 | Don't use Jinja2 in `when`. | |
| CheckNestedJinja | ANS123 | Don't use nested Jinja2 pattern. | |
| CheckLocalAction | ANS124 | Don't use local_action. | |
| CheckRelativeRolePaths | ANS125 | Don't use a relative path in a role. | |
| CheckChangedInWhen | ANS126 | Use handlers instead of `when: changed`. | |
| CheckChangedInWhen | ANS127 | Deprecated bare variables in loops must not be used. | |
| CheckFQCNBuiltin | ANS128 | Module actions should use full qualified collection names. | |
| CheckFQCNBuiltin | ANS129 | Check optimized playbook/tasks key order. | |
| CheckDeprecated | ANS999 | Deprecated features of `ansible-later` should not be used. | |
| Rule | ID | Description | Parameter |
| ----------------------------- | ----------- | ----------------------------------------------------------------- | ---------------------------------------------------------------------- |
| CheckYamlEmptyLines | LINT0001 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 1} |
| CheckYamlIndent | LINT0002 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} |
| CheckYamlHyphens | LINT0003 | YAML should use consistent number of spaces after hyphens (-). | {max-spaces-after: 1} |
| CheckYamlDocumentStart | LINT0004 | YAML should contain document start marker. | {document-start: {present: true}} |
| CheckYamlColons | LINT0005 | YAML should use consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} |
| CheckYamlFile | LINT0006 | Roles file should be in YAML format. | |
| CheckYamlHasContent | LINT0007 | Files should contain useful content. | |
| CheckNativeYaml | LINT0008 | Use YAML format for tasks and handlers rather than key=value. | {native-yaml: {exclude: []}} |
| CheckYamlDocumentEnd | LINT0009 | YAML should contain document end marker. | {document-end: {present: true}} |
| CheckTaskSeparation | ANSIBLE0001 | Single tasks should be separated by an empty line. | |
| CheckMetaMain | ANSIBLE0002 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies |
| CheckUniqueNamedTask | ANSIBLE0003 | Tasks and handlers must be uniquely named within a file. | |
| CheckBraces | ANSIBLE0004 | YAML should use consistent number of spaces around variables. | {double-braces: max-spaces-inside: 1, min-spaces-inside: 1} |
| CheckScmInSrc | ANSIBLE0005 | Use SCM key rather than `src: scm+url` in requirements file. | |
| CheckNamedTask | ANSIBLE0006 | Tasks and handlers must be named. | {named-task: {exclude: [meta, debug, block, include\_\*, import\_\*]}} |
| CheckNameFormat | ANSIBLE0007 | Name of tasks and handlers must be formatted. | formats: first letter capital |
| CheckCommandInsteadofModule | ANSIBLE0008 | Commands should not be used in place of modules. | |
| CheckInstallUseLatest | ANSIBLE0009 | Package managers should not install with state=latest. | |
| CheckShellInsteadCommand | ANSIBLE0010 | Use Shell only when piping, redirecting or chaining commands. | |
| CheckCommandHasChanges | ANSIBLE0011 | Commands should be idempotent and only used with some checks. | |
| CheckCompareToEmptyString | ANSIBLE0012 | Don't compare to "" - use `when: var` or `when: not var`. | |
| CheckCompareToLiteralBool | ANSIBLE0013 | Don't compare to True/False - use `when: var` or `when: not var`. | |
| CheckLiteralBoolFormat | ANSIBLE0014 | Literal bools should be consistent. | {literal-bools: [True, False, yes, no]} |
| CheckBecomeUser | ANSIBLE0015 | Become should be combined with become_user. | |
| CheckFilterSeparation | ANSIBLE0016 | Jinja2 filters should be separated with spaces. | |
| CheckCommandInsteadOfArgument | ANSIBLE0017 | Commands should not be used in place of module arguments. | |
| CheckFilePermissionMissing | ANSIBLE0018 | File permissions unset or incorrect. | |
| CheckFilePermissionOctal | ANSIBLE0019 | Octal file permissions must contain leading zero or be a string. | |
| CheckGitHasVersion | ANSIBLE0020 | Git checkouts should use explicit version. | |
| CheckMetaChangeFromDefault | ANSIBLE0021 | Roles meta/main.yml default values should be changed. | |
| CheckWhenFormat | ANSIBLE0022 | Don't use Jinja2 in `when`. | |
| CheckNestedJinja | ANSIBLE0023 | Don't use nested Jinja2 pattern. | |
| CheckLocalAction | ANSIBLE0024 | Don't use local_action. | |
| CheckRelativeRolePaths | ANSIBLE0025 | Don't use a relative path in a role. | |
| CheckChangedInWhen | ANSIBLE0026 | Use handlers instead of `when: changed`. | |
| CheckDeprecated | ANSIBLE9999 | Deprecated features of `ansible-later` should not be used. | |
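
The rule IDs listed above (ANS…/YML… on `main`, ANSIBLE…/LINT… in `v2.0.1`) can be referenced from the `rules` filters shown in the configuration reference earlier in this document. The snippet below is a hedged sketch using the `main`-branch ID scheme; the exact name and location of the configuration file are not shown in this excerpt and are left open here.

{{< highlight yaml >}}
# Hypothetical configuration fragment; key names follow the default
# configuration shown earlier in this compare view.
rules:
  # Skip the FQCN check (CheckFQCNBuiltin, ANS128) entirely.
  exclude_filter:
    - "ANS128"
  # Report the deprecation check (CheckDeprecated, ANS999) as a warning.
  warning_filter:
    - "ANS999"
{{< /highlight >}}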

View File

@ -23,5 +23,5 @@ main:
sub:
- name: Candidates
ref: "/build_rules/candidates"
- name: Rules
ref: "/build_rules/rule"
- name: Standards checks
ref: "/build_rules/standards_check"

File diff suppressed because one or more lines are too long

Two binary image files changed but not shown (previous sizes: 20 KiB and 28 KiB).

1544
poetry.lock generated

File diff suppressed because it is too large

View File

@ -10,40 +10,63 @@ classifiers = [
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Utilities",
"Topic :: Software Development",
]
description = "Reviews ansible playbooks, roles and inventories and suggests improvements."
documentation = "https://ansible-later.geekdocs.de/"
homepage = "https://ansible-later.geekdocs.de/"
include = ["LICENSE"]
include = [
"LICENSE",
]
keywords = ["ansible", "code", "review"]
license = "MIT"
name = "ansible-later"
packages = [{ include = "ansiblelater" }]
packages = [
{include = "ansiblelater"},
]
readme = "README.md"
repository = "https://github.com/thegeeklab/ansible-later/"
version = "0.0.0"
[tool.poetry.dependencies]
PyYAML = "6.0.1"
ansible-core = { version = "2.14.17", optional = true }
ansible = { version = "7.7.0", optional = true }
anyconfig = "0.14.0"
PyYAML = "6.0"
ansible = {version = "5.1.0", optional = true}
ansible-core = {version = "2.12.1", optional = true}
anyconfig = "0.12.0"
appdirs = "1.4.4"
colorama = "0.4.6"
jsonschema = "4.22.0"
nested-lookup = "0.2.25"
pathspec = "0.12.1"
python = "^3.9.0"
python-json-logger = "2.0.7"
toolz = "0.12.1"
unidiff = "0.7.5"
yamllint = "1.35.1"
colorama = "0.4.4"
flake8 = "4.0.1"
jsonschema = "4.3.3"
nested-lookup = "0.2.23"
pathspec = "0.9.0"
python = "^3.8.0"
python-json-logger = "2.0.2"
toolz = "0.11.2"
unidiff = "0.7.0"
yamllint = "1.26.3"
[tool.poetry.dev-dependencies]
bandit = "1.7.1"
flake8-blind-except = "0.2.0"
flake8-builtins = "1.5.3"
flake8-docstrings = "1.6.0"
flake8-eradicate = "1.2.0"
flake8-isort = "4.1.1"
flake8-logging-format = "0.6.0"
flake8-pep3101 = "1.3.0"
flake8-polyfill = "1.0.2"
flake8-quotes = "3.3.1"
pep8-naming = "0.12.1"
pydocstyle = "6.1.1"
pytest = "6.2.5"
pytest-cov = "3.0.0"
pytest-mock = "3.6.1"
tomli = "2.0.0"
yapf = "0.32.0"
[tool.poetry.extras]
ansible = ["ansible"]
@ -52,23 +75,23 @@ ansible-core = ["ansible-core"]
[tool.poetry.scripts]
ansible-later = "ansiblelater.__main__:main"
[tool.poetry.group.dev.dependencies]
ruff = "0.4.9"
pytest = "8.2.2"
pytest-mock = "3.14.0"
pytest-cov = "5.0.0"
toml = "0.10.2"
[tool.poetry-dynamic-versioning]
enable = true
style = "semver"
vcs = "git"
[tool.isort]
default_section = "THIRDPARTY"
force_single_line = true
line_length = 99
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]
[tool.pytest.ini_options]
addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --no-cov-on-fail"
addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
filterwarnings = [
"ignore::FutureWarning",
"ignore::DeprecationWarning",
"ignore:.*collections.*:DeprecationWarning",
"ignore:.*pep8.*:FutureWarning",
]
@ -76,74 +99,5 @@ filterwarnings = [
omit = ["**/test/*"]
[build-system]
build-backend = "poetry_dynamic_versioning.backend"
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
[tool.ruff]
exclude = [
".git",
"__pycache__",
"build",
"dist",
"test",
"*.pyc",
"*.egg-info",
".cache",
".eggs",
"env*",
]
line-length = 99
indent-width = 4
[tool.ruff.lint]
# Explanation of errors
#
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# D203: One blank line required before class docstring
# D212: Multi-line docstring summary should start at the first line
ignore = [
"D100",
"D101",
"D102",
"D103",
"D105",
"D107",
"D202",
"D203",
"D212",
"UP038",
"RUF012",
]
select = [
"D",
"E",
"F",
"Q",
"W",
"I",
"S",
"BLE",
"N",
"UP",
"B",
"A",
"C4",
"T20",
"SIM",
"RET",
"ARG",
"ERA",
"RUF",
]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
line-ending = "lf"

View File

@ -4,14 +4,8 @@
"packageRules": [
{
"description": "Ansible base dependencies",
"matchPackageNames": ["ansible", "ansible-core"],
"separateMinorPatch": true
},
{
"matchManagers": ["woodpecker"],
"matchFileNames": [".woodpecker/test.yml"],
"matchPackageNames": ["docker.io/library/python"],
"enabled": false
"groupName": "ansible packages",
"matchPackageNames": ["ansible", "ansible-core"]
}
]
}

22
setup.cfg Normal file
View File

@ -0,0 +1,22 @@
[flake8]
# Explanation of errors
#
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# W503:Line break occurred before a binary operator
ignore = D100, D101, D102, D103, D107, D202, W503
max-line-length = 99
inline-quotes = double
exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*
[yapf]
based_on_style = google
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true