Mirror of https://github.com/thegeeklab/ansible-later.git (synced 2024-11-21 20:30:42 +00:00)

initial commit

This commit is contained in commit 6e20d9bac5
.drone.yml (Normal file, 82 lines)
@@ -0,0 +1,82 @@
---
kind: pipeline
name: default

steps:
  - name: test
    image: randomknowledge/docker-pyenv-tox
    pull: true
    environment:
      PY_COLORS: "1"
    commands:
      - tox
    when:
      event:
        - push
        - tag

  - name: build
    image: python:3.7-alpine
    pull: true
    commands:
      - python setup.py sdist bdist_wheel
    when:
      event:
        - push
        - tag

  - name: checksum
    image: alpine
    pull: always
    commands:
      - apk add --no-cache coreutils
      # exclude files
      # - sha256sum -b files/!(*.out) > CHECKSUMFILE
      - sha256sum -b dist/* > sha256sum.txt
    when:
      event:
        - push
        - tag

  - name: gpg-sign
    image: plugins/gpgsign:1
    pull: always
    settings:
      key:
        from_secret: gpgsign_key
      passphrase:
        from_secret: gpgsign_passphrase
      detach_sign: true
      files:
        - dist/*
    when:
      event:
        - push
        - tag

  - name: publish
    image: plugins/github-release
    settings:
      api_key:
        from_secret: github_token
      files:
        - dist/*
        - sha256sum.txt
    when:
      event:
        - tag

  - name: pypi_publish
    image: xoxys/drone-pypi:0.1.0
    pull: always
    settings:
      username:
        from_secret: pypi_username
      password:
        from_secret: pypi_password
      repository: https://upload.pypi.org/legacy/
      skip_build: true
    when:
      event:
        - tag

depends_on:

.gitignore (vendored, Normal file, 99 lines)
@@ -0,0 +1,99 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject

# Ignore ide addons
.server-script
.on-save.json
.vscode
.pytest_cache

CHANGELOG.md (Normal file, 1 line)
@@ -0,0 +1 @@
# placeholder

LICENSE (Normal file, 9 lines)
@@ -0,0 +1,9 @@
MIT License

Copyright (c) 2018 Robert Kaussow

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

MANIFEST.in (Normal file, 4 lines)
@@ -0,0 +1,4 @@
include README.md
include LICENSE
recursive-include test *.py *.yml *.txt hosts
recursive-include ansiblelater/examples *.py

README.md (Normal file, 257 lines)
@@ -0,0 +1,257 @@
# ansible-later

This is a fork of Will Thames' [ansible-review](https://github.com/willthames/ansible-review), so credit goes to him
for his work on ansible-review and ansible-lint.

ansible-later is an acronym for **L**ovely **A**utomation **TE**sting f**R**amework.

## Table of Contents

- [Setup](#setup)
  - [Using pip](#using-pip)
  - [From source](#from-source)
- [Usage](#usage)
  - [Configuration](#configuration)
  - [Review a git repository](#review-a-git-repository)
  - [Review a list of files](#review-a-list-of-files)
  - [Built-in rules](#built-in-rules)
- [Build your own](#build-your-own)
  - [The standards file](#the-standards-file)
  - [Candidates](#candidates)
  - [Minimal standards checks](#minimal-standards-checks)
- [License](#license)
- [Maintainers and Contributors](#maintainers-and-contributors)

---

### Setup

#### Using pip

```Shell
# From internal pip repo as user
pip install ansible-later --user

# .. or as root
sudo pip install ansible-later
```

#### From source

```Shell
# Install dependency
git clone https://repourl
export PYTHONPATH=$PYTHONPATH:`pwd`/ansible-later/ansiblelater
export PATH=$PATH:`pwd`/ansible-later/ansiblelater/bin
```

### Usage

```Shell
ansible-later FILES
```

Where FILES is a space-delimited list of files to review.
ansible-later is _not_ recursive and won't descend
into child folders; it just processes the list of files you give it.

Passing a folder in with the list of files will elicit a warning:

```Shell
WARN: Couldn't classify file ./foldername
```

ansible-later will review inventory files, role
files, python code (modules, plugins) and playbooks.

- The goal is that each file that changes in a
  changeset should be reviewable simply by passing
  those files as the arguments to ansible-later.
- Roles are slightly harder, and sub-roles are yet
  harder still (currently just using `-R` to process
  roles works very well, but doesn't examine the
  structure of the role).
- Using `{{ playbook_dir }}` in sub-roles is so far
  very hard.
- This should work against various repository styles:
  - per-role repository
  - roles with sub-roles
  - per-playbook repository
- It should work with roles requirement files and with local roles.

#### Configuration

If your standards (and optionally in-house rules) are set up, create
a configuration file in the appropriate location (this will depend on
your operating system).

The location can be found by using `ansible-later` with no arguments.

You can override the configuration file location with the `-c` flag.

```INI
[rules]
standards = /path/to/your/standards/rules
```

The standards directory can be overridden with the `-d` argument.
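
As a quick sketch of how the two flags combine on the command line (the config and standards paths below are placeholders, not shipped defaults):

```Shell
# use an explicit configuration file and standards directory
ansible-later -c ~/.config/ansible-later/config.ini -d ~/ansible-standards roles/foo/tasks/main.yml
```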

#### Review a git repository

- `git ls-files | xargs ansible-later` works well in
  a roles repo to review the whole role. But it will
  review the whole of other repos too.
- `git ls-files *[^LICENSE,.md] | xargs ansible-later`
  works like the first example but excludes some
  unnecessary files.
- `git diff branch_to_compare | ansible-later` will
  review only the changes between the branches and
  the surrounding context.

#### Review a list of files

- `find . -type f | xargs ansible-later` will review
  all files in the current folder (and all subfolders),
  even if they're not checked into git.

#### Built-in rules

Reviews are nothing without some rules or standards against which to review. ansible-later
comes with a couple of built-in checks, explained in the following table.

| Rule | ID | Description | Parameter |
| --- | --- | --- | --- |
| check_yaml_empty_lines | LINT0001 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 0} |
| check_yaml_indent | LINT0002 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} |
| check_yaml_hyphens | LINT0003 | YAML should use a consistent number of spaces after hyphens (-). | {max-spaces-after: 1} |
| check_yaml_document_start | LINT0004 | YAML should contain a document start marker. | {document-start: {present: true}} |
| check_yaml_colons | LINT0005 | YAML should use a consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} |
| check_yaml_file | LINT0006 | Roles file should be in YAML format. | |
| check_yaml_has_content | LINT0007 | Files should contain useful content. | |
| check_native_yaml | LINT0008 | Use YAML format for tasks and handlers rather than key=value. | |
| check_line_between_tasks | ANSIBLE0001 | Single tasks should be separated by an empty line. | |
| check_meta_main | ANSIBLE0002 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies |
| check_unique_named_task | ANSIBLE0003 | Tasks and handlers must be uniquely named within a file. | |
| check_braces | ANSIBLE0004 | YAML should use a consistent number of spaces around variables. | |
| check_scm_in_src | ANSIBLE0005 | Use scm key rather than src: scm+url in requirements file. | |
| check_named_task | ANSIBLE0006 | Tasks and handlers must be named. | excludes: meta, debug, include\_\*, import\_\*, block |
| check_name_format | ANSIBLE0007 | Name of tasks and handlers must be formatted. | formats: first letter capital |
| check_command_instead_of_module | ANSIBLE0008 | Commands should not be used in place of modules. | |
| check_install_use_latest | ANSIBLE0009 | Package managers should not install with state=latest. | |
| check_shell_instead_command | ANSIBLE0010 | Use shell only when piping, redirecting or chaining commands. | |
| check_command_has_changes | ANSIBLE0011 | Commands should be idempotent and only used with some checks. | |
| check_empty_string_compare | ANSIBLE0012 | Don't compare to "" - use `when: var` or `when: not var`. | |
| check_compare_to_literal_bool | ANSIBLE0013 | Don't compare to True/False - use `when: var` or `when: not var`. | |

### Build your own

#### The standards file

A standards file comprises a list of standards, and optionally some methods to
check those standards.

Create a file called standards.py (this can import other modules).

```Python
from ansiblelater import Standard, Result

# check functions and other standards (e.g. role_must_contain_meta_main)
# are defined in, or imported from, your own rule modules
from ansiblelater.rules.ansiblefiles import check_unique_named_task

tasks_are_uniquely_named = Standard(dict(
    id="ANSIBLE0003",
    name="Tasks and handlers must be uniquely named within a single file",
    check=check_unique_named_task,
    version="0.1",
    types=["playbook", "task", "handler"],
))

standards = [
    tasks_are_uniquely_named,
    role_must_contain_meta_main,
]
```

When you add new standards, you should increment the version of your standards.
Your playbooks and roles should declare what version of standards they are
using, otherwise ansible-later assumes you're using the latest. The declaration
is done by adding the standards version as the first line of the file, e.g.

```INI
# Standards: 1.2
```

To add standards that are advisory, don't set the version. These will cause
a message to be displayed but won't constitute a failure.

When a standard's version is higher than the declared version, a
'WARN: Future standard' message is displayed and it won't constitute a failure.

An example standards file is available at
[ansiblelater/examples/standards.py](ansiblelater/examples/standards.py).

If you only want to check one or two standards quickly (perhaps you want
to review your entire code base for deprecated bare words), you can use the
`-s` flag with the name of your standard. You can pass `-s` multiple times.

```Shell
git ls-files | xargs ansible-later -s "bare words are deprecated for with_items"
```

You can see the name of the standards being checked for each file by running
`ansible-later` with the `-v` option.

#### Candidates

Each file passed to `ansible-later` will be classified. The result is a `Candidate` object,
which contains some metadata and is an instance of one of the following object types
(a short classification sketch follows the table).

| Object type | Description |
| --- | --- |
| Task | all files within the parent dir `tasks` |
| Handler | all files within the parent dir `handlers` |
| RoleVars | all files within the parent dir `vars` or `defaults` |
| GroupVars | all files (including subdirs) within the parent dir `group_vars` |
| HostVars | all files (including subdirs) within the parent dir `host_vars` |
| Meta | all files within the parent dir `meta` |
| Code | all files within the parent dirs `library`, `lookup_plugins`, `callback_plugins` and `filter_plugins`, or python files (`.py`) |
| Inventory | all files within the parent dir `inventory`, or with `inventory` or `hosts` in the filename |
| Rolesfile | all files with `rolesfile` or `requirements` in the filename |
| Makefile | all files with `Makefile` in the filename |
| Template | all files (including subdirs) within the parent dir `templates`, or jinja2 files (`.j2`) |
| File | all files (including subdirs) within the parent dir `files` |
| Playbook | all yaml files (`.yml` or `.yaml`) not matching a previous object type |
| Doc | all files with `README` in the filename |
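
As a rough illustration of this mapping, the `classify` helper exported by the `ansiblelater` package returns an instance of the matching type for a given path. This is a minimal sketch: the paths are invented for the example and must exist on disk, because building a candidate reads the file to detect vaults and a declared standards version.

```Python
from ansiblelater import classify

# hypothetical repository paths, purely to show the mapping
print(type(classify("roles/foo/tasks/main.yml")).__name__)     # Task
print(type(classify("roles/foo/handlers/main.yml")).__name__)  # Handler
print(type(classify("group_vars/all.yml")).__name__)           # GroupVars
print(type(classify("site.yml")).__name__)                      # Playbook
print(classify("notes.unknown"))                                 # None (unclassified)
```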

#### Minimal standards checks

A typical standards check will look like this:

```Python
from ansiblelater import Error, Result


def check_playbook_for_something(candidate, settings):
    result = Result(candidate.path)  # empty result is a success with no output
    with open(candidate.path, 'r') as f:
        for (lineno, line) in enumerate(f):
            if line is dodgy:  # placeholder for your actual condition
                # enumerate is 0-based so add 1 to lineno
                result.errors.append(Error(lineno + 1, "Line is dodgy: reasons"))
    return result
```

All standards checks take a candidate object, which has a path attribute.
The type can be inferred from the class name (i.e. `type(candidate).__name__`)
or from the table [here](#candidates).

They return a `Result` object, which contains a possibly empty list of `Error`
objects. `Error` objects are formed of a line number and a message. If the
error applies to the whole file being reviewed, set the line number to `None`.
Line numbers are important as `ansible-later` can review just ranges of files
to only review changes (e.g. through piping the output of `git diff` to
`ansible-later`).
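
For example, a check that flags the file as a whole rather than a single line can attach an `Error` whose line number is `None`. The sketch below reuses the `Result` and `Error` classes shown above; the emptiness condition is only an illustrative placeholder, not a shipped rule.

```Python
from ansiblelater import Error, Result


def check_file_has_content(candidate, settings):
    result = Result(candidate.path)  # empty result is a success
    with open(candidate.path, 'r') as f:
        if not f.read().strip():
            # no single line is at fault, so the error applies to the whole file
            result.errors.append(Error(None, "file has no content"))
    return result
```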

### License

This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.

### Maintainers and Contributors

[Robert Kaussow](https://github.com/xoxys)

ansiblelater/__init__.py (Normal file, 313 lines)
@@ -0,0 +1,313 @@
__author__ = "Robert Kaussow"
__project__ = "ansible-later"
__version__ = "0.1.0"
__license__ = "MIT"
__maintainer__ = "Robert Kaussow"
__email__ = "mail@geeklabor.de"
__status__ = "Production"


import re
import os
import codecs
import ansible
from distutils.version import LooseVersion
from ansiblelater.utils import info, warn, abort, error
from ansiblelater.utils import read_standards
from ansiblelater.utils import get_property
from ansiblelater.utils import standards_latest
from ansiblelater.utils import is_line_in_ranges
from ansiblelater.utils import lines_ranges

try:
    # Ansible 2.4 import of module loader
    from ansible.plugins.loader import module_loader
except ImportError:
    try:
        from ansible.plugins import module_loader
    except ImportError:
        from ansible.utils import module_finder as module_loader


class AnsibleReviewFormatter(object):
    def format(self, match):
        formatstr = u"{0}:{1}: [{2}] {3} {4}"
        return formatstr.format(match.filename,
                                match.linenumber,
                                match.rule.id,
                                match.message,
                                match.line
                                )


class Standard(object):
    def __init__(self, standard_dict):
        if 'id' not in standard_dict:
            standard_dict.update(id='')
        else:
            standard_dict.update(id='[{}] '.format(standard_dict.get("id")))
        self.id = standard_dict.get("id")
        self.name = standard_dict.get("name")
        self.version = standard_dict.get("version")
        self.check = standard_dict.get("check")
        self.types = standard_dict.get("types")

    def __repr__(self):
        return "Standard: %s (version: %s, types: %s)" % (
            self.name, self.version, self.types)


class Error(object):
    def __init__(self, lineno, message):
        self.lineno = lineno
        self.message = message

    def __repr__(self):
        if self.lineno:
            return "%s: %s" % (self.lineno, self.message)
        else:
            return " %s" % (self.message)


class Result(object):
    def __init__(self, candidate, errors=None):
        self.candidate = candidate
        self.errors = errors or []

    def message(self):
        return "\n".join(["{0}:{1}".format(self.candidate, error)
                          for error in self.errors])


class Candidate(object):
    def __init__(self, filename):
        self.path = filename
        try:
            self.version = find_version(filename)
            self.binary = False
        except UnicodeDecodeError:
            self.binary = True

        self.vault = False
        with codecs.open(filename, mode='rb', encoding='utf-8') as f:
            if f.readline().startswith("$ANSIBLE_VAULT"):
                self.vault = True

        self.filetype = type(self).__name__.lower()
        self.expected_version = True

    def review(self, settings, lines=None):
        return candidate_review(self, settings, lines)

    def __repr__(self):
        return "%s (%s)" % (type(self).__name__, self.path)

    def __getitem__(self, item):
        return self.__dict__.get(item)


class RoleFile(Candidate):
    def __init__(self, filename):
        super(RoleFile, self).__init__(filename)
        self.version = None
        parentdir = os.path.dirname(os.path.abspath(filename))
        while parentdir != os.path.dirname(parentdir):
            meta_file = os.path.join(parentdir, "meta", "main.yml")
            if os.path.exists(meta_file):
                self.version = find_version(meta_file)
                if self.version:
                    break
            parentdir = os.path.dirname(parentdir)
        role_modules = os.path.join(parentdir, 'library')
        if os.path.exists(role_modules):
            module_loader.add_directory(role_modules)


class Playbook(Candidate):
    pass


class Task(RoleFile):
    def __init__(self, filename):
        super(Task, self).__init__(filename)
        self.filetype = 'tasks'


class Handler(RoleFile):
    def __init__(self, filename):
        super(Handler, self).__init__(filename)
        self.filetype = 'handlers'


class Vars(Candidate):
    pass


class Unversioned(Candidate):
    def __init__(self, filename):
        super(Unversioned, self).__init__(filename)
        self.expected_version = False


class InventoryVars(Unversioned):
    pass


class HostVars(InventoryVars):
    pass


class GroupVars(InventoryVars):
    pass


class RoleVars(RoleFile):
    pass


class Meta(RoleFile):
    pass


class Inventory(Unversioned):
    pass


class Code(Unversioned):
    pass


class Template(RoleFile):
    pass


class Doc(Unversioned):
    pass


# For ease of checking files for tabs
class Makefile(Unversioned):
    pass


class File(RoleFile):
    pass


class Rolesfile(Unversioned):
    pass


def classify(filename):
    parentdir = os.path.basename(os.path.dirname(filename))

    if parentdir in ['tasks']:
        return Task(filename)
    if parentdir in ['handlers']:
        return Handler(filename)
    if parentdir in ['vars', 'defaults']:
        return RoleVars(filename)
    if 'group_vars' in filename.split(os.sep):
        return GroupVars(filename)
    if 'host_vars' in filename.split(os.sep):
        return HostVars(filename)
    if parentdir in ['meta']:
        return Meta(filename)
    if parentdir in ['library', 'lookup_plugins', 'callback_plugins',
                     'filter_plugins'] or filename.endswith('.py'):
        return Code(filename)
    if 'inventory' in filename or 'hosts' in filename or parentdir in ['inventory']:
        return Inventory(filename)
    if 'rolesfile' in filename or 'requirements' in filename:
        return Rolesfile(filename)
    if 'Makefile' in filename:
        return Makefile(filename)
    if 'templates' in filename.split(os.sep) or filename.endswith('.j2'):
        return Template(filename)
    if 'files' in filename.split(os.sep):
        return File(filename)
    if filename.endswith('.yml') or filename.endswith('.yaml'):
        return Playbook(filename)
    if 'README' in filename:
        return Doc(filename)
    return None


def candidate_review(candidate, settings, lines=None):
    errors = 0
    standards = read_standards(settings)
    if getattr(standards, 'ansible_min_version', None) and \
            LooseVersion(standards.ansible_min_version) > LooseVersion(ansible.__version__):
        raise SystemExit("Standards require ansible version %s (current version %s). "
                         "Please upgrade ansible." %
                         (standards.ansible_min_version, ansible.__version__))

    if getattr(standards, 'ansible_review_min_version', None) and \
            LooseVersion(standards.ansible_review_min_version) > LooseVersion(
                get_property("__version__")):
        raise SystemExit("Standards require ansible-later version %s (current version %s). "
                         "Please upgrade ansible-later." %
                         (standards.ansible_review_min_version, get_property("__version__")))

    if not candidate.version:
        candidate.version = standards_latest(standards.standards)
        if candidate.expected_version:
            if isinstance(candidate, RoleFile):
                warn("%s %s is in a role that contains a meta/main.yml without a declared "
                     "standards version. "
                     "Using latest standards version %s" %
                     (type(candidate).__name__, candidate.path, candidate.version),
                     settings)
            else:
                warn("%s %s does not present standards version. "
                     "Using latest standards version %s" %
                     (type(candidate).__name__, candidate.path, candidate.version),
                     settings)

    info("%s %s declares standards version %s" %
         (type(candidate).__name__, candidate.path, candidate.version),
         settings)

    for standard in standards.standards:
        if type(candidate).__name__.lower() not in standard.types:
            continue
        if settings.standards_filter and standard.name not in settings.standards_filter:
            continue
        result = standard.check(candidate, settings)

        if not result:
            abort("Standard '%s' returns an empty result object." %
                  (standard.check.__name__))

        for err in [err for err in result.errors
                    if not err.lineno or is_line_in_ranges(err.lineno, lines_ranges(lines))]:
            if not standard.version:
                warn("{id}Best practice '{name}' not met:\n{path}:{error}".format(
                    id=standard.id, name=standard.name, path=candidate.path, error=err), settings)
            elif LooseVersion(standard.version) > LooseVersion(candidate.version):
                warn("{id}Future standard '{name}' not met:\n{path}:{error}".format(
                    id=standard.id, name=standard.name, path=candidate.path, error=err), settings)
            else:
                error("{id}Standard '{name}' not met:\n{path}:{error}".format(
                    id=standard.id, name=standard.name, path=candidate.path, error=err))
                errors = errors + 1
        if not result.errors:
            if not standard.version:
                info("Best practice '%s' met" % standard.name, settings)
            elif LooseVersion(standard.version) > LooseVersion(candidate.version):
                info("Future standard '%s' met" % standard.name, settings)
            else:
                info("Standard '%s' met" % standard.name, settings)

    return errors


def find_version(filename, version_regex=r"^# Standards:\s*([\d.]+)"):
    version_re = re.compile(version_regex)

    with codecs.open(filename, mode='rb', encoding='utf-8') as f:
        for line in f:
            match = version_re.match(line)
            if match:
                return match.group(1)
    return None

ansiblelater/__main__.py (Executable file, 79 lines)
@@ -0,0 +1,79 @@
#!/usr/bin/env python

import logging
import optparse
import os
import sys
from appdirs import AppDirs
from pkg_resources import resource_filename
from ansiblelater import classify
from ansiblelater.utils import info, warn, read_config, get_property


def main():
    config_dir = AppDirs("ansible-later").user_config_dir
    default_config_file = os.path.join(config_dir, "config.ini")

    parser = optparse.OptionParser("%prog playbook_file|role_file|inventory_file",
                                   version="%prog " + get_property("__version__"))
    parser.add_option('-c', dest='configfile', default=default_config_file,
                      help="Location of configuration file: [%s]" % default_config_file)
    parser.add_option('-d', dest='rulesdir',
                      help="Location of standards rules")
    parser.add_option('-q', dest='log_level', action="store_const", default=logging.WARN,
                      const=logging.ERROR, help="Only output errors")
    parser.add_option('-s', dest='standards_filter', action='append',
                      help="limit standards to specific names")
    parser.add_option('-v', dest='log_level', action="store_const", default=logging.WARN,
                      const=logging.INFO, help="Show more verbose output")

    options, args = parser.parse_args(sys.argv[1:])
    settings = read_config(options.configfile)

    # Merge CLI options with config options. CLI options override config options.
    for key, value in options.__dict__.items():
        if value:
            setattr(settings, key, value)

    if os.path.exists(settings.configfile):
        info("Using configuration file: %s" % settings.configfile, settings)
    else:
        warn("No configuration file found at %s" % settings.configfile, settings, file=sys.stderr)
        if not settings.rulesdir:
            rules_dir = os.path.join(resource_filename('ansiblelater', 'examples'))
            warn("Using example standards found at %s" % rules_dir, settings, file=sys.stderr)
            settings.rulesdir = rules_dir

    if len(args) == 0:
        candidates = []
        for root, dirs, files in os.walk("."):
            for filename in files:
                candidates.append(os.path.join(root, filename))
    else:
        candidates = args

    errors = 0
    for filename in candidates:
        if ':' in filename:
            (filename, lines) = filename.split(":")
        else:
            lines = None
        candidate = classify(filename)
        if candidate:
            if candidate.binary:
                info("Not reviewing binary file %s" % filename, settings)
                continue
            if candidate.vault:
                info("Not reviewing vault file %s" % filename, settings)
                continue
            if lines:
                info("Reviewing %s lines %s" % (candidate, lines), settings)
            else:
                info("Reviewing all of %s" % candidate, settings)
            errors = errors + candidate.review(settings, lines)
        else:
            info("Couldn't classify file %s" % filename, settings)
    return errors


main()

ansiblelater/bin/ansible-later (Executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/usr/bin/env python

import sys
import ansiblelater.__main__

sys.exit(ansiblelater.__main__.main())

ansiblelater/examples/standards.py (Normal file, 227 lines)
@@ -0,0 +1,227 @@
from ansiblelater import Standard

from ansiblelater.rules.yamlfiles import check_yaml_empty_lines
from ansiblelater.rules.yamlfiles import check_yaml_indent
from ansiblelater.rules.yamlfiles import check_yaml_hyphens
from ansiblelater.rules.yamlfiles import check_yaml_document_start
from ansiblelater.rules.yamlfiles import check_yaml_colons
from ansiblelater.rules.yamlfiles import check_yaml_file
from ansiblelater.rules.yamlfiles import check_yaml_has_content
from ansiblelater.rules.yamlfiles import check_native_yaml
from ansiblelater.rules.taskfiles import check_line_between_tasks
from ansiblelater.rules.rolefiles import check_meta_main
from ansiblelater.rules.rolefiles import check_scm_in_src
from ansiblelater.rules.ansiblefiles import check_unique_named_task
from ansiblelater.rules.ansiblefiles import check_named_task
from ansiblelater.rules.ansiblefiles import check_name_format
from ansiblelater.rules.ansiblefiles import check_braces_spaces
from ansiblelater.rules.ansiblefiles import check_command_instead_of_module
from ansiblelater.rules.ansiblefiles import check_install_use_latest
from ansiblelater.rules.ansiblefiles import check_shell_instead_command
from ansiblelater.rules.ansiblefiles import check_command_has_changes
from ansiblelater.rules.ansiblefiles import check_empty_string_compare
from ansiblelater.rules.ansiblefiles import check_compare_to_literal_bool


tasks_should_be_separated = Standard(dict(
    id="ANSIBLE0001",
    name="Single tasks should be separated by empty line",
    check=check_line_between_tasks,
    version="0.1",
    types=["playbook", "task", "handler"]
))

role_must_contain_meta_main = Standard(dict(
    id="ANSIBLE0002",
    name="Roles must contain suitable meta/main.yml",
    check=check_meta_main,
    version="0.1",
    types=["meta"]
))

tasks_are_uniquely_named = Standard(dict(
    id="ANSIBLE0003",
    name="Tasks and handlers must be uniquely named within a single file",
    check=check_unique_named_task,
    version="0.1",
    types=["playbook", "task", "handler"],
))

use_spaces_between_variable_braces = Standard(dict(
    id="ANSIBLE0004",
    name="YAML should use consistent number of spaces around variables",
    check=check_braces_spaces,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

roles_scm_not_in_src = Standard(dict(
    id="ANSIBLE0005",
    name="Use scm key rather than src: scm+url",
    check=check_scm_in_src,
    version="0.1",
    types=["rolesfile"]
))

tasks_are_named = Standard(dict(
    id="ANSIBLE0006",
    name="Tasks and handlers must be named",
    check=check_named_task,
    version="0.1",
    types=["playbook", "task", "handler"],
))

tasks_names_are_formatted = Standard(dict(
    id="ANSIBLE0007",
    name="Name of tasks and handlers must be formatted",
    check=check_name_format,
    version="0.1",
    types=["playbook", "task", "handler"],
))

commands_should_not_be_used_in_place_of_modules = Standard(dict(
    id="ANSIBLE0008",
    name="Commands should not be used in place of modules",
    check=check_command_instead_of_module,
    version="0.1",
    types=["playbook", "task", "handler"]
))

package_installs_should_not_use_latest = Standard(dict(
    id="ANSIBLE0009",
    name="Package installs should use present, not latest",
    check=check_install_use_latest,
    types=["playbook", "task", "handler"]
))

use_shell_only_when_necessary = Standard(dict(
    id="ANSIBLE0010",
    name="Shell should only be used when essential",
    check=check_shell_instead_command,
    types=["playbook", "task", "handler"]
))

commands_should_be_idempotent = Standard(dict(
    id="ANSIBLE0011",
    name="Commands should be idempotent",
    check=check_command_has_changes,
    version="0.1",
    types=["playbook", "task"]
))

dont_compare_to_empty_string = Standard(dict(
    id="ANSIBLE0012",
    name="Don't compare to \"\" - use `when: var` or `when: not var`",
    check=check_empty_string_compare,
    version="0.1",
    types=["playbook", "task", "handler", "template"]
))

dont_compare_to_literal_bool = Standard(dict(
    id="ANSIBLE0013",
    name="Don't compare to True or False - use `when: var` or `when: not var`",
    check=check_compare_to_literal_bool,
    version="0.1",
    types=["playbook", "task", "handler", "template"]
))

files_should_not_contain_unnecessarily_empty_lines = Standard(dict(
    id="LINT0001",
    name="YAML should not contain unnecessarily empty lines",
    check=check_yaml_empty_lines,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

files_should_be_indented = Standard(dict(
    id="LINT0002",
    name="YAML should be correctly indented",
    check=check_yaml_indent,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

files_should_use_consistent_spaces_after_hyphens = Standard(dict(
    id="LINT0003",
    name="YAML should use consistent number of spaces after hyphens",
    check=check_yaml_hyphens,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

files_should_contain_document_start_marker = Standard(dict(
    id="LINT0004",
    name="YAML should contain document start marker",
    check=check_yaml_document_start,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

spaces_around_colons = Standard(dict(
    id="LINT0005",
    name="YAML should use consistent number of spaces around colons",
    check=check_yaml_colons,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

rolesfile_should_be_in_yaml = Standard(dict(
    id="LINT0006",
    name="Roles file should be in yaml format",
    check=check_yaml_file,
    version="0.1",
    types=["rolesfile"]
))

files_should_not_be_purposeless = Standard(dict(
    id="LINT0007",
    name="Files should contain useful content",
    check=check_yaml_has_content,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars", "defaults", "meta"]
))

use_yaml_rather_than_key_value = Standard(dict(
    id="LINT0008",
    name="Use YAML format for tasks and handlers rather than key=value",
    check=check_native_yaml,
    version="0.1",
    types=["playbook", "task", "handler"]
))


ansible_min_version = '2.1'
ansible_review_min_version = '0.1.2'


standards = [
    # Ansible
    tasks_should_be_separated,
    role_must_contain_meta_main,
    tasks_are_uniquely_named,
    use_spaces_between_variable_braces,
    roles_scm_not_in_src,
    tasks_are_named,
    tasks_names_are_formatted,
    commands_should_not_be_used_in_place_of_modules,
    package_installs_should_not_use_latest,
    use_shell_only_when_necessary,
    commands_should_be_idempotent,
    dont_compare_to_empty_string,
    dont_compare_to_literal_bool,
    # Lint
    files_should_not_contain_unnecessarily_empty_lines,
    files_should_be_indented,
    files_should_use_consistent_spaces_after_hyphens,
    files_should_contain_document_start_marker,
    spaces_around_colons,
    rolesfile_should_be_in_yaml,
    files_should_not_be_purposeless,
    use_yaml_rather_than_key_value,
]

ansiblelater/rules/__init__.py (Normal file, 0 lines)

ansiblelater/rules/ansiblefiles.py (Normal file, 228 lines)
@@ -0,0 +1,228 @@
import re
import os

from collections import defaultdict
from ansiblelater import Result, Error
from ansiblelater.utils.rulehelper import (get_normalized_tasks,
                                           get_normalized_yaml)


def check_braces_spaces(candidate, settings):
    yamllines, errors = get_normalized_yaml(candidate, settings)
    description = "no suitable numbers of spaces (required: 1)"

    lineno = 0
    matches = []
    braces = re.compile("{{(.*?)}}")

    if not errors:
        for line in yamllines:
            lineno += 1
            match = braces.findall(line)
            if match:
                for item in match:
                    matches.append(item)

        for item in matches:
            error_count = 0
            string_length = len(item)
            strip_length = item.rstrip()

            if strip_length == 0 and not string_length == 1:
                error_count += 1
            else:
                x = 0
                leading_spaces = 0
                while (x < string_length - 1 and item[x].isspace()):
                    x += 1
                    leading_spaces += 1

                x = string_length - 1
                trailing_spaces = 0
                while (x > 0 and item[x].isspace()):
                    x -= 1
                    trailing_spaces += 1

                if not leading_spaces == 1 or not trailing_spaces == 1:
                    error_count += 1

            if not error_count == 0:
                errors.append(Error(lineno, description))
    return Result(candidate.path, errors)


def check_named_task(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    nameless_tasks = ['meta', 'debug', 'include_role', 'import_role',
                      'include_tasks', 'import_tasks', 'block']
    description = "module '%s' used without name attribute"

    if not errors:
        for task in tasks:
            module = task["action"]["__ansible_module__"]
            if 'name' not in task and module not in nameless_tasks:
                errors.append(Error(task['__line__'], description % module))

    return Result(candidate.path, errors)


def check_name_format(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    description = "name '%s' should start with uppercase"
    namelines = defaultdict(list)

    if not errors:
        for task in tasks:
            if 'name' in task:
                namelines[task['name']].append(task['__line__'])
        for (name, lines) in namelines.items():
            if not name[0].isupper():
                errors.append(Error(lines[-1], description % name))

    return Result(candidate.path, errors)


def check_unique_named_task(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    description = "name '%s' appears multiple times"

    namelines = defaultdict(list)

    if not errors:
        for task in tasks:
            if 'name' in task:
                namelines[task['name']].append(task['__line__'])
        for (name, lines) in namelines.items():
            if len(lines) > 1:
                errors.append(Error(lines[-1], description % name))

    return Result(candidate.path, errors)


def check_command_instead_of_module(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    commands = ['command', 'shell', 'raw']
    modules = {
        'git': 'git', 'hg': 'hg', 'curl': 'get_url or uri', 'wget': 'get_url or uri',
        'svn': 'subversion', 'service': 'service', 'mount': 'mount',
        'rpm': 'yum or rpm_key', 'yum': 'yum', 'apt-get': 'apt-get',
        'unzip': 'unarchive', 'tar': 'unarchive', 'chkconfig': 'service',
        'rsync': 'synchronize', 'supervisorctl': 'supervisorctl', 'systemctl': 'systemd',
        'sed': 'template or lineinfile'
    }
    description = "%s command used in place of %s module"

    if not errors:
        for task in tasks:
            if task["action"]["__ansible_module__"] in commands:
                if 'cmd' in task['action']:
                    first_cmd_arg = task["action"]["cmd"].split()[0]
                else:
                    first_cmd_arg = task["action"]["__ansible_arguments__"][0]

                executable = os.path.basename(first_cmd_arg)
                if first_cmd_arg and executable in modules and task['action'].get('warn', True):
                    errors.append(
                        Error(task["__line__"], description % (executable, modules[executable])))

    return Result(candidate.path, errors)


def check_install_use_latest(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    package_managers = ['yum', 'apt', 'dnf', 'homebrew', 'pacman', 'openbsd_package', 'pkg5',
                        'portage', 'pkgutil', 'slackpkg', 'swdepot', 'zypper', 'bundler', 'pip',
                        'pear', 'npm', 'gem', 'easy_install', 'bower', 'package']
    description = "package installs should use state=present with or without a version"

    if not errors:
        for task in tasks:
            if (task["action"]["__ansible_module__"] in package_managers
                    and task["action"].get("state") == "latest"):
                errors.append(Error(task["__line__"], description))

    return Result(candidate.path, errors)


def check_shell_instead_command(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    description = "shell should only be used when piping, redirecting or chaining commands"

    if not errors:
        for task in tasks:
            if task["action"]["__ansible_module__"] == 'shell':
                if 'cmd' in task['action']:
                    cmd = task["action"].get("cmd", [])
                else:
                    cmd = ' '.join(task["action"].get("__ansible_arguments__", []))

                unjinja = re.sub(r"\{\{[^\}]*\}\}", "JINJA_VAR", cmd)
                if not any([ch in unjinja for ch in '&|<>;$\n*[]{}?']):
                    errors.append(Error(task["__line__"], description))

    return Result(candidate.path, errors)


def check_command_has_changes(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    commands = ['command', 'shell', 'raw']
    description = "commands should either read information (and thus set changed_when) or not " \
                  "do something if it has already been done (using creates/removes) " \
                  "or only do it if another check has a particular result (when)"

    if not errors:
        for task in tasks:
            if task["action"]["__ansible_module__"] in commands:
                if 'changed_when' not in task and 'when' not in task \
                        and 'creates' not in task['action'] and 'removes' not in task['action']:
                    errors.append(Error(task["__line__"], description))

    return Result(candidate.path, errors)


def check_empty_string_compare(candidate, settings):
    yamllines, errors = get_normalized_yaml(candidate, settings)
    description = 'use `when: var` rather than `when: var != ""` (or ' \
                  'conversely `when: not var` rather than `when: var == ""`)'

    lineno = 0
    empty_string_compare = re.compile("[=!]= ?[\"'][\"']")

    if not errors:
        for line in yamllines:
            lineno += 1
            if empty_string_compare.findall(line):
                errors.append(Error(lineno, description))

    return Result(candidate.path, errors)


def check_compare_to_literal_bool(candidate, settings):
    yamllines, errors = get_normalized_yaml(candidate, settings)
    description = "use `when: var` rather than `when: var == True` " \
                  "(or conversely `when: not var`)"

    lineno = 0
    literal_bool_compare = re.compile("[=!]= ?(True|true|False|false)")

    if not errors:
        for line in yamllines:
            lineno += 1
            if literal_bool_compare.findall(line):
                errors.append(Error(lineno, description))

    return Result(candidate.path, errors)


def check_delegate_to_localhost(candidate, settings):
    tasks, errors = get_normalized_tasks(candidate, settings)
    description = "connection: local ensures that unexpected delegated_vars " \
                  "don't get set (e.g. {{ inventory_hostname }} " \
                  "used by vars_files)"

    if not errors:
        for task in tasks:
            if task.get('delegate_to') == 'localhost':
                errors.append(Error(task["__line__"], description))

    return Result(candidate.path, errors)

ansiblelater/rules/rolefiles.py (Normal file, 29 lines)
@@ -0,0 +1,29 @@
from nested_lookup import nested_lookup

from ansiblelater import Error, Result
from ansiblelater.utils.rulehelper import get_raw_yaml, get_tasks


def check_meta_main(candidate, settings):
    content, errors = get_raw_yaml(candidate, settings)
    keys = ["author", "description", "min_ansible_version", "platforms", "dependencies"]
    description = "file should contain '%s' key"

    if not errors:
        for key in keys:
            if not nested_lookup(key, content):
                errors.append(Error(None, description % (key)))

    return Result(candidate.path, errors)


def check_scm_in_src(candidate, settings):
    roles, errors = get_tasks(candidate, settings)
    description = "usage of src: scm+url not recommended"

    if not errors:
        for role in roles:
            if '+' in role.get('src'):
                errors.append(Error(role['__line__'], description))

    return Result(candidate.path, errors)

ansiblelater/rules/taskfiles.py (Normal file, 32 lines)
@@ -0,0 +1,32 @@
import re

from collections import defaultdict

from ansiblelater import Error, Result
from ansiblelater.utils.rulehelper import get_normalized_yaml


def check_line_between_tasks(candidate, settings):
    options = defaultdict(dict)
    options.update(remove_empty=False)
    options.update(remove_markers=False)

    lines, errors = get_normalized_yaml(candidate, settings, options)
    description = "missing task separation (required: 1 empty line)"

    task_regex = re.compile(r"-\sname:.*")
    lineno = 0
    prevline = "#file_start_marker"

    allowed_prevline = ["---", "tasks:", "pre_tasks:", "post_tasks:", "block:"]

    if not errors:
        for line in lines:
            lineno += 1
            match = task_regex.search(line)
            if match and prevline:
                if not any(item in prevline for item in allowed_prevline):
                    errors.append(Error(lineno, description))
            prevline = line.strip()

    return Result(candidate.path, errors)
92
ansiblelater/rules/yamlfiles.py
Normal file
92
ansiblelater/rules/yamlfiles.py
Normal file
@ -0,0 +1,92 @@
import codecs
import yaml
import os

from ansiblelater import Result
from ansiblelater import Error
from ansiblelater.utils.rulehelper import get_action_tasks
from ansiblelater.utils.rulehelper import get_normalized_yaml
from ansiblelater.utils.rulehelper import get_normalized_task
from ansiblelater.utils.rulehelper import run_yamllint


def check_yaml_has_content(candidate, settings):
    lines, errors = get_normalized_yaml(candidate, settings)
    description = "the file appears to have no useful content"

    if not lines and not errors:
        errors.append(Error(None, description))

    return Result(candidate.path, errors)


def check_native_yaml(candidate, settings):
    tasks, errors = get_action_tasks(candidate, settings)
    description = "task arguments appear to be in key value rather than YAML format"

    if not errors:
        for task in tasks:
            normal_form, error = get_normalized_task(task, candidate, settings)
            if error:
                errors.extend(error)
                break

            action = normal_form['action']['__ansible_module__']
            arguments = normal_form['action']['__ansible_arguments__']
            # Cope with `set_fact` where task['set_fact'] is None
            if not task.get(action):
                continue
            if isinstance(task[action], dict):
                continue
            # strip additional newlines off task[action]
            if task[action].strip().split() != arguments:
                errors.append(Error(task['__line__'], description))
    return Result(candidate.path, errors)


def check_yaml_empty_lines(candidate, settings):
    options = "rules: {empty-lines: {max: 1, max-start: 0, max-end: 0}}"
    errors = run_yamllint(candidate, settings, options)
    return Result(candidate.path, errors)


def check_yaml_indent(candidate, settings):
    options = "rules: {indentation: {spaces: 2, check-multi-line-strings: false, indent-sequences: true}}"
    errors = run_yamllint(candidate, settings, options)
    return Result(candidate.path, errors)


def check_yaml_hyphens(candidate, settings):
    options = "rules: {hyphens: {max-spaces-after: 1}}"
    errors = run_yamllint(candidate, settings, options)
    return Result(candidate.path, errors)


def check_yaml_document_start(candidate, settings):
    options = "rules: {document-start: {present: true}}"
    errors = run_yamllint(candidate, settings, options)
    return Result(candidate.path, errors)


def check_yaml_colons(candidate, settings):
    options = "rules: {colons: {max-spaces-before: 0, max-spaces-after: 1}}"
    errors = run_yamllint(candidate, settings, options)
    return Result(candidate.path, errors)


def check_yaml_file(candidate, settings):
    errors = []
    filename = candidate.path

    if os.path.isfile(filename) and os.path.splitext(filename)[1][1:] != "yml":
        errors.append(
            Error(None, "file does not have a .yml extension"))
    elif os.path.isfile(filename) and os.path.splitext(filename)[1][1:] == "yml":
        with codecs.open(filename, mode='rb', encoding='utf-8') as f:
            try:
                yaml.safe_load(f)
            except Exception as e:
                errors.append(
                    Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))

    return Result(candidate.path, errors)
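All of the LINT checks above hand a one-rule configuration string to yamllint and collect the reported problems. The same call can be made directly against a string buffer, which is convenient when tuning rule options (a sketch using yamllint's linter.run and YamlLintConfig, the same API run_yamllint wraps):

from yamllint import linter
from yamllint.config import YamlLintConfig

buf = "foo:  bar\n"  # two spaces after the colon
conf = YamlLintConfig("rules: {colons: {max-spaces-before: 0, max-spaces-after: 1}}")

for problem in linter.run(buf, conf):
    # Each problem carries .line and .desc, which run_yamllint() turns into Error objects.
    print(problem.line, problem.desc)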
119
ansiblelater/utils/__init__.py
Normal file
@ -0,0 +1,119 @@
from __future__ import print_function

import importlib
import logging
import os
import subprocess
import sys
import re

from distutils.version import LooseVersion

try:
    import ConfigParser as configparser
except ImportError:
    import configparser

try:
    from ansible.utils.color import stringc
except ImportError:
    from ansible.color import stringc

# from yamlhelper import *


def abort(message, file=sys.stderr):
    print(stringc("FATAL: %s" % message, 'red'), file=file)
    sys.exit(1)


def error(message, file=sys.stderr):
    print(stringc("ERROR: %s" % message, 'red'), file=file)


def warn(message, settings, file=sys.stdout):
    if settings.log_level <= logging.WARNING:
        print(stringc("WARN: %s" % message, 'yellow'), file=file)


def info(message, settings, file=sys.stdout):
    if settings.log_level <= logging.INFO:
        print(stringc("INFO: %s" % message, 'green'), file=file)


def get_property(prop):
    currentdir = os.path.dirname(os.path.realpath(__file__))
    parentdir = os.path.dirname(currentdir)
    result = re.search(
        r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
        open(os.path.join(parentdir, '__init__.py')).read())
    return result.group(1)


def standards_latest(standards):
    return max([standard.version for standard in standards if standard.version] or ["0.1"],
               key=LooseVersion)


def lines_ranges(lines_spec):
    if not lines_spec:
        return None
    result = []
    for interval in lines_spec.split(","):
        (start, end) = interval.split("-")
        result.append(range(int(start), int(end) + 1))
    return result


def is_line_in_ranges(line, ranges):
    return not ranges or any([line in r for r in ranges])


def read_standards(settings):
    if not settings.rulesdir:
        abort("Standards directory is not set on command line or in configuration file - aborting")
    sys.path.append(os.path.abspath(os.path.expanduser(settings.rulesdir)))
    try:
        standards = importlib.import_module('standards')
    except ImportError as e:
        abort("Could not import standards from directory %s: %s" % (settings.rulesdir, str(e)))
    return standards


def read_config(config_file):
    config = configparser.RawConfigParser({'standards': None})
    config.read(config_file)

    return Settings(config, config_file)


def execute(cmd):
    result = ExecuteResult()
    encoding = 'UTF-8'
    env = dict(os.environ)
    env['PYTHONIOENCODING'] = encoding
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env)
    result.output = proc.communicate()[0].decode(encoding)
    result.rc = proc.returncode
    return result


class Settings(object):
    def __init__(self, config, config_file):
        self.rulesdir = None
        self.custom_modules = []
        self.log_level = None
        self.standards_filter = None

        if config.has_section('rules'):
            self.rulesdir = config.get('rules', 'standards')
        if config.has_section('ansible'):
            modules = config.get('ansible', 'custom_modules')
            self.custom_modules = [x.strip() for x in modules.split(',')]

        self.configfile = config_file


class ExecuteResult(object):
    pass
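lines_ranges() and is_line_in_ranges() implement a small line-filter syntax: a comma-separated list of inclusive start-end intervals. For example:

ranges = lines_ranges("1-3,10-12")   # [range(1, 4), range(10, 13)]

is_line_in_ranges(2, ranges)    # True  (inside 1-3)
is_line_in_ranges(5, ranges)    # False (between the intervals)
is_line_in_ranges(10, ranges)   # True  (inside 10-12)
is_line_in_ranges(7, None)      # True  (no restriction given)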
24
ansiblelater/utils/exceptions.py
Normal file
@ -0,0 +1,24 @@
import re


# Custom exceptions
class LaterError(Exception):
    """Generic exception for later"""

    def __init__(self, msg, original):
        super(LaterError, self).__init__(msg + (": %s" % original))
        self.original = original


class LaterAnsibleError(Exception):
    """Wrapper for ansible syntax errors"""

    def __init__(self, msg, original):
        lines = original.message.splitlines()

        line_no = re.search('line(.*?),', lines[2])
        column_no = re.search('column(.*?),', lines[2])

        self.message = lines[0]
        self.line = line_no.group(1).strip()
        self.column = column_no.group(1).strip()
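Both exception classes keep a reference to the original error so the rule helpers can translate it into an Error with a usable line number. A minimal sketch of how LaterError composes its message (the wrapped ValueError is just a stand-in):

try:
    raise ValueError("unexpected token")
except ValueError as e:
    wrapped = LaterError("syntax error", e)

print(str(wrapped))       # "syntax error: unexpected token"
print(wrapped.original)   # the original exception, kept for callers that need details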
135
ansiblelater/utils/rulehelper.py
Normal file
@ -0,0 +1,135 @@
import codecs
import yaml

from collections import defaultdict
from yamllint import linter
from yamllint.config import YamlLintConfig
# Workaround for import errors with ansible 2.1 and 2.3
from ansible.parsing.dataloader import DataLoader
from ansiblelater import Error
from .yamlhelper import normalize_task
from .yamlhelper import action_tasks
from .yamlhelper import parse_yaml_linenumbers
from .yamlhelper import normalized_yaml
from .exceptions import LaterError, LaterAnsibleError


def get_tasks(candidate, settings):
    errors = []
    try:
        with codecs.open(candidate.path, mode='rb', encoding='utf-8') as f:
            yamllines = parse_yaml_linenumbers(f, candidate.path)

    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))
    except LaterAnsibleError as e:
        errors.append(Error(e.line, "syntax error: %s" % (e.message)))

    return yamllines, errors


def get_action_tasks(candidate, settings):
    tasks = []
    errors = []
    try:
        with codecs.open(candidate.path, mode='rb', encoding='utf-8') as f:
            yamllines = parse_yaml_linenumbers(f, candidate.path)

        if yamllines:
            tasks = action_tasks(yamllines, candidate)
    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))
    except LaterAnsibleError as e:
        errors.append(Error(e.line, "syntax error: %s" % (e.message)))

    return tasks, errors


def get_normalized_task(task, candidate, settings):
    normalized = None
    errors = []
    try:
        normalized = normalize_task(task, candidate.path, settings.custom_modules)
    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))
    except LaterAnsibleError as e:
        errors.append(Error(e.line, "syntax error: %s" % (e.message)))

    return normalized, errors


def get_normalized_tasks(candidate, settings):
    normalized = []
    errors = []
    try:
        with codecs.open(candidate.path, mode='rb', encoding='utf-8') as f:
            yamllines = parse_yaml_linenumbers(f, candidate.path)

        if yamllines:
            tasks = action_tasks(yamllines, candidate)
            for task in tasks:
                # An empty `tags` block causes `None` to be returned if
                # the `or []` is not present - `task.get('tags', [])`
                # does not suffice.
                if 'skip_ansible_lint' in (task.get('tags') or []):
                    # No need to normalize_task if we are skipping it.
                    continue
                normalized.append(normalize_task(task, candidate.path, settings.custom_modules))

    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))
    except LaterAnsibleError as e:
        errors.append(Error(e.line, "syntax error: %s" % (e.message)))

    return normalized, errors


def get_normalized_yaml(candidate, settings, options=None):
    errors = []

    if not options:
        options = defaultdict(dict)
        options.update(remove_empty=True)
        options.update(remove_markers=True)

    try:
        yamllines = normalized_yaml(candidate.path, options)
    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))
    except LaterAnsibleError as e:
        errors.append(Error(e.line, "syntax error: %s" % (e.message)))

    return yamllines, errors


def get_raw_yaml(candidate, settings):
    content = None
    errors = []

    try:
        with codecs.open(candidate.path, mode='rb', encoding='utf-8') as f:
            content = yaml.safe_load(f)

    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))

    return content, errors


def run_yamllint(candidate, settings, options="extends: default"):
    errors = []
    try:
        with codecs.open(candidate.path, mode='rb', encoding='utf-8') as f:
            for problem in linter.run(f, YamlLintConfig(options)):
                errors.append(Error(problem.line, problem.desc))
    except LaterError as ex:
        e = ex.original
        errors.append(Error(e.problem_mark.line + 1, "syntax error: %s" % (e.problem)))

    return errors
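Every helper in this module follows the same contract: it returns a (data, errors) pair, and the calling rule folds the errors into a Result keyed by the candidate path. A hedged sketch of a custom check built on get_normalized_yaml(); the rule and its wording are invented for illustration, and it would be registered through a Standard entry such as the ones in tests/config/standards.py further down:

from collections import defaultdict

from ansiblelater import Error, Result
from ansiblelater.utils.rulehelper import get_normalized_yaml


def check_no_tabs(candidate, settings):
    # Hypothetical rule: flag literal tab characters. Keep empty lines and
    # markers so reported line numbers stay close to the real file (comment
    # lines are still stripped by normalized_yaml, so numbers are approximate).
    options = defaultdict(dict)
    options.update(remove_empty=False)
    options.update(remove_markers=False)

    lines, errors = get_normalized_yaml(candidate, settings, options)
    description = "line contains a tab character"

    if not errors:
        for lineno, line in enumerate(lines, start=1):
            if "\t" in line:
                errors.append(Error(lineno, description))

    return Result(candidate.path, errors)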
508
ansiblelater/utils/yamlhelper.py
Normal file
@ -0,0 +1,508 @@
# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import glob
import imp
import os
import codecs
import inspect

import six
import ansible.parsing.mod_args
from ansible import constants
from ansible.errors import AnsibleError
from .exceptions import LaterError, LaterAnsibleError

try:
    # Try to import the Ansible 2 module first, it's the future-proof one
    from ansible.parsing.splitter import split_args

except ImportError:
    # Fallback on the Ansible 1.9 module
    from ansible.module_utils.splitter import split_args

import yaml
from yaml.composer import Composer

from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.errors import AnsibleParserError

# ansible-later doesn't need/want to know about encrypted secrets, but it needs to parse them.
# Ansible 2.3+ allows encrypted secrets within yaml files, so we pass a string
# as the password to enable such yaml files to be opened and parsed successfully.
DEFAULT_VAULT_PASSWORD = 'x'


def parse_yaml_from_file(filepath):
    dl = DataLoader()
    if hasattr(dl, 'set_vault_password'):
        dl.set_vault_password(DEFAULT_VAULT_PASSWORD)
    return dl.load_from_file(filepath)


def path_dwim(basedir, given):
    dl = DataLoader()
    dl.set_basedir(basedir)
    return dl.path_dwim(given)


def ansible_template(basedir, varname, templatevars, **kwargs):
    dl = DataLoader()
    dl.set_basedir(basedir)
    templar = Templar(dl, variables=templatevars)
    return templar.template(varname, **kwargs)


try:
    from ansible.plugins import module_loader
except ImportError:
    from ansible.plugins.loader import module_loader

LINE_NUMBER_KEY = '__line__'
FILENAME_KEY = '__file__'

VALID_KEYS = [
    'name', 'action', 'when', 'async', 'poll', 'notify',
    'first_available_file', 'include', 'import_playbook',
    'tags', 'register', 'ignore_errors', 'delegate_to',
    'local_action', 'transport', 'remote_user', 'sudo',
    'sudo_user', 'sudo_pass', 'when', 'connection', 'environment', 'args', 'always_run',
    'any_errors_fatal', 'changed_when', 'failed_when', 'check_mode', 'delay',
    'retries', 'until', 'su', 'su_user', 'su_pass', 'no_log', 'run_once',
    'become', 'become_user', 'become_method', FILENAME_KEY,
]

BLOCK_NAME_TO_ACTION_TYPE_MAP = {
    'tasks': 'task',
    'handlers': 'handler',
    'pre_tasks': 'task',
    'post_tasks': 'task',
    'block': 'meta',
    'rescue': 'meta',
    'always': 'meta',
}


def load_plugins(directory):
    result = []
    fh = None

    for pluginfile in glob.glob(os.path.join(directory, '[A-Za-z]*.py')):

        pluginname = os.path.basename(pluginfile.replace('.py', ''))
        try:
            fh, filename, desc = imp.find_module(pluginname, [directory])
            mod = imp.load_module(pluginname, fh, filename, desc)
            obj = getattr(mod, pluginname)()
            result.append(obj)
        finally:
            if fh:
                fh.close()
    return result


def tokenize(line):
    tokens = line.lstrip().split(" ")
    if tokens[0] == '-':
        tokens = tokens[1:]
    if tokens[0] == 'action:' or tokens[0] == 'local_action:':
        tokens = tokens[1:]
    command = tokens[0].replace(":", "")

    args = list()
    kwargs = dict()
    nonkvfound = False
    for arg in tokens[1:]:
        if "=" in arg and not nonkvfound:
            kv = arg.split("=", 1)
            kwargs[kv[0]] = kv[1]
        else:
            nonkvfound = True
            args.append(arg)
    return (command, args, kwargs)


def _playbook_items(pb_data):
    if isinstance(pb_data, dict):
        return pb_data.items()
    elif not pb_data:
        return []
    else:
        return [item for play in pb_data for item in play.items()]


def find_children(playbook, playbook_dir):
    if not os.path.exists(playbook[0]):
        return []
    if playbook[1] == 'role':
        playbook_ds = {'roles': [{'role': playbook[0]}]}
    else:
        try:
            playbook_ds = parse_yaml_from_file(playbook[0])
        except AnsibleError as e:
            raise SystemExit(str(e))
    results = []
    basedir = os.path.dirname(playbook[0])
    items = _playbook_items(playbook_ds)
    for item in items:
        for child in play_children(basedir, item, playbook[1], playbook_dir):
            if "$" in child['path'] or "{{" in child['path']:
                continue
            valid_tokens = list()
            for token in split_args(child['path']):
                if '=' in token:
                    break
                valid_tokens.append(token)
            path = ' '.join(valid_tokens)
            results.append({
                'path': path_dwim(basedir, path),
                'type': child['type']
            })
    return results


def template(basedir, value, vars, fail_on_undefined=False, **kwargs):
    try:
        value = ansible_template(os.path.abspath(basedir), value, vars,
                                 **dict(kwargs, fail_on_undefined=fail_on_undefined))
    # Hack to skip the following exception when using to_json filter on a variable.
    # I guess the filter doesn't like empty vars...
    except (AnsibleError, ValueError):
        # templating failed, so just keep value as is.
        pass
    return value


def play_children(basedir, item, parent_type, playbook_dir):
    delegate_map = {
        'tasks': _taskshandlers_children,
        'pre_tasks': _taskshandlers_children,
        'post_tasks': _taskshandlers_children,
        'block': _taskshandlers_children,
        'include': _include_children,
        'import_playbook': _include_children,
        'roles': _roles_children,
        'dependencies': _roles_children,
        'handlers': _taskshandlers_children,
    }
    (k, v) = item
    play_library = os.path.join(os.path.abspath(basedir), 'library')
    _load_library_if_exists(play_library)

    if k in delegate_map:
        if v:
            v = template(os.path.abspath(basedir),
                         v,
                         dict(playbook_dir=os.path.abspath(basedir)),
                         fail_on_undefined=False)
        return delegate_map[k](basedir, k, v, parent_type)
    return []


def _include_children(basedir, k, v, parent_type):
    # handle include: filename.yml tags=blah
    (command, args, kwargs) = tokenize("{0}: {1}".format(k, v))

    result = path_dwim(basedir, args[0])
    if not os.path.exists(result) and not basedir.endswith('tasks'):
        result = path_dwim(os.path.join(basedir, '..', 'tasks'), v)
    return [{'path': result, 'type': parent_type}]


def _taskshandlers_children(basedir, k, v, parent_type):
    results = []
    for th in v:
        if 'include' in th:
            append_children(th['include'], basedir, k, parent_type, results)
        elif 'include_tasks' in th:
            append_children(th['include_tasks'], basedir, k, parent_type, results)
        elif 'import_playbook' in th:
            append_children(th['import_playbook'], basedir, k, parent_type, results)
        elif 'import_tasks' in th:
            append_children(th['import_tasks'], basedir, k, parent_type, results)
        elif 'import_role' in th:
            results.extend(_roles_children(basedir, k, [th['import_role'].get('name')], parent_type,
                                           main=th['import_role'].get('tasks_from', 'main')))
        elif 'include_role' in th:
            results.extend(_roles_children(basedir, k, [th['include_role'].get('name')],
                                           parent_type,
                                           main=th['include_role'].get('tasks_from', 'main')))
        elif 'block' in th:
            results.extend(_taskshandlers_children(basedir, k, th['block'], parent_type))
            if 'rescue' in th:
                results.extend(_taskshandlers_children(basedir, k, th['rescue'], parent_type))
            if 'always' in th:
                results.extend(_taskshandlers_children(basedir, k, th['always'], parent_type))
    return results


def append_children(taskhandler, basedir, k, parent_type, results):
    # when taskshandlers_children is called for playbooks, the
    # actual type of the included tasks is the section containing the
    # include, e.g. tasks, pre_tasks, or handlers.
    if parent_type == 'playbook':
        playbook_section = k
    else:
        playbook_section = parent_type
    results.append({
        'path': path_dwim(basedir, taskhandler),
        'type': playbook_section
    })


def _roles_children(basedir, k, v, parent_type, main='main'):
    results = []
    for role in v:
        if isinstance(role, dict):
            if 'role' in role or 'name' in role:
                if 'tags' not in role or 'skip_ansible_later' not in role['tags']:
                    results.extend(_look_for_role_files(basedir,
                                                        role.get('role', role.get('name')),
                                                        main=main))
            else:
                raise SystemExit('role dict {0} does not contain a "role" '
                                 'or "name" key'.format(role))
        else:
            results.extend(_look_for_role_files(basedir, role, main=main))
    return results


def _load_library_if_exists(path):
    if os.path.exists(path):
        module_loader.add_directory(path)


def _rolepath(basedir, role):
    role_path = None

    possible_paths = [
        # if included from a playbook
        path_dwim(basedir, os.path.join('roles', role)),
        path_dwim(basedir, role),
        # if included from roles/[role]/meta/main.yml
        path_dwim(
            basedir, os.path.join('..', '..', '..', 'roles', role)
        ),
        path_dwim(basedir, os.path.join('..', '..', role))
    ]

    if constants.DEFAULT_ROLES_PATH:
        search_locations = constants.DEFAULT_ROLES_PATH
        if isinstance(search_locations, six.string_types):
            search_locations = search_locations.split(os.pathsep)
        for loc in search_locations:
            loc = os.path.expanduser(loc)
            possible_paths.append(path_dwim(loc, role))

    for path_option in possible_paths:
        if os.path.isdir(path_option):
            role_path = path_option
            break

    if role_path:
        _load_library_if_exists(os.path.join(role_path, 'library'))

    return role_path


def _look_for_role_files(basedir, role, main='main'):
    role_path = _rolepath(basedir, role)
    if not role_path:
        return []

    results = []

    for th in ['tasks', 'handlers', 'meta']:
        for ext in ('.yml', '.yaml'):
            thpath = os.path.join(role_path, th, main + ext)
            if os.path.exists(thpath):
                results.append({'path': thpath, 'type': th})
                break
    return results


def rolename(filepath):
    idx = filepath.find('roles/')
    if idx < 0:
        return ''
    role = filepath[idx + 6:]
    role = role[:role.find('/')]
    return role


def _kv_to_dict(v):
    (command, args, kwargs) = tokenize(v)
    return (dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs))


def normalize_task(task, filename, custom_modules=[]):
    '''Ensures tasks have an action key and strings are converted to python objects'''
    ansible_action_type = task.get('__ansible_action_type__', 'task')
    if '__ansible_action_type__' in task:
        del(task['__ansible_action_type__'])

    normalized = dict()
    # TODO: Workaround for custom modules
    builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
    builtin = list(set(builtin + custom_modules))
    ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
    mod_arg_parser = ModuleArgsParser(task)
    try:
        action, arguments, normalized['delegate_to'] = mod_arg_parser.parse()
    except AnsibleParserError as e:
        raise LaterAnsibleError("syntax error", e)

    # denormalize shell -> command conversion
    if '_uses_shell' in arguments:
        action = 'shell'
        del(arguments['_uses_shell'])

    for (k, v) in list(task.items()):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action:
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue
        else:
            normalized[k] = v

    normalized['action'] = dict(__ansible_module__=action)

    if '_raw_params' in arguments:
        normalized['action']['__ansible_arguments__'] = arguments['_raw_params'].split(' ')
        del(arguments['_raw_params'])
    else:
        normalized['action']['__ansible_arguments__'] = list()
    normalized['action'].update(arguments)

    normalized[FILENAME_KEY] = filename
    normalized['__ansible_action_type__'] = ansible_action_type
    return normalized


def action_tasks(yaml, file):
    tasks = list()
    if file['filetype'] in ['tasks', 'handlers']:
        tasks = add_action_type(yaml, file['filetype'])
    else:
        tasks.extend(extract_from_list(yaml, ['tasks', 'handlers', 'pre_tasks', 'post_tasks']))

    # Add sub-elements of block/rescue/always to tasks list
    tasks.extend(extract_from_list(tasks, ['block', 'rescue', 'always']))
    # Remove block/rescue/always elements from tasks list
    block_rescue_always = ('block', 'rescue', 'always')
    tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)]

    return [task for task in tasks if set(
        ['include', 'include_tasks', 'import_playbook', 'import_tasks']).isdisjoint(task.keys())]


def task_to_str(task):
    name = task.get("name")
    if name:
        return name
    action = task.get("action")
    args = " ".join([u"{0}={1}".format(k, v) for (k, v) in action.items()
                     if k not in ["__ansible_module__", "__ansible_arguments__"]
                     ] + action.get("__ansible_arguments__"))
    return u"{0} {1}".format(action["__ansible_module__"], args)


def extract_from_list(blocks, candidates):
    results = list()
    for block in blocks:
        for candidate in candidates:
            if isinstance(block, dict) and candidate in block:
                if isinstance(block[candidate], list):
                    results.extend(add_action_type(block[candidate], candidate))
                elif block[candidate] is not None:
                    raise RuntimeError(
                        "Key '%s' defined, but bad value: '%s'" %
                        (candidate, str(block[candidate])))
    return results


def add_action_type(actions, action_type):
    results = list()
    for action in actions:
        action['__ansible_action_type__'] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
        results.append(action)
    return results


def parse_yaml_linenumbers(data, filename):
    """Parses yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        kwargs = {}
        if 'vault_password' in inspect.getargspec(AnsibleLoader.__init__).args:
            kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
        loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise LaterError("syntax error", e)
    return data


def normalized_yaml(file, options):
    lines = []
    removes = []

    try:
        with codecs.open(file, mode='rb', encoding='utf-8') as f:
            lines = f.readlines()
        for line in lines:
            if line.strip().startswith("#"):
                removes.append(line)
            # remove document starter also
            if options.get("remove_markers") and line.strip() == "---":
                removes.append(line)
            # remove empty lines
            if options.get("remove_empty") and not line.strip():
                removes.append(line)
        for line in removes:
            lines.remove(line)
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise LaterError("syntax error", e)
    return lines
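tokenize() is the piece that turns an old-style key=value action line into a (module, args, kwargs) triple; key=value pairs are only collected until the first bare argument appears, everything after that is treated as a positional argument. A quick trace with made-up input:

command, args, kwargs = tokenize("- command: chdir=/tmp ls -la")

print(command)  # 'command'
print(args)     # ['ls', '-la']
print(kwargs)   # {'chdir': '/tmp'}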
10
setup.cfg
Normal file
@ -0,0 +1,10 @@
[metadata]
description-file = README.md
license_file = LICENSE

[bdist_wheel]
universal = 1

[flake8]
max-line-length = 100
exclude = .git,.hg,.svn,test,setup.py,__pycache__
76
setup.py
Normal file
@ -0,0 +1,76 @@
#!/usr/bin/env python
"""Setup script for the package."""

import re
import os
import io
from setuptools import find_packages
from setuptools import setup

PACKAGE_NAME = "ansiblelater"


def get_property(prop, project):
    current_dir = os.path.dirname(os.path.realpath(__file__))
    result = re.search(
        r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
        open(os.path.join(current_dir, project, '__init__.py')).read())
    return result.group(1)


def get_readme(filename='README.md'):
    this = os.path.abspath(os.path.dirname(__file__))
    with io.open(os.path.join(this, filename), encoding='utf-8') as f:
        long_description = f.read()
    return long_description


setup(
    name=get_property("__project__", PACKAGE_NAME),
    version=get_property("__version__", PACKAGE_NAME),
    description=("Reviews ansible playbooks, roles and inventories and suggests improvements."),
    keywords="ansible code review",
    author=get_property("__author__", PACKAGE_NAME),
    author_email=get_property("__email__", PACKAGE_NAME),
    url="https://github.com/xoxys/ansible-later",
    license=get_property("__license__", PACKAGE_NAME),
    long_description=get_readme(),
    long_description_content_type='text/markdown',
    packages=find_packages(exclude=["test", "test.*"]),
    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'Natural Language :: English',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Utilities',
    ],
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        "ansible-lint>=3.4.1",
        "pyyaml",
        "appdirs",
        "unidiff",
        "flake8",
        "yamllint",
        "nested-lookup"
    ],
    entry_points={
        'console_scripts': [
            'ansible-later = ansiblelater.__main__:main'
        ]
    },
    test_suite="test"
)
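setup.py reads its metadata straight out of ansiblelater/__init__.py with a regular expression instead of importing the package. The pattern matches simple module-level string assignments; a sketch of what it expects (the file content and values here are placeholders, not the project's real metadata):

import re

# Hypothetical ansiblelater/__init__.py content.
init_py = '__project__ = "ansible-later"\n__version__ = "0.0.0"\n'

prop = "__version__"
result = re.search(r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop), init_py)
print(result.group(1))  # '0.0.0'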
2
tests/config/config.ini
Normal file
@ -0,0 +1,2 @@
[rules]
standards = tests/config
227
tests/config/standards.py
Normal file
@ -0,0 +1,227 @@
from ansiblelater import Standard

from ansiblelater.rules.yamlfiles import check_yaml_empty_lines
from ansiblelater.rules.yamlfiles import check_yaml_indent
from ansiblelater.rules.yamlfiles import check_yaml_hyphens
from ansiblelater.rules.yamlfiles import check_yaml_document_start
from ansiblelater.rules.yamlfiles import check_yaml_colons
from ansiblelater.rules.yamlfiles import check_yaml_file
from ansiblelater.rules.yamlfiles import check_yaml_has_content
from ansiblelater.rules.yamlfiles import check_native_yaml
from ansiblelater.rules.taskfiles import check_line_between_tasks
from ansiblelater.rules.rolefiles import check_meta_main
from ansiblelater.rules.rolefiles import check_scm_in_src
from ansiblelater.rules.ansiblefiles import check_unique_named_task
from ansiblelater.rules.ansiblefiles import check_named_task
from ansiblelater.rules.ansiblefiles import check_name_format
from ansiblelater.rules.ansiblefiles import check_braces_spaces
from ansiblelater.rules.ansiblefiles import check_command_instead_of_module
from ansiblelater.rules.ansiblefiles import check_install_use_latest
from ansiblelater.rules.ansiblefiles import check_shell_instead_command
from ansiblelater.rules.ansiblefiles import check_command_has_changes
from ansiblelater.rules.ansiblefiles import check_empty_string_compare
from ansiblelater.rules.ansiblefiles import check_compare_to_literal_bool


tasks_should_be_separated = Standard(dict(
    id="ANSIBLE0001",
    name="Single tasks should be separated by empty line",
    check=check_line_between_tasks,
    version="0.1",
    types=["playbook", "task", "handler"]
))

role_must_contain_meta_main = Standard(dict(
    id="ANSIBLE0002",
    name="Roles must contain suitable meta/main.yml",
    check=check_meta_main,
    version="0.1",
    types=["meta"]
))

tasks_are_uniquely_named = Standard(dict(
    id="ANSIBLE0003",
    name="Tasks and handlers must be uniquely named within a single file",
    check=check_unique_named_task,
    version="0.1",
    types=["playbook", "task", "handler"],
))

use_spaces_between_variable_braces = Standard(dict(
    id="ANSIBLE0004",
    name="YAML should use consistent number of spaces around variables",
    check=check_braces_spaces,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

roles_scm_not_in_src = Standard(dict(
    id="ANSIBLE0005",
    name="Use scm key rather than src: scm+url",
    check=check_scm_in_src,
    version="0.1",
    types=["rolesfile"]
))

tasks_are_named = Standard(dict(
    id="ANSIBLE0006",
    name="Tasks and handlers must be named",
    check=check_named_task,
    version="0.1",
    types=["playbook", "task", "handler"],
))

tasks_names_are_formatted = Standard(dict(
    id="ANSIBLE0007",
    name="Name of tasks and handlers must be formatted",
    check=check_name_format,
    version="0.1",
    types=["playbook", "task", "handler"],
))

commands_should_not_be_used_in_place_of_modules = Standard(dict(
    id="ANSIBLE0008",
    name="Commands should not be used in place of modules",
    check=check_command_instead_of_module,
    version="0.1",
    types=["playbook", "task", "handler"]
))

package_installs_should_not_use_latest = Standard(dict(
    id="ANSIBLE0009",
    name="Package installs should use present, not latest",
    check=check_install_use_latest,
    types=["playbook", "task", "handler"]
))

use_shell_only_when_necessary = Standard(dict(
    id="ANSIBLE0010",
    name="Shell should only be used when essential",
    check=check_shell_instead_command,
    types=["playbook", "task", "handler"]
))

commands_should_be_idempotent = Standard(dict(
    id="ANSIBLE0011",
    name="Commands should be idempotent",
    check=check_command_has_changes,
    version="0.1",
    types=["playbook", "task"]
))

dont_compare_to_empty_string = Standard(dict(
    id="ANSIBLE0012",
    name="Don't compare to \"\" - use `when: var` or `when: not var`",
    check=check_empty_string_compare,
    version="0.1",
    types=["playbook", "task", "handler", "template"]
))

dont_compare_to_literal_bool = Standard(dict(
    id="ANSIBLE0013",
    name="Don't compare to True or False - use `when: var` or `when: not var`",
    check=check_compare_to_literal_bool,
    version="0.1",
    types=["playbook", "task", "handler", "template"]
))

files_should_not_contain_unnecessarily_empty_lines = Standard(dict(
    id="LINT0001",
    name="YAML should not contain unnecessarily empty lines",
    check=check_yaml_empty_lines,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

files_should_be_indented = Standard(dict(
    id="LINT0002",
    name="YAML should be correctly indented",
    check=check_yaml_indent,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

files_should_use_consistent_spaces_after_hyphens = Standard(dict(
    id="LINT0003",
    name="YAML should use consistent number of spaces after hyphens",
    check=check_yaml_hyphens,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

files_should_contain_document_start_marker = Standard(dict(
    id="LINT0004",
    name="YAML should contain document start marker",
    check=check_yaml_document_start,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

spaces_around_colons = Standard(dict(
    id="LINT0005",
    name="YAML should use consistent number of spaces around colons",
    check=check_yaml_colons,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars",
           "hostvars", "groupvars", "meta"]
))

rolesfile_should_be_in_yaml = Standard(dict(
    id="LINT0006",
    name="Roles file should be in yaml format",
    check=check_yaml_file,
    version="0.1",
    types=["rolesfile"]
))

files_should_not_be_purposeless = Standard(dict(
    id="LINT0007",
    name="Files should contain useful content",
    check=check_yaml_has_content,
    version="0.1",
    types=["playbook", "task", "handler", "rolevars", "defaults", "meta"]
))

use_yaml_rather_than_key_value = Standard(dict(
    id="LINT0008",
    name="Use YAML format for tasks and handlers rather than key=value",
    check=check_native_yaml,
    version="0.1",
    types=["playbook", "task", "handler"]
))


ansible_min_version = '2.1'
ansible_review_min_version = '0.1.2'


standards = [
    # Ansible
    tasks_should_be_separated,
    role_must_contain_meta_main,
    tasks_are_uniquely_named,
    use_spaces_between_variable_braces,
    roles_scm_not_in_src,
    tasks_are_named,
    tasks_names_are_formatted,
    commands_should_not_be_used_in_place_of_modules,
    package_installs_should_not_use_latest,
    use_shell_only_when_necessary,
    commands_should_be_idempotent,
    dont_compare_to_empty_string,
    dont_compare_to_literal_bool,
    # Lint
    files_should_not_contain_unnecessarily_empty_lines,
    files_should_be_indented,
    files_should_use_consistent_spaces_after_hyphens,
    files_should_contain_document_start_marker,
    spaces_around_colons,
    rolesfile_should_be_in_yaml,
    files_should_not_be_purposeless,
    use_yaml_rather_than_key_value,
]
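Standards are plain data: each entry wires an id, a human-readable name, a check function, the version it was introduced in, and the candidate types it applies to, and the standards list at the end appears to be what the tool picks up after read_standards() imports this module. Adding a local rule therefore only takes a few lines; a sketch with an invented id and a hypothetical check function:

# Hypothetical additional standard; check_no_tabs would be a custom rule
# following the same (candidate, settings) -> Result signature as the
# checks imported above.
custom_no_tabs = Standard(dict(
    id="CUSTOM0001",
    name="Task files must not contain tab characters",
    check=check_no_tabs,
    version="0.1",
    types=["playbook", "task", "handler"]
))

standards.append(custom_no_tabs)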
5
tests/data/yaml_fail.yml
Normal file
@ -0,0 +1,5 @@
- start:
    - overindented
   - misaligned
  - next:
   - underindented
17
tests/data/yaml_success.yml
Normal file
@ -0,0 +1,17 @@
# Standards: 0.1
---
- block:
    - name: hello
      command: echo hello

    - name: task2
      debug:
        msg: hello
      when: some_var_is_true

    - name: another task
      debug:
        msg: another msg

- fail:
    msg: this is actually valid indentation
5
tox-requirements.txt
Normal file
@ -0,0 +1,5 @@
flake8
pep8-naming
wheel
flake8-colors
pytest
41
tox.ini
Normal file
@ -0,0 +1,41 @@
[tox]
minversion = 1.6
envlist = py27-ansible{21},py{27,36}-ansible{22,23,24,25,26,27},py{27,36}-flake8

[testenv]
deps =
    -rtox-requirements.txt
    ansible21: ansible>=2.1,<2.2
    ansible22: ansible>=2.2,<2.3
    ansible23: ansible>=2.3,<2.4
    ansible24: ansible>=2.4,<2.5
    ansible25: ansible>=2.5,<2.6
    ansible26: ansible>=2.6,<2.7
    ansible27: ansible>=2.7,<2.8

commands = ansible-later -c tests/config/config.ini tests/data/yaml_success.yml
passenv = HOME

[testenv:py27-flake8]
commands = flake8 ansiblelater
usedevelop = True

[testenv:py36-flake8]
commands = flake8 ansiblelater
usedevelop = True

[flake8]
ignore = E501, W503, F401
exclude =
    .tox,
    .git,
    __pycache__,
    build,
    dist,
    tests/fixtures/*,
    *.pyc,
    *.egg-info,
    .cache,
    .eggs

format = %(path)s:%(row)d:%(col)d: ${red}%(code)s %(text)s${reset}