mirror of https://github.com/thegeeklab/ansible-later.git

refactor logging and candidates

parent 592da84e85
commit be74192c67
@@ -8,327 +8,6 @@ __maintainer__ = "Robert Kaussow"
 __email__ = "mail@geeklabor.de"
 __status__ = "Production"
 
-import codecs
-import os
-import re
-from distutils.version import LooseVersion
 from ansiblelater import logger
-
-import ansible
-
-from ansiblelater.utils import (get_property, is_line_in_ranges, lines_ranges,
-                                read_standards, standards_latest)
-from ansiblelater.exceptions import (  # noqa
-    LaterError, LaterAnsibleError
-)
-
-try:
-    # Ansible 2.4 import of module loader
-    from ansible.plugins.loader import module_loader
-except ImportError:
-    try:
-        from ansible.plugins import module_loader
-    except ImportError:
-        from ansible.utils import module_finder as module_loader
-
-
-class Standard(object):
-    """
-    Standard definition for all defined rules.
-
-    Later lookup the config file for a path to a rules directory
-    or fallback to default `ansiblelater/data/*`.
-    """
-
-    def __init__(self, standard_dict):
-        """
-        Initialize a new standard object and returns None.
-
-        :param standard_dict: Dictionary object containing all neseccary attributes
-
-        """
-        if "id" not in standard_dict:
-            standard_dict.update(id="")
-        else:
-            standard_dict.update(id="[{}] ".format(standard_dict.get("id")))
-
-        self.id = standard_dict.get("id")
-        self.name = standard_dict.get("name")
-        self.version = standard_dict.get("version")
-        self.check = standard_dict.get("check")
-        self.types = standard_dict.get("types")
-
-    def __repr__(self):  # noqa
-        return "Standard: %s (version: %s, types: %s)" % (
-            self.name, self.version, self.types)
-
-
-class Candidate(object):
-    """
-    Meta object for all files which later has to process.
-
-    Each file passed to later will be classified by type and
-    bundled with necessary meta informations for rule processing.
-    """
-
-    def __init__(self, filename):
-        self.path = filename
-        self.binary = False
-        self.vault = False
-
-        try:
-            self.version = find_version(filename)
-            with codecs.open(filename, mode="rb", encoding="utf-8") as f:
-                if f.readline().startswith("$ANSIBLE_VAULT"):
-                    self.vault = True
-        except UnicodeDecodeError:
-            self.binary = True
-
-        self.filetype = type(self).__name__.lower()
-        self.expected_version = True
-
-    def review(self, settings, lines=None):
-        return candidate_review(self, settings, lines)
-
-    def __repr__(self):  # noqa
-        return "%s (%s)" % (type(self).__name__, self.path)
-
-    def __getitem__(self, item):  # noqa
-        return self.__dict__.get(item)
-
-
-class Error(object):
-    """Default error object created if a rule failed."""
-
-    def __init__(self, lineno, message):
-        """
-        Initialize a new error object and returns None.
-
-        :param lineno: Line number where the error from de rule occures
-        :param message: Detailed error description provided by the rule
-
-        """
-        self.lineno = lineno
-        self.message = message
-
-    def __repr__(self):  # noqa
-        if self.lineno:
-            return "%s: %s" % (self.lineno, self.message)
-        else:
-            return " %s" % (self.message)
-
-
-class Result(object):
-    def __init__(self, candidate, errors=None):
-        self.candidate = candidate
-        self.errors = errors or []
-
-    def message(self):
-        return "\n".join(["{0}:{1}".format(self.candidate, error)
-                          for error in self.errors])
-
-
-class RoleFile(Candidate):
-    def __init__(self, filename):
-        super(RoleFile, self).__init__(filename)
-        self.version = None
-        parentdir = os.path.dirname(os.path.abspath(filename))
-        while parentdir != os.path.dirname(parentdir):
-            meta_file = os.path.join(parentdir, "meta", "main.yml")
-            if os.path.exists(meta_file):
-                self.version = find_version(meta_file)
-                if self.version:
-                    break
-            parentdir = os.path.dirname(parentdir)
-        role_modules = os.path.join(parentdir, "library")
-        if os.path.exists(role_modules):
-            module_loader.add_directory(role_modules)
-
-
-class Playbook(Candidate):
-    pass
-
-
-class Task(RoleFile):
-    def __init__(self, filename):
-        super(Task, self).__init__(filename)
-        self.filetype = "tasks"
-
-
-class Handler(RoleFile):
-    def __init__(self, filename):
-        super(Handler, self).__init__(filename)
-        self.filetype = "handlers"
-
-
-class Vars(Candidate):
-    pass
-
-
-class Unversioned(Candidate):
-    def __init__(self, filename):
-        super(Unversioned, self).__init__(filename)
-        self.expected_version = False
-
-
-class InventoryVars(Unversioned):
-    pass
-
-
-class HostVars(InventoryVars):
-    pass
-
-
-class GroupVars(InventoryVars):
-    pass
-
-
-class RoleVars(RoleFile):
-    pass
-
-
-class Meta(RoleFile):
-    pass
-
-
-class Inventory(Unversioned):
-    pass
-
-
-class Code(Unversioned):
-    pass
-
-
-class Template(RoleFile):
-    pass
-
-
-class Doc(Unversioned):
-    pass
-
-
-class Makefile(Unversioned):
-    pass
-
-
-class File(RoleFile):
-    pass
-
-
-class Rolesfile(Unversioned):
-    pass
-
-
-def classify(filename):
-    parentdir = os.path.basename(os.path.dirname(filename))
-
-    if parentdir in ["tasks"]:
-        return Task(filename)
-    if parentdir in ["handlers"]:
-        return Handler(filename)
-    if parentdir in ["vars", "defaults"]:
-        return RoleVars(filename)
-    if "group_vars" in filename.split(os.sep):
-        return GroupVars(filename)
-    if "host_vars" in filename.split(os.sep):
-        return HostVars(filename)
-    if parentdir in ["meta"]:
-        return Meta(filename)
-    if parentdir in ["library", "lookup_plugins", "callback_plugins",
-                     "filter_plugins"] or filename.endswith(".py"):
-        return Code(filename)
-    if "inventory" in filename or "hosts" in filename or parentdir in ["inventory"]:
-        return Inventory(filename)
-    if "rolesfile" in filename or "requirements" in filename:
-        return Rolesfile(filename)
-    if "Makefile" in filename:
-        return Makefile(filename)
-    if "templates" in filename.split(os.sep) or filename.endswith(".j2"):
-        return Template(filename)
-    if "files" in filename.split(os.sep):
-        return File(filename)
-    if filename.endswith(".yml") or filename.endswith(".yaml"):
-        return Playbook(filename)
-    if "README" in filename:
-        return Doc(filename)
-    return None
-
-
-def candidate_review(candidate, settings, lines=None):
-    errors = 0
-    standards = read_standards(settings)
-    if getattr(standards, "ansible_min_version", None) and \
-            LooseVersion(standards.ansible_min_version) > LooseVersion(ansible.__version__):
-        raise SystemExit("Standards require ansible version %s (current version %s). "
-                         "Please upgrade ansible." %
-                         (standards.ansible_min_version, ansible.__version__))
-
-    if getattr(standards, "ansible_review_min_version", None) and \
-            LooseVersion(standards.ansible_review_min_version) > LooseVersion(
-                get_property("__version__")):
-        raise SystemExit("Standards require ansible-later version %s (current version %s). "
-                         "Please upgrade ansible-later." %
-                         (standards.ansible_review_min_version, get_property("__version__")))
-
-    if not candidate.version:
-        candidate.version = standards_latest(standards.standards)
-        if candidate.expected_version:
-            if isinstance(candidate, RoleFile):
-                logger.warn("%s %s is in a role that contains a meta/main.yml without a declared "
-                            "standards version. "
-                            "Using latest standards version %s" %
-                            (type(candidate).__name__, candidate.path, candidate.version),
-                            settings)
-            else:
-                logger.warn("%s %s does not present standards version. "
-                            "Using latest standards version %s" %
-                            (type(candidate).__name__, candidate.path, candidate.version),
-                            settings)
-
-    info("%s %s declares standards version %s" %
-         (type(candidate).__name__, candidate.path, candidate.version),
-         settings)
-
-    for standard in standards.standards:
-        print(type(standard))
-        if type(candidate).__name__.lower() not in standard.types:
-            continue
-        if settings.standards_filter and standard.name not in settings.standards_filter:
-            continue
-        result = standard.check(candidate, settings)
-
-        if not result:
-            abort("Standard '%s' returns an empty result object." %
-                  (standard.check.__name__))
-
-        for err in [err for err in result.errors
-                    if not err.lineno or is_line_in_ranges(err.lineno, lines_ranges(lines))]:
-            if not standard.version:
-                warn("{id}Best practice '{name}' not met:\n{path}:{error}".format(
-                    id=standard.id, name=standard.name, path=candidate.path, error=err), settings)
-            elif LooseVersion(standard.version) > LooseVersion(candidate.version):
-                warn("{id}Future standard '{name}' not met:\n{path}:{error}".format(
-                    id=standard.id, name=standard.name, path=candidate.path, error=err), settings)
-            else:
-                error("{id}Standard '{name}' not met:\n{path}:{error}".format(
-                    id=standard.id, name=standard.name, path=candidate.path, error=err))
-                errors = errors + 1
-        if not result.errors:
-            if not standard.version:
-                info("Best practice '%s' met" % standard.name, settings)
-            elif LooseVersion(standard.version) > LooseVersion(candidate.version):
-                info("Future standard '%s' met" % standard.name, settings)
-            else:
-                info("Standard '%s' met" % standard.name, settings)
-
-    return errors
-
-
-def find_version(filename, version_regex=r"^# Standards:\s*([\d.]+)"):
-    version_re = re.compile(version_regex)
-
-    with codecs.open(filename, mode="rb", encoding="utf-8") as f:
-        for line in f:
-            match = version_re.match(line)
-            if match:
-                return match.group(1)
-    return None
+
+LOG = logger.get_logger("ansiblelater")
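The package root is reduced to the shared logger instance above. A minimal sketch of how other modules are expected to pick it up (matching the `from ansiblelater import LOG` import in the CLI hunk below); the handler wiring itself lives in `ansiblelater.logger`:

    # Sketch only: consuming the package-level logger.
    import logging

    from ansiblelater import LOG   # created via logger.get_logger("ansiblelater")

    LOG.setLevel(logging.WARNING)                     # in main() the level comes from settings
    LOG.info("Reviewing all of site.yml")             # suppressed at WARNING level
    LOG.warning("file does not declare a standards version")   # emitted via the warn handler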
@@ -3,61 +3,55 @@
 import argparse
 import json
 import logging
 import os
 
 from ansiblelater import __version__
 from ansiblelater import LOG
 from ansiblelater.command import base
 from ansiblelater.command import candidates
 
 
 def main():
     parser = argparse.ArgumentParser(
         description="Validate ansible files against best pratice guideline")
-    parser.add_argument("-c", dest="config_file",
+    parser.add_argument("-c", "--config", dest="config_file",
                         help="Location of configuration file")
-    parser.add_argument("-d", dest="rules.standards",
+    parser.add_argument("-r", "--rules", dest="rules.standards",
                         help="Location of standards rules")
-    parser.add_argument("-q", dest="logging.level", action="store_const",
+    parser.add_argument("-q", "--quiet", dest="logging.level", action="store_const",
                         const=logging.ERROR, help="Only output errors")
-    parser.add_argument("-s", dest="rules.filter", action="append",
+    parser.add_argument("-s", "--standards", dest="rules.filter", action="append",
                         help="limit standards to specific names")
-    parser.add_argument("-v", "--verbose", dest="logging.level", action="count",
+    parser.add_argument("-v", dest="logging.level", action="count",
                         help="Show more verbose output")
     parser.add_argument("rules.files", nargs="*")
     parser.add_argument("--version", action="version", version="%(prog)s {}".format(__version__))
 
     args = parser.parse_args().__dict__
 
     settings = base.get_settings(args)
-    print(json.dumps(settings.config, indent=4, sort_keys=True))
+    # print(json.dumps(settings.config, indent=4, sort_keys=True))
     LOG.setLevel(settings.config["logging"]["level"])
     files = settings.config["rules"]["files"]
     standards = base.get_standards(settings.config["rules"]["standards"])
 
     # if len(args) == 0:
     #     candidates = []
     #     for root, dirs, files in os.walk("."):
     #         for filename in files:
     #             candidates.append(os.path.join(root, filename))
     # else:
     #     candidates = args
 
     # errors = 0
     # for filename in candidates:
     #     if ":" in filename:
     #         (filename, lines) = filename.split(":")
     #     else:
     #         lines = None
     #     candidate = classify(filename)
     #     if candidate:
     #         if candidate.binary:
     #             info("Not reviewing binary file %s" % filename, settings)
     #             continue
     #         if candidate.vault:
     #             info("Not reviewing vault file %s" % filename, settings)
     #             continue
     #         if lines:
     #             info("Reviewing %s lines %s" % (candidate, lines), settings)
     #         else:
     #             info("Reviewing all of %s" % candidate, settings)
     #         errors = errors + candidate.review(settings, lines)
     #     else:
     #         info("Couldn't classify file %s" % filename, settings)
     errors = []
     for filename in files:
         lines = None
         candidate = candidates.classify(filename, settings, standards)
         if candidate:
             if candidate.binary:
                 LOG.info("Not reviewing binary file %s" % filename)
                 continue
             if candidate.vault:
                 LOG.info("Not reviewing vault file %s" % filename)
                 continue
             if lines:
                 LOG.info("Reviewing %s lines %s" % (candidate, lines))
             else:
                 LOG.info("Reviewing all of %s" % candidate)
             errors = errors + candidate.review(settings, lines)
         else:
             LOG.info("Couldn't classify file %s" % filename)
     # return errors
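The dotted `dest` names ("rules.standards", "logging.level") suggest the flat argparse dict is expanded into the nested settings config. A rough, self-contained illustration of that expansion in the style of the `add_dict_branch` helper shown in the utils hunk further down; the actual wiring inside `Settings` is not part of this diff and is assumed here:

    # Hypothetical expansion of dotted argparse dests into nested config keys.
    def add_dict_branch(tree, vector, value):
        key = vector[0]
        tree[key] = value if len(vector) == 1 else \
            add_dict_branch(tree.get(key, {}), vector[1:], value)
        return tree

    args = {"rules.standards": "~/rules", "logging.level": 30, "rules.files": ["site.yml"]}
    config = {}
    for dotted, value in args.items():
        add_dict_branch(config, dotted.split("."), value)

    assert config["rules"]["standards"] == "~/rules"
    assert config["logging"]["level"] == 30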
@@ -1,9 +1,49 @@
-from ansiblelater import settings
+"""Base methods."""
+
+import os
+import sys
+import importlib
+
+import ansible
+
+from distutils.version import LooseVersion
+
+from ansiblelater import settings
+from ansiblelater import utils
 
 
 def get_settings(args):
+    """
+    Get new settings object.
+
+    :param args: cli args from argparse
+    :returns: Settings object
+
+    """
     config = settings.Settings(
         args=args,
     )
 
     return config
+
+
+def get_standards(filepath):
+    sys.path.append(os.path.abspath(os.path.expanduser(filepath)))
+    try:
+        standards = importlib.import_module('standards')
+    except ImportError as e:
+        utils.sysexit_with_message("Could not import standards from directory %s: %s" % (filepath, str(e)))
+
+    if getattr(standards, "ansible_min_version", None) and \
+            LooseVersion(standards.ansible_min_version) > LooseVersion(ansible.__version__):
+        utils.sysexit_with_message("Standards require ansible version %s (current version %s). "
+                                   "Please upgrade ansible." %
+                                   (standards.ansible_min_version, ansible.__version__))
+
+    if getattr(standards, "ansible_review_min_version", None) and \
+            LooseVersion(standards.ansible_review_min_version) > LooseVersion(
+                utils.get_property("__version__")):
+        utils.sysexit_with_message("Standards require ansible-later version %s (current version %s). "
+                                   "Please upgrade ansible-later." %
+                                   (standards.ansible_review_min_version, utils.get_property("__version__")))
+
+    return standards.standards
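get_standards() imports a module literally named `standards` from the configured rules directory and reads its `standards` list plus the optional minimum-version attributes checked above. A hedged sketch of such a rules module, borrowing the check imports visible in the data hunk below; the ids, names and versions here are made up:

    # standards.py inside the rules directory -- illustrative only.
    from ansiblelater.standard import Standard
    from ansiblelater.rules.yamlfiles import check_yaml_empty_lines
    from ansiblelater.rules.yamlfiles import check_yaml_indent

    ansible_min_version = "2.1"
    ansible_review_min_version = "0.1.0"

    standards = [
        Standard(dict(
            id="LINT0001",
            name="YAML should not contain unnecessarily empty lines",
            check=check_yaml_empty_lines,
            version="0.1",
            types=["playbook", "task", "handler", "rolevars", "meta"],
        )),
        Standard(dict(
            name="YAML should be correctly indented",
            check=check_yaml_indent,
            version="0.1",
            types=["playbook", "task", "handler", "rolevars", "meta"],
        )),
    ]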
ansiblelater/command/candidates.py (new file, 260 lines)
@@ -0,0 +1,260 @@
"""Candidate module."""
|
||||
|
||||
|
||||
import codecs
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
import ansible
|
||||
|
||||
from ansiblelater import LOG
|
||||
from ansiblelater.utils import (get_property, is_line_in_ranges, lines_ranges, standards_latest)
|
||||
from ansiblelater.exceptions import ( # noqa
|
||||
LaterError, LaterAnsibleError
|
||||
)
|
||||
|
||||
try:
|
||||
# Ansible 2.4 import of module loader
|
||||
from ansible.plugins.loader import module_loader
|
||||
except ImportError:
|
||||
try:
|
||||
from ansible.plugins import module_loader
|
||||
except ImportError:
|
||||
from ansible.utils import module_finder as module_loader
|
||||
|
||||
|
||||
class Candidate(object):
|
||||
"""
|
||||
Meta object for all files which later has to process.
|
||||
|
||||
Each file passed to later will be classified by type and
|
||||
bundled with necessary meta informations for rule processing.
|
||||
"""
|
||||
|
||||
def __init__(self, filename, settings={}, standards=[]):
|
||||
self.path = filename
|
||||
self.binary = False
|
||||
self.vault = False
|
||||
self.standards = standards
|
||||
self.filetype = type(self).__name__.lower()
|
||||
self.expected_version = True
|
||||
self.version = self._find_version(settings)
|
||||
|
||||
try:
|
||||
with codecs.open(filename, mode="rb", encoding="utf-8") as f:
|
||||
if f.readline().startswith("$ANSIBLE_VAULT"):
|
||||
self.vault = True
|
||||
except UnicodeDecodeError:
|
||||
self.binary = True
|
||||
|
||||
def _find_version(self, settings):
|
||||
if isinstance(self, RoleFile):
|
||||
parentdir = os.path.dirname(os.path.abspath(self.path))
|
||||
while parentdir != os.path.dirname(parentdir):
|
||||
meta_file = os.path.join(parentdir, "meta", "main.yml")
|
||||
if os.path.exists(meta_file):
|
||||
path = meta_file
|
||||
parentdir = os.path.dirname(parentdir)
|
||||
else:
|
||||
path = self.path
|
||||
|
||||
version = None
|
||||
version_re = re.compile(r"^# Standards:\s*([\d.]+)")
|
||||
|
||||
with codecs.open(path, mode="rb", encoding="utf-8") as f:
|
||||
for line in f:
|
||||
match = version_re.match(line)
|
||||
if match:
|
||||
version = match.group(1)
|
||||
|
||||
if not version:
|
||||
version = standards_latest(self.standards)
|
||||
if self.expected_version:
|
||||
if isinstance(self, RoleFile):
|
||||
LOG.warn("%s %s is in a role that contains a meta/main.yml without a declared "
|
||||
"standards version. "
|
||||
"Using latest standards version %s" %
|
||||
(type(self).__name__, self.path, version))
|
||||
else:
|
||||
LOG.warn("%s %s does not present standards version. "
|
||||
"Using latest standards version %s" %
|
||||
(type(self).__name__, self.path, version))
|
||||
|
||||
LOG.info("%s %s declares standards version %s" %
|
||||
(type(self).__name__, self.path, version))
|
||||
|
||||
return version
|
||||
|
||||
def review(self, settings, lines=None):
|
||||
errors = 0
|
||||
|
||||
for standard in standards.standards:
|
||||
print(type(standard))
|
||||
if type(candidate).__name__.lower() not in standard.types:
|
||||
continue
|
||||
if settings.standards_filter and standard.name not in settings.standards_filter:
|
||||
continue
|
||||
result = standard.check(candidate, settings)
|
||||
|
||||
if not result:
|
||||
abort("Standard '%s' returns an empty result object." %
|
||||
(standard.check.__name__))
|
||||
|
||||
for err in [err for err in result.errors
|
||||
if not err.lineno or is_line_in_ranges(err.lineno, lines_ranges(lines))]:
|
||||
if not standard.version:
|
||||
warn("{id}Best practice '{name}' not met:\n{path}:{error}".format(
|
||||
id=standard.id, name=standard.name, path=candidate.path, error=err), settings)
|
||||
elif LooseVersion(standard.version) > LooseVersion(candidate.version):
|
||||
warn("{id}Future standard '{name}' not met:\n{path}:{error}".format(
|
||||
id=standard.id, name=standard.name, path=candidate.path, error=err), settings)
|
||||
else:
|
||||
error("{id}Standard '{name}' not met:\n{path}:{error}".format(
|
||||
id=standard.id, name=standard.name, path=candidate.path, error=err))
|
||||
errors = errors + 1
|
||||
if not result.errors:
|
||||
if not standard.version:
|
||||
info("Best practice '%s' met" % standard.name, settings)
|
||||
elif LooseVersion(standard.version) > LooseVersion(candidate.version):
|
||||
info("Future standard '%s' met" % standard.name, settings)
|
||||
else:
|
||||
info("Standard '%s' met" % standard.name, settings)
|
||||
|
||||
return errors
|
||||
|
||||
def __repr__(self): # noqa
|
||||
return "%s (%s)" % (type(self).__name__, self.path)
|
||||
|
||||
def __getitem__(self, item): # noqa
|
||||
return self.__dict__.get(item)
|
||||
|
||||
|
||||
class RoleFile(Candidate):
|
||||
def __init__(self, filename, settings={}, standards=[]):
|
||||
super(RoleFile, self).__init__(filename, settings, standards)
|
||||
self.version = None
|
||||
# parentdir = os.path.dirname(os.path.abspath(filename))
|
||||
# while parentdir != os.path.dirname(parentdir):
|
||||
# meta_file = os.path.join(parentdir, "meta", "main.yml")
|
||||
# if os.path.exists(meta_file):
|
||||
# self.version = self._find_version(meta_file)
|
||||
# if self.version:
|
||||
# break
|
||||
|
||||
# role_modules = os.path.join(parentdir, "library")
|
||||
# if os.path.exists(role_modules):
|
||||
# module_loader.add_directory(role_modules)
|
||||
|
||||
|
||||
class Playbook(Candidate):
|
||||
pass
|
||||
|
||||
|
||||
class Task(RoleFile):
|
||||
def __init__(self, filename, settings={}, standards=[]):
|
||||
super(Task, self).__init__(filename, settings, standards)
|
||||
self.filetype = "tasks"
|
||||
|
||||
|
||||
class Handler(RoleFile):
|
||||
def __init__(self, filename, settings={}, standards=[]):
|
||||
super(Handler, self).__init__(filename, settings, standards)
|
||||
self.filetype = "handlers"
|
||||
|
||||
|
||||
class Vars(Candidate):
|
||||
pass
|
||||
|
||||
|
||||
class Unversioned(Candidate):
|
||||
def __init__(self, filename, settings={}, standards=[]):
|
||||
super(Unversioned, self).__init__(filename, settings, standards)
|
||||
self.expected_version = False
|
||||
|
||||
|
||||
class InventoryVars(Unversioned):
|
||||
pass
|
||||
|
||||
|
||||
class HostVars(InventoryVars):
|
||||
pass
|
||||
|
||||
|
||||
class GroupVars(InventoryVars):
|
||||
pass
|
||||
|
||||
|
||||
class RoleVars(RoleFile):
|
||||
pass
|
||||
|
||||
|
||||
class Meta(RoleFile):
|
||||
pass
|
||||
|
||||
|
||||
class Inventory(Unversioned):
|
||||
pass
|
||||
|
||||
|
||||
class Code(Unversioned):
|
||||
pass
|
||||
|
||||
|
||||
class Template(RoleFile):
|
||||
pass
|
||||
|
||||
|
||||
class Doc(Unversioned):
|
||||
pass
|
||||
|
||||
|
||||
class Makefile(Unversioned):
|
||||
pass
|
||||
|
||||
|
||||
class File(RoleFile):
|
||||
pass
|
||||
|
||||
|
||||
class Rolesfile(Unversioned):
|
||||
pass
|
||||
|
||||
|
||||
def classify(filename, settings={}, standards=[]):
|
||||
parentdir = os.path.basename(os.path.dirname(filename))
|
||||
basename = os.path.basename(filename)
|
||||
|
||||
if parentdir in ["tasks"]:
|
||||
return Task(filename, settings, standards)
|
||||
if parentdir in ["handlers"]:
|
||||
return Handler(filename, settings, standards)
|
||||
if parentdir in ["vars", "defaults"]:
|
||||
return RoleVars(filename, settings, standards)
|
||||
if "group_vars" in filename.split(os.sep):
|
||||
return GroupVars(filename, settings, standards)
|
||||
if "host_vars" in filename.split(os.sep):
|
||||
return HostVars(filename, settings, standards)
|
||||
if parentdir in ["meta"]:
|
||||
return Meta(filename, settings, standards)
|
||||
if parentdir in ["library", "lookup_plugins", "callback_plugins",
|
||||
"filter_plugins"] or filename.endswith(".py"):
|
||||
return Code(filename, settings, standards)
|
||||
if "inventory" in basename or "hosts" in basename or parentdir in ["inventory"]:
|
||||
print("hosts" in filename)
|
||||
return Inventory(filename, settings, standards)
|
||||
if "rolesfile" in basename or "requirements" in basename:
|
||||
return Rolesfile(filename, settings, standards)
|
||||
if "Makefile" in basename:
|
||||
return Makefile(filename, settings, standards)
|
||||
if "templates" in filename.split(os.sep) or basename.endswith(".j2"):
|
||||
return Template(filename, settings, standards)
|
||||
if "files" in filename.split(os.sep):
|
||||
return File(filename, settings, standards)
|
||||
if basename.endswith(".yml") or basename.endswith(".yaml"):
|
||||
return Playbook(filename, settings, standards)
|
||||
if "README" in basename:
|
||||
return Doc(filename, settings, standards)
|
||||
return None
|
||||
|
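Roughly how the refactored classify() entry point is meant to be driven from main() above, assuming a loaded settings object and standards list; unknown, binary and vault files are skipped the same way the CLI does:

    # Illustrative driver for the refactored classify() API.
    from ansiblelater.command import candidates

    def review_files(files, settings, standards):
        errors = 0
        for filename in files:
            candidate = candidates.classify(filename, settings, standards)
            if candidate is None:
                continue                              # unknown file type
            if candidate.binary or candidate.vault:
                continue                              # nothing reviewable
            errors += candidate.review(settings)      # intended to return the violation count
        return errors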
ansiblelater/command/review.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""Review candidates."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class Error(object):
|
||||
"""Default error object created if a rule failed."""
|
||||
|
||||
def __init__(self, lineno, message):
|
||||
"""
|
||||
Initialize a new error object and returns None.
|
||||
|
||||
:param lineno: Line number where the error from de rule occures
|
||||
:param message: Detailed error description provided by the rule
|
||||
|
||||
"""
|
||||
self.lineno = lineno
|
||||
self.message = message
|
||||
|
||||
def __repr__(self): # noqa
|
||||
if self.lineno:
|
||||
return "%s: %s" % (self.lineno, self.message)
|
||||
else:
|
||||
return " %s" % (self.message)
|
||||
|
||||
|
||||
class Result(object):
|
||||
def __init__(self, candidate, errors=None):
|
||||
self.candidate = candidate
|
||||
self.errors = errors or []
|
||||
|
||||
def message(self):
|
||||
return "\n".join(["{0}:{1}".format(self.candidate, error)
|
||||
for error in self.errors])
|
||||
|
||||
|
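Error and Result are the contract between a rule and the reviewer: standard.check(candidate, settings) is expected to hand back a Result whose errors list stays empty on success. A minimal hand-written check as an illustration (the rule itself is invented and not part of ansible-later):

    # Hypothetical custom rule built on Result/Error.
    import codecs

    from ansiblelater.command.review import Error, Result

    def check_no_tabs(candidate, settings):
        errors = []
        with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
            for lineno, line in enumerate(f, start=1):
                if "\t" in line:
                    errors.append(Error(lineno, "line contains a tab character"))
        return Result(candidate.path, errors)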
@@ -1,4 +1,4 @@
-from ansiblelater import Standard
+from ansiblelater.standard import Standard
 
 from ansiblelater.rules.yamlfiles import check_yaml_empty_lines
 from ansiblelater.rules.yamlfiles import check_yaml_indent
@@ -54,6 +54,7 @@ def get_logger(name=None, level=logging.DEBUG, json=False):
     logger.addHandler(_get_error_handler(json=json))
     logger.addHandler(_get_warn_handler(json=json))
     logger.addHandler(_get_info_handler(json=json))
+    logger.addHandler(_get_critical_handler(json=json))
     logger.propagate = False
 
     return logger
@@ -95,8 +96,20 @@ def _get_info_handler(json=False):
     return handler
 
 
-def abort(message):
-    """Format abort messages and return string."""
+def _get_critical_handler(json=False):
+    handler = logging.StreamHandler(sys.stderr)
+    handler.setLevel(logging.CRITICAL)
+    handler.addFilter(LogFilter(logging.CRITICAL))
+    handler.setFormatter(logging.Formatter(critical("%(message)s")))
+
+    if json:
+        handler.setFormatter(jsonlogger.JsonFormatter("%(message)s"))
+
+    return handler
+
+
+def critical(message):
+    """Format critical messages and return string."""
     return color_text(colorama.Fore.RED, "FATAL: {}".format(message))
 
 
@@ -2,7 +2,7 @@ import re
 import os
 
 from collections import defaultdict
-from ansiblelater import Result, Error
+from ansiblelater.command.review import Result, Error
 from ansiblelater.utils import count_spaces
 from ansiblelater.utils.rulehelper import (get_normalized_tasks,
                                            get_normalized_yaml)
@@ -1,6 +1,6 @@
 from nested_lookup import nested_lookup
 
-from ansiblelater import Error, Result
+from ansiblelater.command.review import Error, Result
 from ansiblelater.utils.rulehelper import get_raw_yaml, get_tasks
 
 
@@ -2,7 +2,7 @@ import re
 
 from collections import defaultdict
 
-from ansiblelater import Error, Result
+from ansiblelater.command.review import Error, Result
 from ansiblelater.utils.rulehelper import get_normalized_yaml
 
 
@@ -2,8 +2,7 @@ import codecs
 import yaml
 import os
 
-from ansiblelater import Result
-from ansiblelater import Error
+from ansiblelater.command.review import Result, Error
 from ansiblelater.utils.rulehelper import get_action_tasks
 from ansiblelater.utils.rulehelper import get_normalized_yaml
 from ansiblelater.utils.rulehelper import get_normalized_task
@@ -13,8 +13,6 @@ from ansiblelater import logger, utils
 config_dir = AppDirs("ansible-later").user_config_dir
 default_config_file = os.path.join(config_dir, "config.yml")
 
-logger = logger.get_logger(__name__)
-
 
 class Settings(object):
     """
@@ -56,6 +54,8 @@ class Settings(object):
         tmp_dict["logging"]["level"] = levels[
             min(len(levels) - 1, tmp_dict["logging"]["level"] - 1)]
 
+        tmp_dict["rules"]["files"] = self._get_files(tmp_dict)
+
         return tmp_dict
 
     def _get_config(self):
@@ -92,6 +92,17 @@ class Settings(object):
 
         return defaults
 
+    def _get_files(self, args):
+        if len(args["rules"]["files"]) == 0:
+            filelist = []
+            for root, dirs, files in os.walk("."):
+                for filename in files:
+                    filelist.append(os.path.join(root, filename))
+        else:
+            filelist = args["rules"]["files"]
+
+        return filelist
+
     def _validate(self, config):
         try:
             anyconfig.validate(config, self.schema, ac_schema_safe=False)
@@ -101,4 +112,4 @@ class Settings(object):
                 validator=e.validator,
                 schema=format_as_index(list(e.relative_schema_path)[:-1])
             )
-            logger.error("{schema}: {msg}".format(schema=schema_error, msg=e.message))
+            utils.sysexit_with_message("{schema}: {msg}".format(schema=schema_error, msg=e.message))
ansiblelater/standard.py (new file, 31 lines)
@@ -0,0 +1,31 @@
class Standard(object):
    """
    Standard definition for all defined rules.

    Later lookup the config file for a path to a rules directory
    or fallback to default `ansiblelater/data/*`.
    """

    def __init__(self, standard_dict):
        """
        Initialize a new standard object and returns None.

        :param standard_dict: Dictionary object containing all neseccary attributes

        """
        if "id" not in standard_dict:
            standard_dict.update(id="")
        else:
            standard_dict.update(id="[{}] ".format(standard_dict.get("id")))

        self.id = standard_dict.get("id")
        self.name = standard_dict.get("name")
        self.version = standard_dict.get("version")
        self.check = standard_dict.get("check")
        self.types = standard_dict.get("types")

    def __repr__(self):  # noqa
        return "Standard: %s (version: %s, types: %s)" % (
            self.name, self.version, self.types)
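Moving Standard into its own module keeps the dict-based constructor untouched; a small usage sketch showing the optional id prefix handling (the check callable is a dummy stand-in):

    # Illustrative Standard construction; dummy_check is a placeholder.
    from ansiblelater.standard import Standard

    def dummy_check(candidate, settings):
        return None

    with_id = Standard(dict(id="ANSIBLE0002", name="Trailing whitespace",
                            check=dummy_check, version="0.1", types=["playbook", "task"]))
    without_id = Standard(dict(name="Trailing whitespace", check=dummy_check,
                               version="0.1", types=["playbook", "task"]))

    print(repr(with_id))    # Standard: Trailing whitespace (version: 0.1, types: ['playbook', 'task'])
    print(with_id.id)       # "[ANSIBLE0002] " -- used as a message prefix
    print(without_id.id)    # ""  -- empty prefix when no id is given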
@@ -10,6 +10,7 @@ import colorama
 
 import yaml
 from distutils.version import LooseVersion
+from ansiblelater import logger
 from ansible.module_utils.parsing.convert_bool import boolean as to_bool
 
 try:
@@ -17,6 +18,10 @@ try:
 except ImportError:
     import configparser
 
+
+LOG = logger.get_logger(__name__)
+
+
 def count_spaces(c_string):
     leading_spaces = 0
     trailing_spaces = 0
@@ -62,16 +67,6 @@ def is_line_in_ranges(line, ranges):
     return not ranges or any([line in r for r in ranges])
 
 
-def read_standards(settings):
-    if not settings.rulesdir:
-        abort("Standards directory is not set on command line or in configuration file - aborting")
-    sys.path.append(os.path.abspath(os.path.expanduser(settings.rulesdir)))
-    try:
-        standards = importlib.import_module('standards')
-    except ImportError as e:
-        abort("Could not import standards from directory %s: %s" % (settings.rulesdir, str(e)))
-    return standards
-
-
 def read_config(config_file):
     config = configparser.RawConfigParser({'standards': None})
@@ -112,3 +107,12 @@ def add_dict_branch(tree, vector, value):
                              vector[1:],
                              value)
     return tree
+
+
+def sysexit(code=1):
+    sys.exit(code)
+
+
+def sysexit_with_message(msg, code=1):
+    LOG.critical(msg)
+    sysexit(code)
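Together with the new CRITICAL handler in the logger module, sysexit_with_message() replaces the old abort() helper: the message is logged through LOG.critical() (rendered as "FATAL: ...") and the process exits with code 1. A short sketch of the intended call pattern; the guard condition and path are made up:

    # Sketch of the new fatal-error path -- this exits the interpreter.
    import os

    from ansiblelater import utils

    rules_dir = os.path.expanduser("~/ansible-later-rules")   # hypothetical location
    if not os.path.isdir(rules_dir):
        utils.sysexit_with_message(
            "Standards directory %s does not exist" % rules_dir)   # LOG.critical + sys.exit(1)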
@@ -6,12 +6,12 @@ from yamllint import linter
 from yamllint.config import YamlLintConfig
 # Workaround for import errors with ansble 2.1 and 2.3
 from ansible.parsing.dataloader import DataLoader
-from ansiblelater import Error
+from ansiblelater.command.review import Error
 from .yamlhelper import normalize_task
 from .yamlhelper import action_tasks
 from .yamlhelper import parse_yaml_linenumbers
 from .yamlhelper import normalized_yaml
-from ansiblelater import LaterError, LaterAnsibleError
+from ansiblelater.exceptions import LaterError, LaterAnsibleError
 
 
 def get_tasks(candidate, settings):
@@ -28,7 +28,7 @@ import six
 import ansible.parsing.mod_args
 from ansible import constants
 from ansible.errors import AnsibleError
-from ansiblelater import LaterAnsibleError, LaterError
+from ansiblelater.exceptions import LaterAnsibleError, LaterError
 
 try:
     # Try to import the Ansible 2 module first, it's the future-proof one