2019-10-08 09:39:27 +00:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
"""Global settings definition."""
|
2019-10-07 12:44:45 +00:00
|
|
|
|
2019-10-07 06:52:00 +00:00
|
|
|
import os
|
2019-10-07 12:44:45 +00:00
|
|
|
import sys
|
2019-10-07 06:52:00 +00:00
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
import anyconfig
|
2019-10-08 09:30:31 +00:00
|
|
|
import jsonschema.exceptions
|
2019-10-08 09:39:27 +00:00
|
|
|
import yaml
|
2019-10-07 12:44:45 +00:00
|
|
|
from appdirs import AppDirs
|
|
|
|
from jsonschema._utils import format_as_index
|
|
|
|
from pkg_resources import resource_filename
|
2019-10-07 06:52:00 +00:00
|
|
|
|
2019-10-08 09:30:31 +00:00
|
|
|
import ansibledoctor.Exception
|
2019-10-08 09:39:27 +00:00
|
|
|
from ansibledoctor.Utils import Singleton
|
2019-10-07 06:52:00 +00:00
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
# OS-specific user configuration directory resolved via appdirs
# (e.g. ~/.config/ansible-doctor on Linux).
config_dir = AppDirs("ansible-doctor").user_config_dir
# Default user-level configuration file inside that directory.
default_config_file = os.path.join(config_dir, "config.yml")
|
|
|
|
|
|
|
|
|
|
|
|
class Config():

    """
    Create an object with all necessary settings.

    Settings are loaded from multiple locations in defined order (last wins):
    - default settings defined by `self._get_defaults()`
    - yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
    - provided cli parameters
    """
|
|
|
|
|
|
|
|
def __init__(self, args={}, config_file=None):
|
|
|
|
"""
|
|
|
|
Initialize a new settings class.
|
2019-10-07 06:52:00 +00:00
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
:param args: An optional dict of options, arguments and commands from the CLI.
|
|
|
|
:param config_file: An optional path to a yaml config file.
|
|
|
|
:returns: None
|
|
|
|
|
|
|
|
"""
|
|
|
|
self.config_file = None
|
|
|
|
self.schema = None
|
|
|
|
self.args = self._set_args(args)
|
2019-10-08 09:30:31 +00:00
|
|
|
self.base_dir = self._set_base_dir()
|
2019-10-07 12:44:45 +00:00
|
|
|
self.is_role = self._set_is_role() or False
|
2019-10-08 09:30:31 +00:00
|
|
|
self.dry_run = self._set_dry_run() or False
|
|
|
|
self.config = self._get_config()
|
2019-10-07 12:44:45 +00:00
|
|
|
self._annotations = self._set_annotations()
|
2019-10-08 12:57:46 +00:00
|
|
|
self._post_processing()
|
2019-10-07 12:44:45 +00:00
|
|
|
|
|
|
|
def _set_args(self, args):
|
|
|
|
defaults = self._get_defaults()
|
2019-10-08 09:30:31 +00:00
|
|
|
if args.get("config_file"):
|
|
|
|
self.config_file = os.path.abspath(os.path.expanduser(os.path.expandvars(args.get("config_file"))))
|
|
|
|
else:
|
|
|
|
self.config_file = default_config_file
|
2019-10-07 12:44:45 +00:00
|
|
|
|
|
|
|
args.pop("config_file", None)
|
|
|
|
tmp_args = dict(filter(lambda item: item[1] is not None, args.items()))
|
|
|
|
|
|
|
|
tmp_dict = {}
|
|
|
|
for key, value in tmp_args.items():
|
|
|
|
tmp_dict = self._add_dict_branch(tmp_dict, key.split("."), value)
|
|
|
|
|
|
|
|
# Override correct log level from argparse
|
|
|
|
levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
|
|
|
|
log_level = levels.index(defaults["logging"]["level"])
|
|
|
|
if tmp_dict.get("logging"):
|
|
|
|
for adjustment in tmp_dict["logging"]["level"]:
|
|
|
|
log_level = min(len(levels) - 1, max(log_level + adjustment, 0))
|
|
|
|
tmp_dict["logging"]["level"] = levels[log_level]
|
|
|
|
|
|
|
|
return tmp_dict
|
|
|
|
|
|
|
|
def _get_defaults(self):
|
|
|
|
default_template = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
|
|
|
|
defaults = {
|
|
|
|
"logging": {
|
|
|
|
"level": "WARNING",
|
|
|
|
"json": False
|
|
|
|
},
|
2019-10-08 09:30:31 +00:00
|
|
|
"output_dir": os.getcwd(),
|
2019-10-07 12:44:45 +00:00
|
|
|
"template_dir": default_template,
|
|
|
|
"template": "readme",
|
|
|
|
"force_overwrite": False,
|
2019-10-08 12:57:46 +00:00
|
|
|
"appent_to_file": "",
|
2019-10-07 12:44:45 +00:00
|
|
|
"exclude_files": [],
|
|
|
|
}
|
|
|
|
|
|
|
|
self.schema = anyconfig.gen_schema(defaults)
|
|
|
|
return defaults
|
|
|
|
|
|
|
|
def _get_config(self):
|
|
|
|
defaults = self._get_defaults()
|
|
|
|
source_files = []
|
|
|
|
source_files.append(self.config_file)
|
2019-10-08 09:30:31 +00:00
|
|
|
source_files.append(os.path.join(self.base_dir, ".ansibledoctor"))
|
|
|
|
source_files.append(os.path.join(self.base_dir, ".ansibledoctor.yml"))
|
|
|
|
source_files.append(os.path.join(self.base_dir, ".ansibledoctor.yaml"))
|
2019-10-07 12:44:45 +00:00
|
|
|
cli_options = self.args
|
|
|
|
|
|
|
|
for config in source_files:
|
|
|
|
if config and os.path.exists(config):
|
|
|
|
with open(config, "r", encoding="utf8") as stream:
|
|
|
|
s = stream.read()
|
2019-10-08 09:30:31 +00:00
|
|
|
try:
|
|
|
|
sdict = yaml.safe_load(s)
|
|
|
|
except yaml.parser.ParserError as e:
|
|
|
|
message = "{}\n{}".format(e.problem, str(e.problem_mark))
|
|
|
|
raise ansibledoctor.Exception.ConfigError("Unable to read file", message)
|
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
if self._validate(sdict):
|
|
|
|
anyconfig.merge(defaults, sdict, ac_merge=anyconfig.MS_DICTS)
|
|
|
|
defaults["logging"]["level"] = defaults["logging"]["level"].upper()
|
|
|
|
|
|
|
|
if cli_options and self._validate(cli_options):
|
|
|
|
anyconfig.merge(defaults, cli_options, ac_merge=anyconfig.MS_DICTS)
|
|
|
|
|
|
|
|
return defaults
|
|
|
|
|
|
|
|
def _set_annotations(self):
|
|
|
|
annotations = {
|
|
|
|
"meta": {
|
|
|
|
"name": "meta",
|
|
|
|
"automatic": True
|
|
|
|
},
|
|
|
|
"todo": {
|
|
|
|
"name": "todo",
|
|
|
|
"automatic": True,
|
|
|
|
},
|
|
|
|
"var": {
|
|
|
|
"name": "var",
|
|
|
|
"automatic": True,
|
|
|
|
},
|
|
|
|
"example": {
|
|
|
|
"name": "example",
|
|
|
|
"regex": r"(\#\ *\@example\ *\: *.*)"
|
|
|
|
},
|
|
|
|
"tag": {
|
|
|
|
"name": "tag",
|
|
|
|
"automatic": True,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
return annotations
|
|
|
|
|
2019-10-08 09:30:31 +00:00
|
|
|
def _set_base_dir(self):
|
|
|
|
if self.args.get("base_dir"):
|
|
|
|
real = os.path.abspath(os.path.expanduser(os.path.expandvars(self.args.get("base_dir"))))
|
|
|
|
else:
|
|
|
|
real = os.getcwd()
|
|
|
|
return real
|
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
def _set_is_role(self):
|
2019-10-08 09:30:31 +00:00
|
|
|
if os.path.isdir(os.path.join(self.base_dir, "tasks")):
|
|
|
|
return True
|
|
|
|
|
|
|
|
def _set_dry_run(self):
|
|
|
|
if self.args.get("dry_run"):
|
2019-10-07 12:44:45 +00:00
|
|
|
return True
|
|
|
|
|
2019-10-08 12:57:46 +00:00
|
|
|
def _post_processing(self):
|
|
|
|
# Override append file path
|
|
|
|
append_file = self.config.get("append_to_file")
|
|
|
|
if append_file:
|
|
|
|
if not os.path.isabs(os.path.expanduser(os.path.expandvars(append_file))):
|
|
|
|
append_file = os.path.join(self.base_dir, append_file)
|
|
|
|
|
|
|
|
self.config["append_to_file"] = os.path.abspath(os.path.expanduser(os.path.expandvars(append_file)))
|
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
    def _validate(self, config):
        """
        Validate a config dict against the schema generated from the defaults.

        :param config: dict of settings to check
        :returns: True if validation succeeds
        :raises ansibledoctor.Exception.ConfigError: if validation fails
        """
        try:
            # ac_schema_safe=False makes anyconfig raise the underlying
            # jsonschema error so a detailed message can be built from it.
            anyconfig.validate(config, self.schema, ac_schema_safe=False)
        except jsonschema.exceptions.ValidationError as e:
            schema_error = "Failed validating '{validator}' in schema{schema}\n{message}".format(
                validator=e.validator,
                schema=format_as_index(list(e.relative_schema_path)[:-1]),
                message=e.message
            )
            raise ansibledoctor.Exception.ConfigError("Configuration error", schema_error)

        return True
|
|
|
def _add_dict_branch(self, tree, vector, value):
|
|
|
|
key = vector[0]
|
|
|
|
tree[key] = value \
|
|
|
|
if len(vector) == 1 \
|
|
|
|
else self._add_dict_branch(tree[key] if key in tree else {},
|
|
|
|
vector[1:], value)
|
|
|
|
return tree
|
2019-10-07 06:52:00 +00:00
|
|
|
|
|
|
|
def get_annotations_definition(self, automatic=True):
|
|
|
|
annotations = {}
|
|
|
|
if automatic:
|
2019-10-07 12:44:45 +00:00
|
|
|
for k, item in self._annotations.items():
|
2019-10-07 06:52:00 +00:00
|
|
|
if "automatic" in item.keys() and item["automatic"]:
|
|
|
|
annotations[k] = item
|
|
|
|
return annotations
|
|
|
|
|
|
|
|
def get_annotations_names(self, automatic=True):
|
|
|
|
annotations = []
|
|
|
|
if automatic:
|
2019-10-07 12:44:45 +00:00
|
|
|
for k, item in self._annotations.items():
|
2019-10-07 06:52:00 +00:00
|
|
|
if "automatic" in item.keys() and item["automatic"]:
|
|
|
|
annotations.append(k)
|
|
|
|
return annotations
|
|
|
|
|
2019-10-07 12:44:45 +00:00
|
|
|
def get_template(self):
|
2019-10-07 06:52:00 +00:00
|
|
|
"""
|
|
|
|
Get the base dir for the template to use.
|
|
|
|
|
|
|
|
:return: str abs path
|
|
|
|
"""
|
2019-10-07 12:44:45 +00:00
|
|
|
template_dir = self.config.get("template_dir")
|
|
|
|
template = self.config.get("template")
|
|
|
|
return os.path.realpath(os.path.join(template_dir, template))
|
2019-10-07 06:52:00 +00:00
|
|
|
|
|
|
|
|
|
|
|
class SingleConfig(Config, metaclass=Singleton):

    """Config variant shared as a single instance via the Singleton metaclass."""

    pass
|