ansible-later/ansiblelater/settings.py

"""Global settings object definition."""

import os

import anyconfig
import jsonschema.exceptions
import pathspec
from appdirs import AppDirs
from jsonschema._utils import format_as_index
from pkg_resources import resource_filename

from ansiblelater import utils

config_dir = AppDirs("ansible-later").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")


class Settings:
    """
    Create an object with all necessary settings.

    Settings are loaded from multiple locations in defined order (last wins):
    - default settings defined by `self._get_defaults()`
    - yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
    - provided cli parameters
    """

    def __init__(self, args, config_file=default_config_file):
        """
        Initialize a new settings class.

        :param args: An optional dict of options, arguments and commands from the CLI.
        :param config_file: An optional path to a yaml config file.
        :returns: None

        """
        self.config_file = config_file
        self.schema = None
        self.args_files = False
        self.args = self._set_args(args)
        self.config = self._get_config()
        self._update_filelist()

    def _set_args(self, args):
        if args is None:
            args = {}

        defaults = self._get_defaults()
        self.config_file = args.get("config_file") or default_config_file

        tmp_args = dict(filter(lambda item: item[1] is not None, args.items()))
        tmp_args.pop("config_file", None)

        tmp_dict = {}
        for key, value in tmp_args.items():
            tmp_dict = utils.add_dict_branch(tmp_dict, key.split("."), value)

        # Override correct log level from argparse
        levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
        log_level = levels.index(defaults["logging"]["level"])
        if tmp_dict.get("logging"):
            for adjustment in tmp_dict["logging"]["level"]:
                log_level = min(len(levels) - 1, max(log_level + adjustment, 0))
            tmp_dict["logging"]["level"] = levels[log_level]

        if len(tmp_dict["rules"]["files"]) == 0:
            tmp_dict["rules"]["files"] = ["*"]
        else:
            tmp_dict["rules"]["files"] = tmp_dict["rules"]["files"]
            self.args_files = True

        return tmp_dict

    def _get_config(self):
        defaults = self._get_defaults()
        source_files = []
        source_files.append(self.config_file)
        source_files.append(os.path.join(os.getcwd(), ".later"))
        source_files.append(os.path.join(os.getcwd(), ".later.yml"))
        source_files.append(os.path.join(os.getcwd(), ".later.yaml"))
        cli_options = self.args

        for config in source_files:
            if config and os.path.exists(config):
                with utils.open_file(config) as stream:
                    s = stream.read()
                    sdict = utils.safe_load(s)
                    if self._validate(sdict):
                        anyconfig.merge(defaults, sdict, ac_merge=anyconfig.MS_DICTS)
                        defaults["logging"]["level"] = defaults["logging"]["level"].upper()

        if cli_options and self._validate(cli_options):
            anyconfig.merge(defaults, cli_options, ac_merge=anyconfig.MS_DICTS)
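        # Illustrative sketch of the merge semantics relied on here: with
        # ac_merge=anyconfig.MS_DICTS, nested dicts are merged key by key, so a
        # ".later.yml" containing only
        #
        #   logging:
        #     level: info
        #
        # overrides defaults["logging"]["level"] while leaving all other
        # defaults untouched.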

        library = os.path.relpath(os.path.normpath(os.path.join(os.getcwd(), "library")))
        autodetect = []
        if os.path.exists(library):
            autodetect = [
                os.path.splitext(f)[0]
                for f in os.listdir(library)
                if os.path.isfile(os.path.join(library, f)) and not f.startswith(".")
            ]

        for f in autodetect:
            if f not in defaults["ansible"]["custom_modules"]:
                defaults["ansible"]["custom_modules"].append(f)
if defaults["rules"]["buildin"]:
defaults["rules"]["standards"].append(
os.path.join(resource_filename("ansiblelater", "rules"))
)
defaults["rules"]["standards"] = [
os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["standards"]
]

        return defaults

    def _get_defaults(self):
        defaults = {
            "rules": {
                "buildin": True,
                "standards": [],
                "filter": [],
                "exclude_filter": [],
                "warning_filter": [
                    "ANSIBLE9999",
                    "ANSIBLE9998",
                ],
                "ignore_dotfiles": True,
                "exclude_files": [],
                "version": ""
            },
            "logging": {
                "level": "WARNING",
                "json": False
            },
            "ansible": {
                "custom_modules": [],
                "double-braces": {
                    "min-spaces-inside": 1,
                    "max-spaces-inside": 1,
                },
                "literal-bools": ["True", "False", "yes", "no"],
                "named-task": {
                    "exclude": [
                        "meta",
                        "debug",
                        "block",
                        "include_role",
                        "import_role",
                        "include_tasks",
                        "import_tasks",
                        "include_vars",
                    ],
                },
                "native-yaml": {
                    "exclude": [],
                },
            },
            "yamllint": {
                "empty-lines": {
                    "max": 1,
                    "max-start": 0,
                    "max-end": 1,
                },
                "indentation": {
                    "spaces": 2,
                    "check-multi-line-strings": False,
                    "indent-sequences": True,
                },
                "hyphens": {
                    "max-spaces-after": 1
                },
                "document-start": {
                    "present": True
                },
                "document-end": {
                    "present": True
                },
                "colons": {
                    "max-spaces-before": 0,
                    "max-spaces-after": 1
                },
            },
        }

        self.schema = anyconfig.gen_schema(defaults)
        return defaults
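
    # A minimal ".later.yml" overriding a few of these defaults could look like
    # the sketch below (values are illustrative; key names mirror the defaults
    # generated above):
    #
    #   ---
    #   rules:
    #     exclude_files:
    #       - molecule/
    #   logging:
    #     level: info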

    def _validate(self, config):
        try:
            anyconfig.validate(config, self.schema, ac_schema_safe=False)
            return True
        except jsonschema.exceptions.ValidationError as e:
            validator = e.validator
            path = format_as_index(
                list(e.absolute_path)[0],
                list(e.absolute_path)[1:],
            )
            msg = e.message

            utils.sysexit_with_message(
                "Error while loading configuration:\n"
                f"Failed validating '{validator}' at {path}: {msg}"
            )

    def _update_filelist(self):
        includes = self.config["rules"]["files"]
        excludes = self.config["rules"]["exclude_files"]
        ignore_dotfiles = self.config["rules"]["ignore_dotfiles"]

        if ignore_dotfiles and not self.args_files:
            excludes.append(".*")
        else:
            del excludes[:]

        filelist = []
        for root, _dirs, files in os.walk("."):
            for filename in files:
                filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename))))

        valid = []
        includespec = pathspec.PathSpec.from_lines("gitwildmatch", includes)
        excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)
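        # Illustrative only: with the defaults, includes ["*"] matches every
        # walked file, while excludes [".*"] (gitwildmatch syntax) drops
        # dotfiles such as ".gitignore", so only non-hidden files survive.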
        for item in filelist:
            if includespec.match_file(item) and not excludespec.match_file(item):
                valid.append(item)

        self.config["rules"]["files"] = valid