mirror of https://github.com/thegeeklab/ansible-doctor.git
synced 2024-11-28 07:40:36 +00:00

Commit 2a7d59b64d (parent: 69a7078251)

bugfixes and error handling
Annotation parser (class Annotation)
@@ -57,7 +57,7 @@ class Annotation:
                 if not line:
                     break

-                if re.match(regex, line):
+                if re.match(regex, line.strip()):
                     item = self._get_annotation_data(
                         line, self._annotation_definition["name"])
                     if item:
CLI entry point (class AnsibleDoctor)
@@ -10,6 +10,7 @@ from ansibledoctor.Config import SingleConfig
 from ansibledoctor.DocumentationGenerator import Generator
 from ansibledoctor.DocumentationParser import Parser
 from ansibledoctor.Utils import SingleLog
+import ansibledoctor.Exception


 class AnsibleDoctor:
@@ -33,7 +34,7 @@ class AnsibleDoctor:
         parser = argparse.ArgumentParser(
             description="Generate documentation from annotated Ansible roles using templates")
         parser.add_argument("base_dir", nargs="?", help="role directory, (default: current working dir)")
-        parser.add_argument("-c", "--config", nargs="?", help="location of configuration file")
+        parser.add_argument("-c", "--config", nargs="?", dest="config_file", help="location of configuration file")
         parser.add_argument("-o", "--output", action="store", dest="output_dir", type=str,
                             help="output base dir")
         parser.add_argument("-f", "--force", action="store_true", dest="force_overwrite",
@@ -50,12 +51,17 @@ class AnsibleDoctor:
         return parser.parse_args().__dict__

     def _get_config(self):
-        config = SingleConfig(args=self.args)
+        try:
+            config = SingleConfig(args=self.args)
+        except ansibledoctor.Exception.ConfigError as e:
+            self.log.sysexit_with_message(e)

         if config.is_role:
             self.logger.info("Ansible role detected")
         else:
-            self.log.error("No Ansible role detected")
-            sys.exit(1)
+            self.log.sysexit_with_message("No Ansible role detected")
+
+        self.log.set_level(config.config["logging"]["level"])
+        self.logger.info("Using config file {}".format(config.config_file))

-        # TODO: user wrapper method to catch config exceptions
         return config
ansibledoctor/Config.py
@@ -5,11 +5,13 @@ import sys

 import anyconfig
 import yaml
+import jsonschema.exceptions
 from appdirs import AppDirs
 from jsonschema._utils import format_as_index
 from pkg_resources import resource_filename

 from ansibledoctor.Utils import Singleton
+import ansibledoctor.Exception

 config_dir = AppDirs("ansible-doctor").user_config_dir
 default_config_file = os.path.join(config_dir, "config.yml")
@@ -36,15 +38,19 @@ class Config():
         """
         self.config_file = None
         self.schema = None
-        self.dry_run = False
         self.args = self._set_args(args)
-        self.config = self._get_config()
+        self.base_dir = self._set_base_dir()
         self.is_role = self._set_is_role() or False
+        self.dry_run = self._set_dry_run() or False
+        self.config = self._get_config()
         self._annotations = self._set_annotations()

     def _set_args(self, args):
         defaults = self._get_defaults()
-        self.config_file = args.get("config_file") or default_config_file
+        if args.get("config_file"):
+            self.config_file = os.path.abspath(os.path.expanduser(os.path.expandvars(args.get("config_file"))))
+        else:
+            self.config_file = default_config_file

         args.pop("config_file", None)
         tmp_args = dict(filter(lambda item: item[1] is not None, args.items()))
@@ -64,14 +70,13 @@ class Config():
         return tmp_dict

     def _get_defaults(self):
-        default_output = os.getcwd()
         default_template = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
         defaults = {
             "logging": {
                 "level": "WARNING",
                 "json": False
             },
-            "output_dir": default_output,
+            "output_dir": os.getcwd(),
             "template_dir": default_template,
             "template": "readme",
             "force_overwrite": False,
@@ -84,18 +89,23 @@ class Config():
     def _get_config(self):
         defaults = self._get_defaults()
         source_files = []

         source_files.append(self.config_file)
-        # TODO: support multipel filename formats e.g. .yaml or .ansibledoctor
-        source_files.append(os.path.relpath(
-            os.path.normpath(os.path.join(os.getcwd(), ".ansibledoctor.yml"))))
+        source_files.append(os.path.join(self.base_dir, ".ansibledoctor"))
+        source_files.append(os.path.join(self.base_dir, ".ansibledoctor.yml"))
+        source_files.append(os.path.join(self.base_dir, ".ansibledoctor.yaml"))
         cli_options = self.args

         for config in source_files:
             if config and os.path.exists(config):
                 with open(config, "r", encoding="utf8") as stream:
                     s = stream.read()
-                    # TODO: catch malformed files
-                    sdict = yaml.safe_load(s)
+                    try:
+                        sdict = yaml.safe_load(s)
+                    except yaml.parser.ParserError as e:
+                        message = "{}\n{}".format(e.problem, str(e.problem_mark))
+                        raise ansibledoctor.Exception.ConfigError("Unable to read file", message)

                     if self._validate(sdict):
                         anyconfig.merge(defaults, sdict, ac_merge=anyconfig.MS_DICTS)
                         defaults["logging"]["level"] = defaults["logging"]["level"].upper()
@@ -130,23 +140,33 @@ class Config():
         }
         return annotations

+    def _set_base_dir(self):
+        if self.args.get("base_dir"):
+            real = os.path.abspath(os.path.expanduser(os.path.expandvars(self.args.get("base_dir"))))
+        else:
+            real = os.getcwd()
+        return real
+
     def _set_is_role(self):
-        if os.path.isdir(os.path.join(os.getcwd(), "tasks")):
+        if os.path.isdir(os.path.join(self.base_dir, "tasks")):
+            return True
+
+    def _set_dry_run(self):
+        if self.args.get("dry_run"):
             return True

     def _validate(self, config):
         try:
             anyconfig.validate(config, self.schema, ac_schema_safe=False)
-            return True
-        except Exception as e:
-            schema_error = "Failed validating '{validator}' in schema{schema}".format(
+        except jsonschema.exceptions.ValidationError as e:
+            schema_error = "Failed validating '{validator}' in schema{schema}\n{message}".format(
                 validator=e.validator,
-                schema=format_as_index(list(e.relative_schema_path)[:-1])
+                schema=format_as_index(list(e.relative_schema_path)[:-1]),
+                message=e.message
             )
+            raise ansibledoctor.Exception.ConfigError("Configuration error", schema_error)

-            # TODO: raise exception
-            print("{schema}: {msg}".format(schema=schema_error, msg=e.message))
-            sys.exit(999)
+        return True

     def _add_dict_branch(self, tree, vector, value):
         key = vector[0]
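The net effect of the Config changes is a fixed lookup order for configuration sources. A minimal sketch of that order (not part of the commit; only the default path and the three role-local names come from the diff, the rest is illustrative):

import os

from appdirs import AppDirs

config_dir = AppDirs("ansible-doctor").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")

base_dir = os.getcwd()  # or the resolved "base_dir" CLI argument
source_files = [
    default_config_file,                           # global config or the -c/--config override
    os.path.join(base_dir, ".ansibledoctor"),      # role-local config, three accepted names
    os.path.join(base_dir, ".ansibledoctor.yml"),
    os.path.join(base_dir, ".ansibledoctor.yaml"),
]
# existing files are merged in order with anyconfig.merge(); later files override earlier ones
print([f for f in source_files if os.path.exists(f)])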
ansibledoctor/DocumentationGenerator.py
@@ -8,12 +8,14 @@ import os
 import pprint
 import sys

+from functools import reduce
 import jinja2.exceptions
 import ruamel.yaml
 from jinja2 import Environment
 from jinja2 import FileSystemLoader
 from six import binary_type
 from six import text_type
+import ansibledoctor.Exception

 from ansibledoctor.Config import SingleConfig
 from ansibledoctor.Utils import FileUtils
@@ -26,8 +28,9 @@ class Generator:
         self.extension = "j2"
         self._parser = None
         self.config = SingleConfig()
-        self.log = SingleLog().logger
-        self.log.info("Using template dir: " + self.config.get_template())
+        self.log = SingleLog()
+        self.logger = self.log.logger
+        self.logger.info("Using template dir: " + self.config.get_template())
         self._parser = doc_parser
         self._scan_template()

@@ -43,16 +46,16 @@ class Generator:

             relative_file = file[len(base_dir) + 1:]
             if ntpath.basename(file)[:1] != "_":
-                self.log.debug("Found template file: " + relative_file)
+                self.logger.debug("Found template file: " + relative_file)
                 self.template_files.append(relative_file)
             else:
-                self.log.debug("Ignoring template file: " + relative_file)
+                self.logger.debug("Ignoring template file: " + relative_file)

     def _create_dir(self, directory):
         if not self.config.dry_run:
             os.makedirs(directory, exist_ok=True)
         else:
-            self.log.info("Creating dir: " + directory)
+            self.logger.info("Creating dir: " + directory)

     def _write_doc(self):
         files_to_overwite = []
@@ -64,17 +67,19 @@ class Generator:

         if len(files_to_overwite) > 0 and self.config.config.get("force_overwrite") is False:
             if not self.config.dry_run:
-                self.log.warn("This files will be overwritten:")
+                self.logger.warn("This files will be overwritten:")
                 print(*files_to_overwite, sep="\n")
-                resulst = FileUtils.query_yes_no("Do you want to continue?")
-                if resulst != "yes":
-                    sys.exit()
+                try:
+                    FileUtils.query_yes_no("Do you want to continue?")
+                except ansibledoctor.Exception.InputError:
+                    self.log.sysexit_with_message("Aborted...")

         for file in self.template_files:
-            doc_file = self.config.config.get("output_dir") + "/" + file[:-len(self.extension) - 1]
+            doc_file = os.path.join(self.config.config.get("output_dir"), os.path.splitext(file)[0])
             source_file = self.config.get_template() + "/" + file

-            self.log.debug("Writing doc output to: " + doc_file + " from: " + source_file)
+            self.logger.debug("Writing doc output to: " + doc_file + " from: " + source_file)

             # make sure the directory exists
             self._create_dir(os.path.dirname(os.path.realpath(doc_file)))
@@ -87,19 +92,20 @@ class Generator:
                 # print(json.dumps(self._parser.get_data(), indent=4, sort_keys=True))
                 jenv = Environment(loader=FileSystemLoader(self.config.get_template()), lstrip_blocks=True, trim_blocks=True)
                 jenv.filters["to_nice_yaml"] = self._to_nice_yaml
+                jenv.filters["deep_get"] = self._deep_get
                 data = jenv.from_string(data).render(self._parser.get_data(), role=self._parser.get_data())
                 if not self.config.dry_run:
                     with open(doc_file, "wb") as outfile:
                         outfile.write(data.encode("utf-8"))
-                        self.log.info("Writing to: " + doc_file)
+                        self.logger.info("Writing to: " + doc_file)
                 else:
-                    self.log.info("Writing to: " + doc_file)
+                    self.logger.info("Writing to: " + doc_file)
-            except jinja2.exceptions.UndefinedError as e:
-                self.log.error("Jinja2 templating error: <" + str(e) + "> when loading file: '" + file + "', run in debug mode to see full except")
-                sys.exit(1)
+            except (jinja2.exceptions.UndefinedError, jinja2.exceptions.TemplateSyntaxError)as e:
+                self.log.sysexit_with_message(
+                    "Jinja2 templating error while loading file: '{}'\n{}".format(file, str(e)))
             except UnicodeEncodeError as e:
-                self.log.error("Unable to print special chars: <" + str(e) + ">, run in debug mode to see full except")
-                sys.exit(1)
+                self.log.sysexit_with_message(
+                    "Unable to print special characters\n{}".format(str(e)))

     def _to_nice_yaml(self, a, indent=4, *args, **kw):
         """Make verbose, human readable yaml."""
@@ -109,6 +115,10 @@ class Generator:
             yaml.dump(a, stream, **kw)
         return stream.getvalue().rstrip()

+    def _deep_get(self, _, dictionary, keys, *args, **kw):
+        default = None
+        return reduce(lambda d, key: d.get(key, default) if isinstance(d, dict) else default, keys.split("."), dictionary)
+
     def render(self):
-        self.log.info("Using output dir: " + self.config.config.get("output_dir"))
+        self.logger.info("Using output dir: " + self.config.config.get("output_dir"))
         self._write_doc()
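The new deep_get filter is the piece the reworked templates below rely on: it ignores the value piped into it and walks a dotted key path through nested dicts, returning None on any miss. A standalone sketch of its behavior (not part of the commit):

from functools import reduce


def deep_get(_, dictionary, keys):
    # mirrors Generator._deep_get: the piped value "_" is ignored
    default = None
    return reduce(
        lambda d, key: d.get(key, default) if isinstance(d, dict) else default,
        keys.split("."),
        dictionary,
    )


meta = {"description": {"value": "Example role"}}
print(deep_get(None, meta, "description.value"))  # -> Example role
print(deep_get(None, meta, "license.value"))      # -> None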
ansibledoctor/DocumentationParser.py
@@ -46,10 +46,13 @@ class Parser:
             if any("meta/main." + ext in rfile for ext in extensions):
                 with open(rfile, "r", encoding="utf8") as yaml_file:
                     try:
-                        data = defaultdict(dict, yaml.load(yaml_file, Loader=yaml.SafeLoader))
+                        data = defaultdict(dict, yaml.safe_load(yaml_file))
                         if data.get("galaxy_info"):
                             for key, value in data.get("galaxy_info").items():
                                 self._data["meta"][key] = {"value": value}
+
+                        if data.get("dependencies") is not None:
+                            self._data["meta"]["dependencies"] = {"value": data.get("dependencies")}
                     except yaml.YAMLError as exc:
                         print(exc)

@@ -60,7 +63,6 @@ class Parser:
             self.log.info("Finding annotations for: @" + annotaion)
             self._annotation_objs[annotaion] = Annotation(name=annotaion, files_registry=self._files_registry)
             tags[annotaion] = self._annotation_objs[annotaion].get_details()
-            # print(json.dumps(tags, indent=4, sort_keys=True))

         anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)

     def get_data(self):
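For reference, a sketch (not part of the commit) of how a meta/main.yml now maps into the parser data, including the new dependencies handling; the sample YAML content is made up:

from collections import defaultdict

import yaml

meta_main = """
galaxy_info:
  author: jdoe
  license: MIT
dependencies: []
"""

data = defaultdict(dict, yaml.safe_load(meta_main))
meta = {}
if data.get("galaxy_info"):
    for key, value in data.get("galaxy_info").items():
        meta[key] = {"value": value}
if data.get("dependencies") is not None:
    # an empty list is recorded too, so the template can render "None."
    meta["dependencies"] = {"value": data.get("dependencies")}
print(meta)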
ansibledoctor/Exception.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+"""Custom exception definition."""
+
+
+class DoctorError(Exception):
+    """Generic exception class for ansible-doctor."""
+
+    def __init__(self, msg, original_exception=""):
+        super(DoctorError, self).__init__(msg + ("\n%s" % original_exception))
+        self.original_exception = original_exception
+
+
+class ConfigError(DoctorError):
+    """Errors related to config file handling."""
+
+    pass
+
+
+class InputError(DoctorError):
+    """Errors related to config file handling."""
+
+    pass
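A short usage sketch (not part of the commit, assuming the ansibledoctor package from this commit is importable): low-level errors are wrapped in a DoctorError subclass together with the original message, and callers catch the custom type instead of a bare Exception.

import ansibledoctor.Exception


def read_config(raw):
    try:
        if ":" not in raw:  # stand-in for a real YAML parse failure
            raise ValueError("mapping values are not allowed here")
    except ValueError as e:
        raise ansibledoctor.Exception.ConfigError("Unable to read file", e)


try:
    read_config("not valid yaml")
except ansibledoctor.Exception.DoctorError as e:
    # message is "Unable to read file" plus the original error on a new line
    print(e)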
File registry (class Registry)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 import glob
 import os
+import pathspec
 import sys

 from ansibledoctor.Config import SingleConfig
@@ -31,32 +32,18 @@ class Registry:
         :return: None
         """
         extensions = YAML_EXTENSIONS
-        base_dir = os.getcwd()
+        base_dir = self.config.base_dir
+        role_name = os.path.basename(base_dir)
+        excludes = self.config.config.get("exclude_files")
+        excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)

         self.log.debug("Scan for files: " + base_dir)

         for extension in extensions:
-            for filename in glob.iglob(base_dir + "/**/*." + extension, recursive=True):
-                if self._is_excluded_yaml_file(filename, base_dir):
-                    self.log.debug("Excluding: " + filename)
-                else:
-                    self.log.debug("Adding to role:" + base_dir + " => " + filename)
-                    self._doc.append(filename)
-
-    # TODO: not working...
-    def _is_excluded_yaml_file(self, file, base_dir):
-        """
-        Sub method for handling file exclusions based on the full path starts with.
-
-        :param file:
-        :param role_base_dir:
-        :return:
-        """
-        excluded = self.config.config.get("exclude_files")
-
-        is_filtered = False
-        for excluded_dir in excluded:
-            if file.startswith(base_dir + "/" + excluded_dir):
-                is_filtered = True
-
-        return is_filtered
+            pattern = os.path.join(base_dir, "**/*." + extension)
+            for filename in glob.iglob(pattern, recursive=True):
+                if not excludespec.match_file(filename):
+                    self.log.debug("Adding file to '{}': {}".format(role_name, os.path.relpath(filename, base_dir)))
+                    self._doc.append(filename)
+                else:
+                    self.log.debug("Excluding file: {}".format(os.path.relpath(filename, base_dir)))
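The exclusion logic now delegates to pathspec with git-style wildmatch patterns instead of the prefix check that was marked "not working". A sketch of the matching behavior (not part of the commit; the patterns and paths are made up):

import pathspec

excludes = ["molecule/", "tests/**"]  # hypothetical "exclude_files" values
spec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)

for path in ["tasks/main.yml", "molecule/default/converge.yml", "tests/inventory.yml"]:
    state = "excluded" if spec.match_file(path) else "added"
    print("{}: {}".format(state, path))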
ansibledoctor/Utils.py
@@ -8,6 +8,7 @@ from distutils.util import strtobool
 import colorama
 import yaml
 from pythonjsonlogger import jsonlogger
+import ansibledoctor.Exception

 CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
 JSON_FORMAT = "(asctime) (levelname) (message)"
@@ -75,6 +76,7 @@ class Log:
         self.logger.addHandler(self._get_warn_handler(json=json))
         self.logger.addHandler(self._get_info_handler(json=json))
         self.logger.addHandler(self._get_critical_handler(json=json))
+        self.logger.addHandler(self._get_debug_handler(json=json))
         self.logger.propagate = False

     def _get_error_handler(self, json=False):
@@ -106,7 +108,7 @@ class Log:
         handler.setLevel(logging.INFO)
         handler.addFilter(LogFilter(logging.INFO))
         handler.setFormatter(MultilineFormatter(
-            self.info(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))))
+            self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))))

         if json:
             handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
@@ -125,6 +127,18 @@ class Log:

         return handler

+    def _get_debug_handler(self, json=False):
+        handler = logging.StreamHandler(sys.stderr)
+        handler.setLevel(logging.DEBUG)
+        handler.addFilter(LogFilter(logging.DEBUG))
+        handler.setFormatter(MultilineFormatter(
+            self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))))
+
+        if json:
+            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
+
+        return handler
+
     def set_level(self, s):
         self.logger.setLevel(s)

@@ -159,6 +173,13 @@ class Log:
         """
         return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)

+    def sysexit(self, code=1):
+        sys.exit(code)
+
+    def sysexit_with_message(self, msg, code=1):
+        self.logger.critical(str(msg))
+        self.sysexit(code)
+

 class SingleLog(Log, metaclass=Singleton):
     pass
@@ -169,9 +190,8 @@ class FileUtils:
     def create_path(path):
         os.makedirs(path, exist_ok=True)

-    # http://code.activestate.com/recipes/577058/
     @staticmethod
-    def query_yes_no(question, default="yes"):
+    def query_yes_no(question, default=True):
         """Ask a yes/no question via input() and return their answer.

         "question" is a string that is presented to the user.
@@ -181,25 +201,16 @@ class FileUtils:

         The "answer" return value is one of "yes" or "no".
         """
-        valid = {"yes": "yes", "y": "yes", "ye": "yes",
-                 "no": "no", "n": "no"}
-        if default is None:
-            prompt = " [y/n] "
-        elif default == "yes":
+        if default:
             prompt = "[Y/n]"
-        elif default == "no":
-            prompt = " [y/N] "
         else:
-            raise ValueError("Invalid default answer: '%s'" % default)
+            prompt = "[N/y]"

-        while 1:
-            choice = input(question + prompt).lower()
-            if default is not None and choice == "":
-                return default
-            elif choice in valid.keys():
-                return valid[choice]
-            else:
-                sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
+        try:
+            choice = input("{} {} ".format(question, prompt)) or default
+            to_bool(choice)
+        except (KeyboardInterrupt, ValueError) as e:
+            raise ansibledoctor.Exception.InputError("Error while reading input", e)


 def to_bool(string):
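With this change the confirmation prompt no longer returns the strings "yes"/"no"; unparseable input or Ctrl-C surfaces as InputError, which the caller turns into a clean exit through the logger. A caller-side sketch (not part of the commit, assuming the ansibledoctor package from this commit is importable):

import ansibledoctor.Exception
from ansibledoctor.Utils import FileUtils, SingleLog

log = SingleLog()
try:
    FileUtils.query_yes_no("Do you want to continue?")
except ansibledoctor.Exception.InputError:
    log.sysexit_with_message("Aborted...")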
README template (main file)
@@ -1,9 +1,12 @@
 {% set meta = role.meta | default({}) %}
-# {{ (meta.name | default({"value": "_undefined_"})).value }}
-
-{% if meta.description is defined %}
-{{ meta.description.value }}
+# {{ name | deep_get(meta, "name.value") | default("_undefined_") }}
+{% if description | deep_get(meta, "description.value") %}
+
+{{ description | deep_get(meta, "description.value") }}
 {% endif %}

 {# Vars #}
 {% include '_vars.j2' %}
+
+{# Meta #}
+{% include '_meta.j2' %}
Removed template (debug dump of the tags variable)
@@ -1,4 +0,0 @@
-#============================================================================================================
-# This is a dump of the documentation variable : tags
-#============================================================================================================
-{{ tag | pprint }}
ansibledoctor/templates/readme/_meta.j2 (new file, 25 lines)
@@ -0,0 +1,25 @@
+{% set meta = role.meta | default({}) %}
+{% if meta %}
+## Dependencies
+
+{% if meta | deep_get(meta, "dependencies.value") %}
+{% for item in meta.dependencies.value %}
+* {{ item }}
+{% endfor %}
+{% else %}
+None.
+{% endif %}
+{% if license | deep_get(meta, "license.value") %}
+
+## License
+
+{{ meta.license.value }}
+{% endif %}
+{% if author | deep_get(meta, "author.value") %}
+
+## Author
+
+{{ meta.author.value }}
+
+{% endif %}
+{% endif %}
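To see how the new partial and the deep_get filter interact, a self-contained rendering sketch (not part of the commit) with a stripped-down template fragment and a made-up meta dict:

from functools import reduce

from jinja2 import Environment


def deep_get(_, dictionary, keys):
    # same semantics as the filter registered by the generator
    return reduce(
        lambda d, key: d.get(key) if isinstance(d, dict) else None,
        keys.split("."),
        dictionary,
    )


env = Environment(lstrip_blocks=True, trim_blocks=True)
env.filters["deep_get"] = deep_get

fragment = (
    "{% if author | deep_get(meta, 'author.value') %}"
    "## Author\n{{ meta.author.value }}\n"
    "{% endif %}"
)
template = env.from_string(fragment)

print(template.render(meta={"author": {"value": "jdoe"}}))  # renders the Author section
print(template.render(meta={}))                             # renders nothing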
README template (_vars.j2)
@@ -11,13 +11,14 @@
 {{ desc_line }}
 {% endfor %}
 {% endif %}
-
+{% if item.value is defined and item.value %}
 #### Default value

 ```YAML
 {{ item.value | to_nice_yaml(indent=2) }}
 ```
+{% endif %}
 {% if item.example is defined and item.example %}

 #### Example usage