refactor logging and configuration handling

This commit is contained in:
Robert Kaussow 2019-10-07 14:44:45 +02:00
parent f265180bdf
commit 69a7078251
16 changed files with 355 additions and 495 deletions

View File

@@ -19,6 +19,11 @@ class AnnotationItem:
def __init__(self):
self.data = defaultdict(dict)
def __str__(self):
for key in self.data.keys():
for sub in self.data.get(key):
return "AnnotationItem({}: {})".format(key, sub)
def get_obj(self):
return self.data
@@ -28,7 +33,7 @@ class Annotation:
self._all_items = defaultdict(dict)
self._file_handler = None
self.config = SingleConfig()
self.log = SingleLog()
self.log = SingleLog().logger
self._files_registry = files_registry
self._all_annotations = self.config.get_annotations_definition()
@@ -56,6 +61,7 @@ class Annotation:
item = self._get_annotation_data(
line, self._annotation_definition["name"])
if item:
self.log.info(str(item))
self._populate_item(item.get_obj().items())
self._file_handler.close()
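The hunks above replace the stored `SingleLog` wrapper with its `.logger` attribute, so `Annotation` now logs through a plain `logging.Logger` shared across the project. A minimal sketch of that singleton/logger pattern, assuming the `Singleton` metaclass and `SingleLog` wrapper introduced further down in this commit:

```python
import logging


class Singleton(type):
    """Metaclass that returns the same instance for every instantiation."""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class SingleLog(metaclass=Singleton):
    def __init__(self, level=logging.WARNING):
        # the wrapper owns one named logger; consumers grab `.logger`
        self.logger = logging.getLogger("ansibledoctor")
        self.logger.setLevel(level)


# every class in the project ends up with the same logger object
assert SingleLog().logger is SingleLog().logger
```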

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env python3
import argparse
import logging
import os
import sys
@@ -14,10 +15,10 @@ from ansibledoctor.Utils import SingleLog
class AnsibleDoctor:
def __init__(self):
self.config = SingleConfig()
self.log = SingleLog(self.config.debug_level)
args = self._cli_args()
self._parse_args(args)
self.log = SingleLog()
self.logger = self.log.logger
self.args = self._cli_args()
self.config = self._get_config()
doc_parser = Parser()
doc_generator = Generator(doc_parser)
@@ -30,101 +31,31 @@ class AnsibleDoctor:
:return: args object
"""
parser = argparse.ArgumentParser(
description="Generate documentation from annotated playbooks and roles using templates")
parser.add_argument("project_dir", nargs="?", default=os.getcwd(),
help="role directory, (default: current working dir)")
parser.add_argument("-c", "--conf", nargs="?", default="",
help="location of configuration file")
parser.add_argument("-o", "--output", action="store", dest="output", type=str,
description="Generate documentation from annotated Ansible roles using templates")
parser.add_argument("base_dir", nargs="?", help="role directory, (default: current working dir)")
parser.add_argument("-c", "--config", nargs="?", help="location of configuration file")
parser.add_argument("-o", "--output", action="store", dest="output_dir", type=str,
help="output base dir")
parser.add_argument("-f", "--force", action="store_true", help="force overwrite output file")
parser.add_argument("-f", "--force", action="store_true", dest="force_overwrite",
help="force overwrite output file")
parser.add_argument("-d", "--dry-run", action="store_true", help="dry run without writing")
parser.add_argument("-D", "--default", action="store_true", help="print the default configuration")
parser.add_argument("-p", "--print", nargs="?", default="_unset_",
help="use print template instead of writing to files")
# parser.add_argument("-p", "--print", action="store_true",
# help="print to stdout instead of file")
parser.add_argument("-v", dest="logging.level", action="append_const", const=-1,
help="increase log level")
parser.add_argument("-q", dest="logging.level", action="append_const",
const=1, help="decrease log level")
parser.add_argument("--version", action="version", version="%(prog)s {}".format(__version__))
debug_level = parser.add_mutually_exclusive_group()
debug_level.add_argument("-v", action="store_true", help="Set debug level to info")
debug_level.add_argument("-vv", action="store_true", help="Set debug level to debug")
debug_level.add_argument("-vvv", action="store_true", help="Set debug level to trace")
return parser.parse_args().__dict__
return parser.parse_args()
def _parse_args(self, args):
"""
Use an args object to apply all the configuration combinations to the config object.
:param args:
:return: None
"""
self.config.set_base_dir(os.path.abspath(args.project_dir))
# search for config file
if args.conf != "":
conf_file = os.path.abspath(args.conf)
if os.path.isfile(conf_file) and os.path.basename(conf_file) == self.config.config_file_name:
self.config.load_config_file(conf_file)
# re apply log level based on config
self.log.set_level(self.config.debug_level)
else:
self.log.warn("No configuration file found: " + conf_file)
def _get_config(self):
config = SingleConfig(args=self.args)
if config.is_role:
self.logger.info("Ansible role detected")
else:
conf_file = self.config.get_base_dir() + "/" + self.config.config_file_name
if os.path.isfile(conf_file):
self.config.load_config_file(conf_file)
# re apply log level based on config
self.log.set_level(self.config.debug_level)
# sample configuration
if args.default:
print(self.config.sample_config)
sys.exit()
# Debug levels
if args.v is True:
self.log.set_level("info")
elif args.vv is True:
self.log.set_level("debug")
elif args.vvv is True:
self.log.set_level("trace")
# need to send the message after the log levels have been set
self.log.debug("using configuration file: " + conf_file)
# Overwrite
if args.force is True:
self.config.template_overwrite = True
# Dry run
if args.dry_run is True:
self.config.dry_run = True
if self.log.log_level > 1:
self.log.set_level(1)
self.log.info("Running in Dry mode: Therefore setting log level at least to INFO")
# Print template
if args.print == "_unset_":
pass
elif args.print is None:
self.config.use_print_template = "all"
else:
self.config.use_print_template = args.print
# output dir
if args.output is not None:
self.config.output_dir = os.path.abspath(args.output)
# some debug
self.log.debug(args)
self.log.info("Using base dir: " + self.config.get_base_dir())
if self.config.is_role:
self.log.info("This is detected as: ROLE ")
elif self.config.is_role is not None and not self.config.is_role:
self.log.info("This is detected as: PLAYBOOK ")
else:
self.log.error([
self.config.get_base_dir() + "/tasks"
], "No ansible role detected, checked for: ")
self.log.error("No Ansible role detected")
sys.exit(1)
# TODO: use wrapper method to catch config exceptions
return config
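The rewritten CLI replaces the mutually exclusive `-v`/`-vv`/`-vvv` group with repeatable `-v`/`-q` flags that use `argparse`'s `append_const` action on the dotted destination `logging.level`; `Config._set_args` (below) then sums the offsets against a list of level names. A small sketch of that mechanism, assuming the `WARNING` default from `_get_defaults()`:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-v", dest="logging.level", action="append_const", const=-1)
parser.add_argument("-q", dest="logging.level", action="append_const", const=1)

args = vars(parser.parse_args(["-v", "-v"]))
# {'logging.level': [-1, -1]}

levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
log_level = levels.index("WARNING")  # default from _get_defaults()
for adjustment in args["logging.level"]:
    log_level = min(len(levels) - 1, max(log_level + adjustment, 0))
print(levels[log_level])  # DEBUG after two -v flags
```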

View File

@@ -1,209 +1,186 @@
#!/usr/bin/env python3
import os
"""Global settings object definition."""
import os
import sys
import anyconfig
import yaml
from appdirs import AppDirs
from jsonschema._utils import format_as_index
from pkg_resources import resource_filename
from ansibledoctor.Utils import Singleton
config_dir = AppDirs("ansible-doctor").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")
class Config:
sample_config = """---
# filename: doctor.conf.yaml
# base directory to scan, relative dir to configuration file
# base_dir: "./"
class Config():
"""
Create an object with all necessary settings.
# documentation output directory, relative dir to configuration file.
output_dir: "./doc"
Settings are loaded from multiple locations in defined order (last wins):
- default settings defined by `self._get_defaults()`
- yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
- provided cli parameters
"""
# directory containing templates, relative dir to configuration file,
# comment to use default build in ones
# template_dir: "./template"
def __init__(self, args={}, config_file=None):
"""
Initialize a new settings class.
# template directory name within template_dir
# build in "doc_and_readme" and "readme"
template: "readme"
:param args: An optional dict of options, arguments and commands from the CLI.
:param config_file: An optional path to a yaml config file.
:returns: None
# Overwrite documentation pages if already exist
# this is equal to -y
# template_overwrite : False
"""
self.config_file = None
self.schema = None
self.dry_run = False
self.args = self._set_args(args)
self.config = self._get_config()
self.is_role = self._set_is_role() or False
self._annotations = self._set_annotations()
# set the debug level: trace | debug | info | warn
# see -v | -vv | -vvv
# debug_level: "warn"
def _set_args(self, args):
defaults = self._get_defaults()
self.config_file = args.get("config_file") or default_config_file
# when searching for yaml files in roles projects,
# exclude these paths (dirs and files) from analysis
# default values
excluded_roles_dirs: []
args.pop("config_file", None)
tmp_args = dict(filter(lambda item: item[1] is not None, args.items()))
"""
# path to the documentation output dir
output_dir = ""
tmp_dict = {}
for key, value in tmp_args.items():
tmp_dict = self._add_dict_branch(tmp_dict, key.split("."), value)
# project base directory
_base_dir = ""
# Override correct log level from argparse
levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
log_level = levels.index(defaults["logging"]["level"])
if tmp_dict.get("logging"):
for adjustment in tmp_dict["logging"]["level"]:
log_level = min(len(levels) - 1, max(log_level + adjustment, 0))
tmp_dict["logging"]["level"] = levels[log_level]
# current directory of this object,
# used to get the default template directory
script_base_dir = ""
return tmp_dict
# path to the directory that contains the templates
template_dir = ""
# default template name
default_template = "readme"
# template to use
template = ""
# flag to ask if files can be overwritten
template_overwrite = False
# flag to use the cli print template
use_print_template = False
def _get_defaults(self):
default_output = os.getcwd()
default_template = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
defaults = {
"logging": {
"level": "WARNING",
"json": False
},
"output_dir": default_output,
"template_dir": default_template,
"template": "readme",
"force_overwrite": False,
"exclude_files": [],
}
# don"t modify any file
dry_run = False
self.schema = anyconfig.gen_schema(defaults)
return defaults
# default debug level
debug_level = "warn"
def _get_config(self):
defaults = self._get_defaults()
source_files = []
source_files.append(self.config_file)
# TODO: support multiple filename formats e.g. .yaml or .ansibledoctor
source_files.append(os.path.relpath(
os.path.normpath(os.path.join(os.getcwd(), ".ansibledoctor.yml"))))
cli_options = self.args
# internal flag
is_role = None
# internal when is_role is True
project_name = ""
for config in source_files:
if config and os.path.exists(config):
with open(config, "r", encoding="utf8") as stream:
s = stream.read()
# TODO: catch malformed files
sdict = yaml.safe_load(s)
if self._validate(sdict):
anyconfig.merge(defaults, sdict, ac_merge=anyconfig.MS_DICTS)
defaults["logging"]["level"] = defaults["logging"]["level"].upper()
# name of the config file to search for
config_file_name = "doctor.conf.yaml"
# if config file is not in root of project, this is used to make output relative to config file
_config_file_dir = ""
if cli_options and self._validate(cli_options):
anyconfig.merge(defaults, cli_options, ac_merge=anyconfig.MS_DICTS)
excluded_roles_dirs = []
# annotation search patterns
# for any pattern like " # @annotation: [annotation_key] # description "
# name = annotation ( without "@" )
# allow_multiple = True allow to repeat the same annotation, i.e. @todo
# automatic = True this action will be parsed based on the annotation in name without calling the parse method
annotations = {
"meta": {
"name": "meta",
"automatic": True
},
"todo": {
"name": "todo",
"automatic": True,
},
"var": {
"name": "var",
"automatic": True,
},
"example": {
"name": "example",
"regex": r"(\#\ *\@example\ *\: *.*)"
},
"tag": {
"name": "tag",
"automatic": True,
},
}
def __init__(self):
self.script_base_dir = os.path.dirname(os.path.realpath(__file__))
def set_base_dir(self, directory):
self._base_dir = directory
self._set_is_role()
def get_base_dir(self):
return self._base_dir
def get_annotations_definition(self, automatic=True):
annotations = {}
if automatic:
for k, item in self.annotations.items():
if "automatic" in item.keys() and item["automatic"]:
annotations[k] = item
return annotations
def get_annotations_names(self, automatic=True):
annotations = []
if automatic:
for k, item in self.annotations.items():
if "automatic" in item.keys() and item["automatic"]:
annotations.append(k)
return defaults
def _set_annotations(self):
annotations = {
"meta": {
"name": "meta",
"automatic": True
},
"todo": {
"name": "todo",
"automatic": True,
},
"var": {
"name": "var",
"automatic": True,
},
"example": {
"name": "example",
"regex": r"(\#\ *\@example\ *\: *.*)"
},
"tag": {
"name": "tag",
"automatic": True,
},
}
return annotations
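The removed comment block above documents the annotation syntax (`# @annotation: [annotation_key] # description`), and the `example` annotation keeps an explicit regex instead of being matched automatically. A quick check of that pattern against a hypothetical annotated line:

```python
import re

# regex copied from the "example" annotation definition above
EXAMPLE_RE = r"(\#\ *\@example\ *\: *.*)"

line = "# @example: install the role with default settings"
match = re.search(EXAMPLE_RE, line)
print(match.group(1))  # "# @example: install the role with default settings"
```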
def _set_is_role(self):
if os.path.isdir(self._base_dir + "/tasks"):
self.is_role = True
else:
self.is_role = None
if os.path.isdir(os.path.join(os.getcwd(), "tasks")):
return True
def get_output_dir(self):
"""
Get the relative path to cwd of the output directory for the documentation.
def _validate(self, config):
try:
anyconfig.validate(config, self.schema, ac_schema_safe=False)
return True
except Exception as e:
schema_error = "Failed validating '{validator}' in schema{schema}".format(
validator=e.validator,
schema=format_as_index(list(e.relative_schema_path)[:-1])
)
:return: str path
"""
if self.use_print_template:
return ""
if self.output_dir == "":
return os.path.realpath(self._base_dir)
elif os.path.isabs(self.output_dir):
return os.path.realpath(self.output_dir)
elif not os.path.isabs(self.output_dir):
return os.path.realpath(self._config_file_dir + "/" + self.output_dir)
# TODO: raise exception
print("{schema}: {msg}".format(schema=schema_error, msg=e.message))
sys.exit(999)
def get_template_base_dir(self):
def _add_dict_branch(self, tree, vector, value):
key = vector[0]
tree[key] = value \
if len(vector) == 1 \
else self._add_dict_branch(tree[key] if key in tree else {},
vector[1:], value)
return tree
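`_add_dict_branch` recursively expands the dotted argparse destinations (such as `logging.level`) into nested dictionaries so they can be merged with the defaults. A standalone sketch of the same logic with hypothetical input values:

```python
def add_dict_branch(tree, vector, value):
    # walk the key path, creating intermediate dicts as needed
    key = vector[0]
    tree[key] = value \
        if len(vector) == 1 \
        else add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
    return tree


result = {}
for key, value in {"logging.level": [-1], "output_dir": "./docs"}.items():
    result = add_dict_branch(result, key.split("."), value)
print(result)  # {'logging': {'level': [-1]}, 'output_dir': './docs'}
```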
def get_annotations_definition(self, automatic=True):
annotations = {}
if automatic:
for k, item in self._annotations.items():
if "automatic" in item.keys() and item["automatic"]:
annotations[k] = item
return annotations
def get_annotations_names(self, automatic=True):
annotations = []
if automatic:
for k, item in self._annotations.items():
if "automatic" in item.keys() and item["automatic"]:
annotations.append(k)
return annotations
def get_template(self):
"""
Get the base dir for the template to use.
:return: str abs path
"""
if self.use_print_template:
return os.path.realpath(self.script_base_dir + "/templates/cliprint")
if self.template == "":
template = self.default_template
else:
template = self.template
if self.template_dir == "":
return os.path.realpath(self.script_base_dir + "/templates/" + template)
elif os.path.isabs(self.template_dir):
return os.path.realpath(self.template_dir + "/" + template)
elif not os.path.isabs(self.template_dir):
return os.path.realpath(self._config_file_dir + "/" + self.template_dir + "/" + template)
def load_config_file(self, file):
allow_to_overwrite = [
"base_dir",
"output_dir",
"template_dir",
"template",
"template_overwrite",
"debug_level",
"excluded_roles_dirs",
]
with open(file, "r") as yaml_file:
try:
self._config_file_dir = os.path.dirname(os.path.realpath(file))
data = yaml.safe_load(yaml_file)
if data:
for item_to_configure in allow_to_overwrite:
if item_to_configure in data.keys():
self.__setattr__(item_to_configure, data[item_to_configure])
except yaml.YAMLError as exc:
print(exc)
template_dir = self.config.get("template_dir")
template = self.config.get("template")
return os.path.realpath(os.path.join(template_dir, template))
class SingleConfig(Config, metaclass=Singleton):
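As the new class docstring states, settings are merged in a fixed order with the last source winning: built-in defaults, then the yaml config file, then CLI options, each applied with `anyconfig.merge` and the `MS_DICTS` strategy. A condensed sketch of that precedence with made-up values:

```python
import anyconfig

defaults = {"logging": {"level": "WARNING", "json": False}, "template": "readme"}
from_file = {"logging": {"level": "INFO"}}   # e.g. from .ansibledoctor.yml
from_cli = {"logging": {"level": "DEBUG"}}   # e.g. derived from repeated -v flags

# later sources override earlier ones, dicts are merged key by key
anyconfig.merge(defaults, from_file, ac_merge=anyconfig.MS_DICTS)
anyconfig.merge(defaults, from_cli, ac_merge=anyconfig.MS_DICTS)
print(defaults["logging"])  # {'level': 'DEBUG', 'json': False}
```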

View File

@@ -1,4 +1,4 @@
#!/usr/bin/python3
DOCTOR_CONF_FILE = "doctor.conf.yaml"
YAML_EXTENSIONS = ["yaml","yml"]
YAML_EXTENSIONS = ["yaml", "yml"]

View File

@@ -26,8 +26,8 @@ class Generator:
self.extension = "j2"
self._parser = None
self.config = SingleConfig()
self.log = SingleLog()
self.log.info("Using template dir: " + self.config.get_template_base_dir())
self.log = SingleLog().logger
self.log.info("Using template dir: " + self.config.get_template())
self._parser = doc_parser
self._scan_template()
@@ -37,43 +37,44 @@ class Generator:
:return: None
"""
base_dir = self.config.get_template_base_dir()
base_dir = self.config.get_template()
for file in glob.iglob(base_dir + "/**/*." + self.extension, recursive=True):
relative_file = file[len(base_dir) + 1:]
if ntpath.basename(file)[:1] != "_":
self.log.trace("[GENERATOR] found template file: " + relative_file)
self.log.debug("Found template file: " + relative_file)
self.template_files.append(relative_file)
else:
self.log.debug("[GENERATOR] ignoring template file: " + relative_file)
self.log.debug("Ignoring template file: " + relative_file)
def _create_dir(self, directory):
if not self.config.dry_run:
os.makedirs(directory, exist_ok=True)
else:
self.log.info("[GENERATOR][DRY] Creating dir: " + dir)
self.log.info("Creating dir: " + directory)
def _write_doc(self):
files_to_overwite = []
for file in self.template_files:
doc_file = self.config.get_output_dir() + "/" + file[:-len(self.extension) - 1]
doc_file = os.path.join(self.config.config.get("output_dir"), os.path.splitext(file)[0])
if os.path.isfile(doc_file):
files_to_overwite.append(doc_file)
if len(files_to_overwite) > 0 and self.config.template_overwrite is False:
SingleLog.print("These files will be overwritten:", files_to_overwite)
if len(files_to_overwite) > 0 and self.config.config.get("force_overwrite") is False:
if not self.config.dry_run:
self.log.warn("This files will be overwritten:")
print(*files_to_overwite, sep="\n")
resulst = FileUtils.query_yes_no("Do you want to continue?")
if resulst != "yes":
sys.exit()
for file in self.template_files:
doc_file = self.config.get_output_dir() + "/" + file[:-len(self.extension) - 1]
source_file = self.config.get_template_base_dir() + "/" + file
doc_file = self.config.config.get("output_dir") + "/" + file[:-len(self.extension) - 1]
source_file = self.config.get_template() + "/" + file
self.log.trace("[GENERATOR] Writing doc output to: " + doc_file + " from: " + source_file)
self.log.debug("Writing doc output to: " + doc_file + " from: " + source_file)
# make sure the directory exists
self._create_dir(os.path.dirname(os.path.realpath(doc_file)))
@@ -83,25 +84,22 @@ class Generator:
data = template.read()
if data is not None:
try:
print(json.dumps(self._parser.get_data(), indent=4, sort_keys=True))
jenv = Environment(loader=FileSystemLoader(self.config.get_template_base_dir()), lstrip_blocks=True, trim_blocks=True, autoescape=True)
# print(json.dumps(self._parser.get_data(), indent=4, sort_keys=True))
jenv = Environment(loader=FileSystemLoader(self.config.get_template()), lstrip_blocks=True, trim_blocks=True)
jenv.filters["to_nice_yaml"] = self._to_nice_yaml
data = jenv.from_string(data).render(self._parser.get_data(), role=self._parser.get_data())
if not self.config.dry_run:
with open(doc_file, "w") as outfile:
outfile.write(data)
with open(doc_file, "wb") as outfile:
outfile.write(data.encode("utf-8"))
self.log.info("Writing to: " + doc_file)
else:
self.log.info("[GENERATOR][DRY] Writing to: " + doc_file)
self.log.info("Writing to: " + doc_file)
except jinja2.exceptions.UndefinedError as e:
self.log.error("Jinja2 templating error: <" + str(e) + "> when loading file: '" + file + "', run in debug mode to see full except")
if self.log.log_level < 1:
raise
sys.exit(1)
except UnicodeEncodeError as e:
self.log.error("At the moment I'm unable to print special chars: <" + str(e) + ">, run in debug mode to see full except")
if self.log.log_level < 1:
raise
sys.exit()
self.log.error("Unable to print special chars: <" + str(e) + ">, run in debug mode to see full except")
sys.exit(1)
def _to_nice_yaml(self, a, indent=4, *args, **kw):
"""Make verbose, human readable yaml."""
@@ -111,31 +109,6 @@ class Generator:
yaml.dump(a, stream, **kw)
return stream.getvalue().rstrip()
def print_to_cli(self):
for file in self.template_files:
source_file = self.config.get_template_base_dir() + "/" + file
with open(source_file, "r") as template:
data = template.read()
if data is not None:
try:
data = Environment(loader=FileSystemLoader(self.config.get_template_base_dir()), lstrip_blocks=True, trim_blocks=True, autoescape=True).from_string(data).render(self._parser.get_data(), r=self._parser)
print(data)
except jinja2.exceptions.UndefinedError as e:
self.log.error("Jinja2 templating error: <" + str(e) + "> when loading file: '" + file + "', run in debug mode to see full except")
if self.log.log_level < 1:
raise
except UnicodeEncodeError as e:
self.log.error("At the moment I'm unable to print special chars: <" + str(e) + ">, run in debug mode to see full except")
if self.log.log_level < 1:
raise
except Exception:
print("Unexpected error:", sys.exc_info()[0])
raise
def render(self):
if self.config.use_print_template:
self.print_to_cli()
else:
self.log.info("Using output dir: " + self.config.get_output_dir())
self._write_doc()
self.log.info("Using output dir: " + self.config.config.get("output_dir"))
self._write_doc()
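`_write_doc` renders every discovered `*.j2` template through a Jinja2 `Environment` with a `FileSystemLoader`, registers `to_nice_yaml` as a custom filter and writes the result UTF-8 encoded. A trimmed-down sketch of that rendering step; the template directory, output file name and yaml keyword arguments are assumptions for illustration:

```python
from io import StringIO

import yaml
from jinja2 import Environment, FileSystemLoader


def to_nice_yaml(a, indent=4, **kw):
    """Render a data structure as verbose, human readable yaml."""
    stream = StringIO()
    yaml.dump(a, stream, default_flow_style=False, indent=indent, allow_unicode=True, **kw)
    return stream.getvalue().rstrip()


template_dir = "templates/readme"  # hypothetical template directory
jenv = Environment(loader=FileSystemLoader(template_dir), lstrip_blocks=True, trim_blocks=True)
jenv.filters["to_nice_yaml"] = to_nice_yaml

data = {"var": {"role_enabled": {"value": True}}}
rendered = jenv.from_string("{{ var | to_nice_yaml }}").render(data)

with open("README.md", "wb") as outfile:  # hypothetical output file
    outfile.write(rendered.encode("utf-8"))
```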

View File

@@ -20,7 +20,7 @@ class Parser:
self._annotation_objs = {}
self._data = defaultdict(dict)
self.config = SingleConfig()
self.log = SingleLog()
self.log = SingleLog().logger
self._files_registry = Registry()
self._parse_meta_file()
self._parse_vars_file()

View File

@@ -17,7 +17,7 @@ class Registry:
def __init__(self):
self._doc = []
self.config = SingleConfig()
self.log = SingleLog()
self.log = SingleLog().logger
self._scan_for_yamls()
def get_files(self):
@@ -31,19 +31,20 @@ class Registry:
:return: None
"""
extensions = YAML_EXTENSIONS
base_dir = self.config.get_base_dir()
base_dir = os.getcwd()
self.log.debug("Scan for files: " + base_dir)
for extension in extensions:
for filename in glob.iglob(base_dir + "/**/*." + extension, recursive=True):
if self._is_excluded_yaml_file(filename, base_dir):
self.log.trace("Excluding: " + filename)
self.log.debug("Excluding: " + filename)
else:
self.log.trace("Adding to role:" + base_dir + " => " + filename)
self.log.debug("Adding to role:" + base_dir + " => " + filename)
self._doc.append(filename)
def _is_excluded_yaml_file(self, file, role_base_dir=None):
# TODO: not working...
def _is_excluded_yaml_file(self, file, base_dir):
"""
Sub method for handling file exclusions based on whether the full path starts with an excluded prefix.
@@ -51,8 +52,7 @@ class Registry:
:param role_base_dir:
:return:
"""
base_dir = role_base_dir
excluded = self.config.excluded_roles_dirs.copy()
excluded = self.config.config.get("exclude_files")
is_filtered = False
for excluded_dir in excluded:

View File

@@ -1,9 +1,27 @@
#!/usr/bin/python3
import logging
import os
import pprint
import sys
from distutils.util import strtobool
import colorama
import yaml
from pythonjsonlogger import jsonlogger
CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "(asctime) (levelname) (message)"
def _should_do_markup():
py_colors = os.environ.get("PY_COLORS", None)
if py_colors is not None:
return to_bool(py_colors, strict=False)
return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"
colorama.init(autoreset=True, strip=not _should_do_markup())
class Singleton(type):
@@ -15,70 +33,131 @@ class Singleton(type):
return cls._instances[cls]
class Log:
levels = {
"trace": -1,
"debug": 0,
"info": 1,
"warn": 2,
"error": 3,
}
log_level = 1
class LogFilter(object):
"""A custom log filter which excludes log messages above the logged level."""
def __init__(self, level=1):
self.set_level(level)
def __init__(self, level):
"""
Initialize a new custom log filter.
:param level: Log level limit
:returns: None
"""
self.__level = level
def filter(self, logRecord): # noqa
# https://docs.python.org/3/library/logging.html#logrecord-attributes
return logRecord.levelno <= self.__level
class MultilineFormatter(logging.Formatter):
"""Logging Formatter to reset color after newline characters."""
def format(self, record): # noqa
record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
return logging.Formatter.format(self, record)
class MultilineJsonFormatter(jsonlogger.JsonFormatter):
"""Logging Formatter to remove newline characters."""
def format(self, record): # noqa
record.msg = record.msg.replace("\n", " ")
return jsonlogger.JsonFormatter.format(self, record)
class Log:
def __init__(self, level=logging.WARN, name="ansibledoctor", json=False):
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.addHandler(self._get_error_handler(json=json))
self.logger.addHandler(self._get_warn_handler(json=json))
self.logger.addHandler(self._get_info_handler(json=json))
self.logger.addHandler(self._get_critical_handler(json=json))
self.logger.propagate = False
def _get_error_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.ERROR)
handler.addFilter(LogFilter(logging.ERROR))
handler.setFormatter(MultilineFormatter(
self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_warn_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.WARN)
handler.addFilter(LogFilter(logging.WARN))
handler.setFormatter(MultilineFormatter(
self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))))
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_info_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
handler.addFilter(LogFilter(logging.INFO))
handler.setFormatter(MultilineFormatter(
self.info(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))))
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_critical_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.CRITICAL)
handler.addFilter(LogFilter(logging.CRITICAL))
handler.setFormatter(MultilineFormatter(
self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def set_level(self, s):
self.logger.setLevel(s)
if isinstance(s, str):
for level, v in self.levels.items():
if level == s:
self.log_level = v
elif isinstance(s, int):
if s in range(4):
self.log_level = s
def debug(self, msg):
"""Format info messages and return string."""
return msg
def trace(self, msg, h=""):
if self.log_level <= -1:
self._p("*TRACE*: " + h, msg)
def critical(self, msg):
"""Format critical messages and return string."""
return msg
def debug(self, msg, h=""):
if self.log_level <= 0:
self._p("*DEBUG*: " + h, msg)
def error(self, msg):
"""Format error messages and return string."""
return msg
def info(self, msg, h=""):
if self.log_level <= 1:
self._p("*INFO*: " + h, msg)
def warn(self, msg):
"""Format warn messages and return string."""
return msg
def warn(self, msg, h=""):
if self.log_level <= 2:
self._p("*WARN*: " + h, msg)
def info(self, msg):
"""Format info messages and return string."""
return msg
def error(self, msg, h=""):
if self.log_level <= 3:
self._p("*ERROR*: " + h, msg)
def _color_text(self, color, msg):
"""
Colorize strings.
@staticmethod
def _p(head, msg, print_type=True):
:param color: colorama color settings
:param msg: string to colorize
:returns: string
if isinstance(msg, list):
t = " <list>" if print_type else ""
print(head + t)
i = 0
for line in msg:
print(" [" + str(i) + "]: " + str(line))
i += 1
elif isinstance(msg, dict):
t = " <dict>" if print_type else ""
print(head + t)
pprint.pprint(msg)
else:
print(head + str(msg))
@staticmethod
def print(msg, data):
Log._p(msg, data, False)
"""
return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
class SingleLog(Log, metaclass=Singleton):
@@ -121,3 +200,7 @@ class FileUtils:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
def to_bool(string):
return bool(strtobool(str(string)))
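The new `Log` class attaches one `StreamHandler` per level and pairs each handler's `setLevel` (lower bound) with a `LogFilter` (upper bound), so every record is emitted by exactly one handler: INFO and WARNING go to stdout, ERROR and CRITICAL to stderr. A reduced sketch of that routing without the colorama and JSON formatters:

```python
import logging
import sys


class LogFilter:
    """Let through only records at or below a fixed level."""

    def __init__(self, level):
        self.__level = level

    def filter(self, record):
        return record.levelno <= self.__level


def build_logger(name="ansibledoctor", level=logging.WARNING):
    logger = logging.getLogger(name)
    logger.setLevel(level)
    for stream, handler_level in (
        (sys.stdout, logging.INFO),
        (sys.stdout, logging.WARNING),
        (sys.stderr, logging.ERROR),
        (sys.stderr, logging.CRITICAL),
    ):
        handler = logging.StreamHandler(stream)
        handler.setLevel(handler_level)              # lower bound
        handler.addFilter(LogFilter(handler_level))  # upper bound
        logger.addHandler(handler)
    logger.propagate = False
    return logger


log = build_logger()
log.error("goes to stderr only")  # blocked by the stdout handlers' filters
```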

View File

@@ -3,8 +3,10 @@
from ansibledoctor.Cli import AnsibleDoctor
def main():
doc = AnsibleDoctor()
AnsibleDoctor()
if __name__ == "__main__":
main()

View File

@@ -1,15 +0,0 @@
# ============================================================
# Actions (variables: action)
# ============================================================
{% for role in r.get_roles(False) %}
{% if r.get_type("action",role) %}
{{ r.capitalize(r.fprn(role)) }}:
{% endif %}
{##}
{% for key , values in r.get_multi_type("action",role) %}
{{ r.capitalize(key) }}:
{% for item in values %}
* {{ item.desc }}
{% endfor %}
{% endfor %}
{% endfor %}

View File

@@ -1,10 +0,0 @@
# ============================================================
# Project Description
# ============================================================
{% for role in r.get_roles(False) %}
{{ r.capitalize(r.fprn(role)) }}:
{% for item in r.get_type("meta",role) %}
{{ r.cli_left_space(r.capitalize(item.key),25) }} {{ item.value }}
{% endfor %}
{% endfor %}

View File

@@ -1,19 +0,0 @@
# ============================================================
# Tags (variable: tag)
# ============================================================
{% for role in r.get_roles(False) %}
{{ r.capitalize(r.fprn(role)) }}:
{% for item in r.get_type("tag",role) %}
{{ r.cli_left_space(" * "+item.key,25) }} {{ r.capitalize(item.desc) }}
{% endfor %}
{% endfor %}
{#{{ tag | pprint }}#}
Duplicate Tags:
{% for k,v in r.get_duplicates("tag") %}
{{ " * "+k }} in files:
{% for item in v %}
{{ item.file }} {% if item.line != "" %}(line: {{ item.line }}) {% endif %}
{% endfor %}
{% endfor %}

View File

@@ -1,26 +0,0 @@
# ============================================================
# Todo (variables: todo)
# ============================================================
{% for role in r.get_roles(False) %}
{% if r.get_type("todo",role) %}
{{ r.capitalize(r.fprn(role)) }}:
{% endif %}
{##}
{% for key , values in r.get_multi_type("todo",role) %}
{% if key == "_unset_" %}
Todos without section:
{% for item in values %}
* {{ item.desc }}
{% endfor %}
{% endif %}
{% endfor %}
{% for key , values in r.get_multi_type("todo",role) %}
{% if key != "_unset_" %}
{{ r.capitalize(key) }}:
{% for item in values %}
* {{ item.desc }}
{% endfor %}
{% endif %}
{% endfor %}
{% endfor %}

View File

@@ -1,22 +0,0 @@
# ============================================================
# Variables (variable: var)
# ============================================================
{% for role in r.get_roles(False) %}
{% if r.get_type("var",role) %}
{{ r.capitalize(r.fprn(role)) }}:
{% endif %}
{% for item in r.get_type("var",role) %}
{{ r.cli_left_space(" * "+ item.key+": "+item.value,35) }} {{ item.desc }}
{% endfor %}
{% endfor %}
Duplicate Vars:
{% for k,v in r.get_duplicates("var") %}
{{ " * "+k }} in files:
{% for item in v %}
{{ item.file }} {% if item.line != "" %}(line: {{ item.line }}) {% endif %}
{% endfor %}
{% endfor %}
{#{{ var | pprint }}#}

View File

@@ -1,21 +0,0 @@
### CLI template ###
{% if r.cli_print_section() == "all" or r.cli_print_section() == "info" %}
{% include '_description.j2' %}
{% endif %}
{% if r.cli_print_section() == "all" or r.cli_print_section() == "action" %}
{% include '_action.j2' %}
{% endif %}
{% if r.cli_print_section() == "all" or r.cli_print_section() == "tag" %}
{% include '_tags.j2' %}
{% endif %}
{% if r.cli_print_section() == "all" or r.cli_print_section() == "todo" %}
{% include '_todo.j2' %}
{% endif %}
{% if r.cli_print_section() == "all" or r.cli_print_section() == "var" %}
{% include '_var.j2' %}
{% endif %}

View File

@@ -5,7 +5,8 @@ import io
import os
import re
from setuptools import find_packages, setup
from setuptools import find_packages
from setuptools import setup
PACKAGE_NAME = "ansibledoctor"