refactor: replace logger by structlog (#718)

BREAKING CHANGE: Replace the custom logger and `python-json-logger` with
`structlog`. This will also change the layout and general structure of
the log messages.

The original `python-json-logger` package is unmaintained and has caused
some issues. Switching to the fork at
https://github.com/nhairs/python-json-logger.git fixed the logging issues,
but a direct Git dependency prevents PyPI package uploads:

```
HTTP Error 400: Can't have direct dependency: python-json-logger@ git+https://github.com/nhairs/python-json-logger.git@v3.1.0. See https://packaging.python.org/specifications/core-metadata for more information.
```
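For context, a minimal sketch of the new call convention and (roughly) how it renders. This is not the project's exact configuration; `Config._init_logger()` below builds a longer processor chain (contextvars, timestamps, JSON mode), and the sample output in the comments is illustrative only:

```python
import structlog

# Minimal sketch, assuming a plain console renderer; the real chain is
# configured in Config._init_logger() further down in this commit.
structlog.configure(
    processors=[
        structlog.processors.add_log_level,
        structlog.dev.ConsoleRenderer(),
    ]
)

log = structlog.get_logger()
log.info("Ansible role detected", role="example_role")
# Old custom logger (roughly):      [INFO] Ansible role detected: example_role
# structlog ConsoleRenderer (roughly): [info     ] Ansible role detected  role=example_role
```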
Robert Kaussow authored on 2024-06-17 13:51:03 +02:00 (committed by GitHub)
commit 8e22e87a31, parent ed113e37ea
11 changed files with 172 additions and 574 deletions

File: ansibledoctor/annotation.py

@@ -6,9 +6,10 @@ import re
 from collections import defaultdict
 import anyconfig
+import structlog
 from ansibledoctor.config import SingleConfig
-from ansibledoctor.utils import SingleLog, _split_string
+from ansibledoctor.utils import _split_string, sysexit_with_message

 class AnnotationItem:

@@ -37,8 +38,7 @@ class Annotation:
         self._all_items = defaultdict(dict)
         self._file_handler = None
         self.config = SingleConfig()
-        self.log = SingleLog()
-        self.logger = self.log.logger
+        self.log = structlog.get_logger()
         self._files_registry = files_registry
         self._all_annotations = self.config.get_annotations_definition()

@@ -67,7 +67,7 @@ class Annotation:
                     num, line, self._annotation_definition["name"], rfile
                 )
                 if item:
-                    self.logger.info(str(item))
+                    self.log.info(f"Found {item!s}")
                     self._populate_item(
                         item.get_obj().items(), self._annotation_definition["name"]
                     )

@@ -85,7 +85,7 @@ class Annotation:
             try:
                 anyconfig.merge(self._all_items[key], value, ac_merge=anyconfig.MS_DICTS)
             except ValueError as e:
-                self.log.sysexit_with_message(f"Unable to merge annotation values:\n{e}")
+                sysexit_with_message("Failed to merge annotation values", error=e)

     def _get_annotation_data(self, num, line, name, rfile):
         """

@@ -171,15 +171,15 @@ class Annotation:
                 if parts[2].startswith("$"):
                     source = "".join([x.strip() for x in multiline])
-                    multiline = self._str_to_json(key, source, rfile, num, line)
+                    multiline = self._str_to_json(key, source, rfile, num)
                 item.data[key][parts[1]] = multiline
         return item

-    def _str_to_json(self, key, string, rfile, num, line):
+    def _str_to_json(self, key, string, rfile, num):
         try:
             return {key: json.loads(string)}
         except ValueError:
-            self.log.sysexit_with_message(
-                f"Json value error: Can't parse json in {rfile}:{num!s}:\n{line.strip()}"
-            )
+            sysexit_with_message(
+                f"ValueError: Failed to parse json in {rfile}:{num!s}", file=rfile
+            )
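A note on the `structlog.get_logger()` calls introduced here and in the other modules: the returned logger is a lazy proxy, so it can safely be created in `__init__` (or at class level) before `Config._init_logger()` has run. A minimal sketch, with a made-up class and message:

```python
import structlog

class AnnotationSketch:
    """Illustrative stand-in for Annotation; only the logging wiring is shown."""

    def __init__(self):
        # Lazy proxy: the processor chain configured later in Config._init_logger()
        # is applied when the first event is emitted, not when get_logger() is called.
        self.log = structlog.get_logger()

    def report(self, item):
        self.log.info(f"Found {item!s}")

AnnotationSketch().report("@var demo_variable")
```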

File: CLI entry point (defines class AnsibleDoctor)

@@ -4,30 +4,30 @@
 import argparse
 import os
+import structlog
 import ansibledoctor.exception
 from ansibledoctor import __version__
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.doc_generator import Generator
 from ansibledoctor.doc_parser import Parser
-from ansibledoctor.utils import SingleLog
+from ansibledoctor.utils import sysexit_with_message

 class AnsibleDoctor:
     """Create main object."""

-    def __init__(self):
-        self.log = SingleLog()
-        self.logger = self.log.logger
+    log = structlog.get_logger()

+    def __init__(self):
         try:
             self.config = SingleConfig()
             self.config.load(args=self._parse_args())
-            self.log.register_hanlers(json=self.config.config.logging.json)
             self._execute()
         except ansibledoctor.exception.DoctorError as e:
-            self.log.sysexit_with_message(e)
+            sysexit_with_message(e)
         except KeyboardInterrupt:
-            self.log.sysexit_with_message("Aborted...")
+            sysexit_with_message("Aborted...")

     def _parse_args(self):
         """

@@ -123,25 +123,19 @@ class AnsibleDoctor:
         for item in walkdirs:
             os.chdir(item)
             self.config.load(root_path=os.getcwd())
-            self.log.register_hanlers(json=self.config.config.logging.json)
-            try:
-                self.log.set_level(self.config.config.logging.level)
-            except ValueError as e:
-                self.log.sysexit_with_message(f"Can not set log level.\n{e!s}")
-            self.logger.info(f"Using config file: {self.config.config_files}")
-            self.logger.debug(f"Using working directory: {os.path.relpath(item, self.log.ctx)}")
+            self.log.debug("Switch working directory", path=item)
+            self.log.info("Lookup config file", path=self.config.config_files)
             if self.config.config.role.autodetect:
                 if self.config.is_role():
-                    self.logger.info(f"Ansible role detected: {self.config.config.role_name}")
+                    structlog.contextvars.bind_contextvars(role=self.config.config.role_name)
+                    self.log.info("Ansible role detected")
                 else:
-                    self.log.sysexit_with_message("No Ansible role detected")
+                    sysexit_with_message("No Ansible role detected")
             else:
-                self.logger.info("Ansible role detection disabled")
+                self.log.info("Ansible role detection disabled")

             doc_parser = Parser()
             doc_generator = Generator(doc_parser)
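The `structlog.contextvars.bind_contextvars(role=...)` call above relies on `structlog.contextvars.merge_contextvars` being part of the processor chain set up in `_init_logger` (next file). A small sketch of the effect, with a made-up role name:

```python
import structlog

structlog.configure(
    processors=[
        structlog.contextvars.merge_contextvars,
        structlog.dev.ConsoleRenderer(),
    ]
)

structlog.contextvars.clear_contextvars()
structlog.contextvars.bind_contextvars(role="demo_role")

log = structlog.get_logger()
# Both events carry role=demo_role without repeating it at each call site.
log.info("Ansible role detected")
log.info("Lookup config file", path=["/etc/ansible-doctor.yml"])
```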

File: ansibledoctor/config.py

@@ -1,9 +1,13 @@
 #!/usr/bin/env python3
 """Global settings definition."""
+import logging
 import os
 import re
+from io import StringIO
+import colorama
+import structlog
 from appdirs import AppDirs
 from dynaconf import Dynaconf, ValidationError, Validator

@@ -198,6 +202,8 @@ class Config:
         self.config.update(self.args)
         self.validate()
+        self._init_logger()

     def validate(self):
         try:
             self.config.validators.validate_all()

@@ -226,6 +232,81 @@ class Config:
             annotations.append(k)
         return annotations

+    def _init_logger(self):
+        styles = structlog.dev.ConsoleRenderer.get_default_level_styles()
+        styles["debug"] = colorama.Fore.BLUE
+
+        processors = [
+            structlog.contextvars.merge_contextvars,
+            structlog.processors.add_log_level,
+            structlog.processors.StackInfoRenderer(),
+            structlog.dev.set_exc_info,
+            structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
+        ]
+
+        if self.config.logging.json:
+            processors.append(ErrorStringifier())
+            processors.append(structlog.processors.JSONRenderer())
+        else:
+            processors.append(MultilineConsoleRenderer(level_styles=styles))
+
+        try:
+            structlog.configure(
+                processors=processors,
+                wrapper_class=structlog.make_filtering_bound_logger(
+                    logging.getLevelName(self.config.get("logging.level")),
+                ),
+            )
+            structlog.contextvars.unbind_contextvars()
+        except KeyError as e:
+            raise ansibledoctor.exception.ConfigError(f"Can not set log level: {e!s}") from e
+
+
+class ErrorStringifier:
+    """A processor that converts exceptions to a string representation."""
+
+    def __call__(self, _, __, event_dict):
+        if "error" not in event_dict:
+            return event_dict
+
+        err = event_dict.get("error")
+        if isinstance(err, Exception):
+            event_dict["error"] = f"{err.__class__.__name__}: {err}"
+        return event_dict
+
+
+class MultilineConsoleRenderer(structlog.dev.ConsoleRenderer):
+    """A processor for printing multiline strings."""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def __call__(self, _, __, event_dict):
+        err = None
+        if "error" in event_dict:
+            err = event_dict.pop("error")
+
+        event_dict = super().__call__(_, __, event_dict)
+        if not err:
+            return event_dict
+
+        sio = StringIO()
+        sio.write(event_dict)
+
+        if isinstance(err, Exception):
+            sio.write(
+                f"\n{colorama.Fore.RED}{err.__class__.__name__}:"
+                f"{colorama.Style.RESET_ALL} {str(err).strip()}"
+            )
+        else:
+            sio.write(f"\n{err.strip()}")
+
+        return sio.getvalue()
+
+
 class SingleConfig(Config, metaclass=Singleton):
     """Singleton config class."""

File: ansibledoctor/doc_generator.py

@@ -7,20 +7,20 @@ from functools import reduce
 import jinja2.exceptions
 import ruamel.yaml
+import structlog
 from jinja2 import Environment, FileSystemLoader
 from jinja2.filters import pass_eval_context
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.template import Template
-from ansibledoctor.utils import FileUtils, SingleLog
+from ansibledoctor.utils import FileUtils, sysexit_with_message

 class Generator:
     """Generate documentation from jinja2 templates."""

     def __init__(self, doc_parser):
-        self.log = SingleLog()
-        self.logger = self.log.logger
+        self.log = structlog.get_logger()
         self.config = SingleConfig()
         self.template = Template(
             self.config.config.get("template.name"),

@@ -32,9 +32,9 @@ class Generator:
         if not self.config.config["dry_run"] and not os.path.isdir(directory):
             try:
                 os.makedirs(directory, exist_ok=True)
-                self.logger.info(f"Creating dir: {directory}")
+                self.log.info(f"Creating dir: {directory}")
             except FileExistsError as e:
-                self.log.sysexit_with_message(e)
+                sysexit_with_message(e)

     def _write_doc(self):
         files_to_overwite = []

@@ -55,7 +55,7 @@ class Generator:
                 with open(header_file) as a:
                     header_content = a.read()
             except FileNotFoundError as e:
-                self.log.sysexit_with_message(f"Can not open custom header file\n{e!s}")
+                sysexit_with_message("Can not open custom header file", path=header_file, error=e)

         if (
             len(files_to_overwite) > 0

@@ -69,9 +69,9 @@ class Generator:
             try:
                 if not FileUtils.query_yes_no(f"{prompt}\nDo you want to continue?"):
-                    self.log.sysexit_with_message("Aborted...")
+                    sysexit_with_message("Aborted...")
             except KeyboardInterrupt:
-                self.log.sysexit_with_message("Aborted...")
+                sysexit_with_message("Aborted...")

         for tf in self.template.files:
             doc_file = os.path.join(

@@ -79,10 +79,7 @@ class Generator:
             )
             template = os.path.join(self.template.path, tf)
-            self.logger.debug(
-                f"Writing renderer output to: {os.path.relpath(doc_file, self.log.ctx)} "
-                f"from: {os.path.dirname(template)}"
-            )
+            self.log.debug("Writing renderer output", path=doc_file, src=os.path.dirname(template))

             # make sure the directory exists
             self._create_dir(os.path.dirname(doc_file))

@@ -111,21 +108,16 @@ class Generator:
                     with open(doc_file, "wb") as outfile:
                         outfile.write(header_content.encode("utf-8"))
                         outfile.write(data.encode("utf-8"))
-                        self.logger.info(f"Writing to: {doc_file}")
-                else:
-                    self.logger.info(f"Writing to: {doc_file}")
             except (
                 jinja2.exceptions.UndefinedError,
                 jinja2.exceptions.TemplateSyntaxError,
                 jinja2.exceptions.TemplateRuntimeError,
             ) as e:
-                self.log.sysexit_with_message(
-                    f"Jinja2 templating error while loading file: {tf}\n{e!s}"
-                )
+                sysexit_with_message(
+                    "Jinja2 template error while loading file", path=tf, error=e
+                )
             except UnicodeEncodeError as e:
-                self.log.sysexit_with_message(
-                    f"Unable to print special characters\n{e!s}"
-                )
+                sysexit_with_message("Failed to print special characters", error=e)

     def _to_nice_yaml(self, a, indent=4, **kw):
         """Make verbose, human readable yaml."""

@@ -157,5 +149,4 @@ class Generator:
         return jinja2.filters.do_mark_safe(normalized)

     def render(self):
-        self.logger.info(f"Using renderer destination: {self.config.config.get('renderer.dest')}")
         self._write_doc()
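The pattern in the hunks above is consistent: a human-readable message as the event, machine-readable context as keyword arguments, and the exception object under `error` so the renderers added in `config.py` can format it. A small sketch of one such failure path (the file name is made up; `sysexit_with_message` is the helper introduced later in this commit):

```python
from ansibledoctor.utils import sysexit_with_message

def read_custom_header(header_file):
    """Illustrative failure path following the new convention."""
    try:
        with open(header_file) as f:
            return f.read()
    except FileNotFoundError as e:
        # Emits one critical event carrying path=... and error=... as structured
        # fields, then exits with code 1.
        sysexit_with_message("Can not open custom header file", path=header_file, error=e)

read_custom_header("HEADER.md")
```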

File: ansibledoctor/doc_parser.py

@@ -5,13 +5,14 @@ import fnmatch
 from collections import defaultdict
 import anyconfig
+import structlog
 from ansibledoctor.annotation import Annotation
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.contstants import YAML_EXTENSIONS
 from ansibledoctor.exception import YAMLError
 from ansibledoctor.file_registry import Registry
-from ansibledoctor.utils import SingleLog, flatten
+from ansibledoctor.utils import flatten, sysexit_with_message
 from ansibledoctor.utils.yamlhelper import parse_yaml, parse_yaml_ansible

@@ -22,8 +23,7 @@ class Parser:
         self._annotation_objs = {}
         self._data = defaultdict(dict)
         self.config = SingleConfig()
-        self.log = SingleLog()
-        self.logger = SingleLog().logger
+        self.log = structlog.get_logger()
         self._files_registry = Registry()
         self._parse_meta_file()
         self._parse_var_files()

@@ -37,7 +37,7 @@ class Parser:
             try:
                 raw = parse_yaml(yamlfile)
             except YAMLError as e:
-                self.log.sysexit_with_message(f"Unable to read yaml file {rfile}\n{e}")
+                sysexit_with_message("Failed to read yaml file", path=rfile, error=e)

             data = defaultdict(dict, raw or {})

@@ -53,7 +53,7 @@ class Parser:
             try:
                 raw = parse_yaml(yamlfile)
             except YAMLError as e:
-                self.log.sysexit_with_message(f"Unable to read yaml file {rfile}\n{e}")
+                sysexit_with_message("Failed to read yaml file", path=rfile, error=e)

             data = defaultdict(dict, raw)
             if data.get("galaxy_info"):

@@ -70,7 +70,7 @@ class Parser:
             try:
                 raw = parse_yaml_ansible(yamlfile)
             except YAMLError as e:
-                self.log.sysexit_with_message(f"Unable to read yaml file {rfile}\n{e}")
+                sysexit_with_message("Failed to read yaml file", path=rfile, error=e)

             tags = []
             for task in raw:

@@ -89,7 +89,7 @@ class Parser:
         """Generate the documentation data object."""
         tags = defaultdict(dict)
         for annotation in self.config.get_annotations_names(automatic=True):
-            self.logger.info(f"Finding annotations for: @{annotation}")
+            self.log.info(f"Lookup annotation @{annotation}")
             self._annotation_objs[annotation] = Annotation(
                 name=annotation, files_registry=self._files_registry
             )

@@ -98,7 +98,7 @@ class Parser:
         try:
             anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)
         except ValueError as e:
-            self.log.sysexit_with_message(f"Unable to merge annotation values:\n{e}")
+            sysexit_with_message("Failed to merge annotation values", error=e)

     def get_data(self):
         return self._data
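Unrelated to logging but central to the last hunk: `anyconfig.merge(..., ac_merge=anyconfig.MS_DICTS)` deep-merges nested dictionaries instead of replacing them, which is why a failed merge is the error case reported here. A toy example with made-up annotation data:

```python
from collections import defaultdict

import anyconfig

data = defaultdict(dict, {"var": {"demo_role_enabled": {"value": True}}})
tags = {"var": {"demo_role_packages": {"value": ["curl"]}}}

# MS_DICTS merges the nested "var" dicts; a plain update() would drop one of them.
anyconfig.merge(data, tags, ac_merge=anyconfig.MS_DICTS)
print(sorted(data["var"]))  # ['demo_role_enabled', 'demo_role_packages']
```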

File: ansibledoctor/file_registry.py

@@ -5,10 +5,10 @@ import glob
 import os
 import pathspec
+import structlog
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.contstants import YAML_EXTENSIONS
-from ansibledoctor.utils import SingleLog

 class Registry:

@@ -21,8 +21,7 @@ class Registry:
     def __init__(self):
         self._doc = []
         self.config = SingleConfig()
-        self.log = SingleLog()
-        self.logger = self.log.logger
+        self.log = structlog.get_logger()
         self._scan_for_yamls()

     def get_files(self):

@@ -37,19 +36,16 @@
         """
         extensions = YAML_EXTENSIONS
         base_dir = self.config.config.base_dir
-        role_name = self.config.config.role_name
         excludes = self.config.config.get("exclude_files")
         excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)

-        self.logger.debug(f"Scan for files: {os.path.relpath(base_dir,self.log.ctx)}")
+        self.log.debug("Lookup role files", path=base_dir)

         for extension in extensions:
             pattern = os.path.join(base_dir, "**/*." + extension)
             for filename in glob.iglob(pattern, recursive=True):
                 if not excludespec.match_file(filename):
-                    self.log.debug(
-                        f"Adding file to role: {role_name}: {os.path.relpath(filename, base_dir)}"
-                    )
+                    self.log.debug("Found role file", path=os.path.relpath(filename, base_dir))
                     self._doc.append(filename)
                 else:
-                    self.log.debug(f"Excluding file: {os.path.relpath(filename, base_dir)}")
+                    self.log.debug("Skippped role file", path=os.path.relpath(filename, base_dir))

File: ansibledoctor/template.py

@@ -7,17 +7,18 @@ import os
 import shutil
 import tempfile
+import structlog
 from git import GitCommandError, Repo
 import ansibledoctor.exception
-from ansibledoctor.utils import SingleLog
+from ansibledoctor.utils import sysexit_with_message

 class Template:
     """
     Represents a template that can be used to generate content.

-    Templates can besourced from a local file or a Git repository. The `Template` class handles
+    Templates can be sourced from a local file or a Git repository. The `Template` class handles
     the initialization and setup of a template, including cloning a Git repository if necessary.

     Args:

@@ -33,8 +34,7 @@ class Template:
     """

     def __init__(self, name, src):
-        self.log = SingleLog()
-        self.logger = self.log.logger
+        self.log = structlog.get_logger()
         self.name = name
         self.src = src

@@ -68,10 +68,10 @@ class Template:
         atexit.register(self._cleanup_temp_dir, temp_dir)

         try:
-            self.logger.debug(f"Cloning template repo: {repo_url}")
+            self.log.debug("Cloning template repo", src=repo_url)
             repo = Repo.clone_from(repo_url, temp_dir)

             if branch_or_tag:
-                self.logger.debug(f"Checking out branch or tag: {branch_or_tag}")
+                self.log.debug(f"Checking out branch or tag: {branch_or_tag}")
                 try:
                     repo.git.checkout(branch_or_tag)
                 except GitCommandError as e:

@@ -93,17 +93,17 @@ class Template:
         template_files = []

         if os.path.isdir(self.path):
-            self.logger.info(f"Using template src: {self.src} name: {self.name}")
+            self.log.info("Lookup template files", src=self.src)
         else:
-            self.log.sysexit_with_message(f"Can not open template directory {self.path}")
+            sysexit_with_message("Can not open template directory", path=self.path)

         for file in glob.iglob(self.path + "/**/*.j2", recursive=True):
             relative_file = file[len(self.path) + 1 :]
             if ntpath.basename(file)[:1] != "_":
-                self.logger.debug(f"Found template file: {relative_file}")
+                self.log.debug("Found template file", path=relative_file)
                 template_files.append(relative_file)
             else:
-                self.logger.debug(f"Ignoring template file: {relative_file}")
+                self.log.debug("Skipped template file", path=relative_file)

         return template_files
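For orientation, the clone/checkout flow that the debug messages above describe, reduced to a standalone sketch. The repository URL is a placeholder, and the real code registers `self._cleanup_temp_dir` rather than `shutil.rmtree`:

```python
import atexit
import shutil
import tempfile

from git import GitCommandError, Repo

def clone_template(repo_url, branch_or_tag=None):
    """Clone a template repository into a temp dir that is removed at exit."""
    temp_dir = tempfile.mkdtemp(prefix="ansibledoctor-template-")
    atexit.register(shutil.rmtree, temp_dir, ignore_errors=True)

    repo = Repo.clone_from(repo_url, temp_dir)
    if branch_or_tag:
        try:
            repo.git.checkout(branch_or_tag)
        except GitCommandError as e:
            raise RuntimeError(f"Can not checkout: {branch_or_tag}") from e
    return temp_dir
```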

File: ansibledoctor/utils/__init__.py

@@ -1,16 +1,11 @@
 #!/usr/bin/env python3
 """Global utility methods and classes."""
-import logging
 import os
 import sys
 from collections.abc import Iterable

-import colorama
-from pythonjsonlogger import jsonlogger
+import structlog

-CONSOLE_FORMAT = "{}{}[%(levelname)s]{} %(message)s"
-JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"

 def strtobool(value):

@@ -49,14 +44,6 @@ def flatten(items):
             yield x

-def _should_do_markup():
-    py_colors = os.environ.get("PY_COLORS", None)
-    if py_colors is not None:
-        return to_bool(py_colors)
-
-    return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"

 def _split_string(string, delimiter, escape, maxsplit=None):
     result = []
     current_element = []

@@ -91,7 +78,13 @@ def _split_string(string, delimiter, escape, maxsplit=None):
     return result

-colorama.init(autoreset=True, strip=not _should_do_markup())
+def sysexit(code=1):
+    sys.exit(code)
+
+
+def sysexit_with_message(msg, code=1, **kwargs):
+    structlog.get_logger().critical(str(msg).strip(), **kwargs)
+    sysexit(code)

 class Singleton(type):

@@ -105,209 +98,6 @@ class Singleton(type):
         return cls._instances[cls]
class LogFilter:
"""Exclude log messages above the logged level."""
def __init__(self, level):
"""
Initialize a new custom log filter.
:param level: Log level limit
:returns: None
"""
self.__level = level
def filter(self, logRecord): # noqa
# https://docs.python.org/3/library/logging.html#logrecord-attributes
return logRecord.levelno <= self.__level
class MultilineFormatter(logging.Formatter):
"""Reset color after newline characters."""
def format(self, record):
record.msg = record.msg.strip().replace("\n", f"\n{colorama.Style.RESET_ALL}... ")
return logging.Formatter.format(self, record)
class MultilineJsonFormatter(jsonlogger.JsonFormatter):
"""Remove newline characters."""
def format(self, record):
record.msg = record.msg.replace("\n", " ")
return jsonlogger.JsonFormatter.format(self, record)
class Log:
"""Handle logging."""
def __init__(self, level=logging.WARNING, name="ansibledoctor", json=False):
self.ctx = os.getcwd()
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.register_hanlers(json=json)
self.logger.propagate = False
def _get_error_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.ERROR)
handler.addFilter(LogFilter(logging.ERROR))
handler.setFormatter(
MultilineFormatter(
self.error(
CONSOLE_FORMAT.format(
colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_warning_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.WARNING)
handler.addFilter(LogFilter(logging.WARNING))
handler.setFormatter(
MultilineFormatter(
self.warning(
CONSOLE_FORMAT.format(
colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_info_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
handler.addFilter(LogFilter(logging.INFO))
handler.setFormatter(
MultilineFormatter(
self.info(
CONSOLE_FORMAT.format(
colorama.Fore.CYAN, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_critical_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.CRITICAL)
handler.addFilter(LogFilter(logging.CRITICAL))
handler.setFormatter(
MultilineFormatter(
self.critical(
CONSOLE_FORMAT.format(
colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_debug_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
handler.addFilter(LogFilter(logging.DEBUG))
handler.setFormatter(
MultilineFormatter(
self.debug(
CONSOLE_FORMAT.format(
colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def register_hanlers(self, json=False):
"""
Enable or disable JSON logging.
:param enable: True to enable JSON logging, False to disable
"""
# Remove all existing handlers
for handler in self.logger.handlers[:]:
self.logger.removeHandler(handler)
self.logger.addHandler(self._get_error_handler(json=json))
self.logger.addHandler(self._get_warning_handler(json=json))
self.logger.addHandler(self._get_info_handler(json=json))
self.logger.addHandler(self._get_critical_handler(json=json))
self.logger.addHandler(self._get_debug_handler(json=json))
def set_level(self, s):
self.logger.setLevel(s.upper())
def debug(self, msg):
"""Format info messages and return string."""
return msg
def critical(self, msg):
"""Format critical messages and return string."""
return msg
def error(self, msg):
"""Format error messages and return string."""
return msg
def warning(self, msg):
"""Format warning messages and return string."""
return msg
def info(self, msg):
"""Format info messages and return string."""
return msg
def _color_text(self, color, msg):
"""
Colorize strings.
:param color: colorama color settings
:param msg: string to colorize
:returns: string
"""
return f"{color}{msg}{colorama.Style.RESET_ALL}"
def sysexit(self, code=1):
sys.exit(code)
def sysexit_with_message(self, msg, code=1):
self.logger.critical(str(msg).strip())
self.sysexit(code)
class SingleLog(Log, metaclass=Singleton):
"""Singleton logging class."""
pass
 class FileUtils:
     """Mics static methods for file handling."""

File: ansibledoctor/utils/yamlhelper.py

@@ -4,6 +4,7 @@ from collections import defaultdict
 from contextlib import suppress

 import ruamel.yaml
+import yaml
 from ansible.parsing.yaml.loader import AnsibleLoader

 import ansibledoctor.exception

@@ -27,16 +28,12 @@ def parse_yaml_ansible(yamlfile):
         loader = AnsibleLoader(yamlfile)
         data = loader.get_single_data() or []
     except (
-        ruamel.yaml.parser.ParserError,
-        ruamel.yaml.scanner.ScannerError,
-        ruamel.yaml.constructor.ConstructorError,
-        ruamel.yaml.composer.ComposerError,
+        yaml.parser.ParserError,
+        yaml.scanner.ScannerError,
+        yaml.constructor.ConstructorError,
+        yaml.composer.ComposerError,
     ) as e:
-        message = (
-            f"{e.context} in line {e.context_mark.line}, column {e.context_mark.line}\n"
-            f"{e.problem} in line {e.problem_mark.line}, column {e.problem_mark.column}"
-        )
-        raise ansibledoctor.exception.YAMLError(message) from e
+        raise ansibledoctor.exception.YAMLError(e) from e

     return data

@@ -58,11 +55,7 @@ def parse_yaml(yamlfile):
         ruamel.yaml.constructor.ConstructorError,
         ruamel.yaml.composer.ComposerError,
     ) as e:
-        message = (
-            f"{e.context} in line {e.context_mark.line}, column {e.context_mark.line}\n"
-            f"{e.problem} in line {e.problem_mark.line}, column {e.problem_mark.column}"
-        )
-        raise ansibledoctor.exception.YAMLError(message) from e
+        raise ansibledoctor.exception.YAMLError(e) from e

     return data
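The switch from `ruamel.yaml.*` to `yaml.*` exception classes in `parse_yaml_ansible()` matches the loader in use: `AnsibleLoader` is built on PyYAML, so its failures raise PyYAML error types (this is my reading of the hunk; `parse_yaml()` below it keeps the ruamel.yaml classes). A minimal sketch, assuming `ansible-core` is installed:

```python
import yaml
from ansible.parsing.yaml.loader import AnsibleLoader

import ansibledoctor.exception

def parse_tasks(stream):
    """Parse an Ansible task list and wrap PyYAML parse errors."""
    try:
        loader = AnsibleLoader(stream)
        return loader.get_single_data() or []
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        # The raw exception is preserved; callers log it via error=e and exit.
        raise ansibledoctor.exception.YAMLError(e) from e

print(parse_tasks("- name: demo task\n  ansible.builtin.debug:\n    msg: ok\n"))
```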

File: poetry.lock (generated)

@@ -46,25 +46,6 @@ files = [
    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
[[package]]
name = "attrs"
version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "cffi"
version = "1.16.0"

@@ -282,26 +263,6 @@ toml = ["toml"]
vault = ["hvac"]
yaml = ["ruamel.yaml"]
[[package]]
name = "environs"
version = "11.0.0"
description = "simplified environment variable parsing"
optional = false
python-versions = ">=3.8"
files = [
{file = "environs-11.0.0-py3-none-any.whl", hash = "sha256:e0bcfd41c718c07a7db422f9109e490746450da38793fe4ee197f397b9343435"},
{file = "environs-11.0.0.tar.gz", hash = "sha256:069727a8f73d8ba8d033d3cd95c0da231d44f38f1da773bf076cef168d312ee8"},
]
[package.dependencies]
marshmallow = ">=3.13.0"
python-dotenv = "*"
[package.extras]
dev = ["environs[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
django = ["dj-database-url", "dj-email-url", "django-cache-url"]
tests = ["environs[django]", "pytest"]
[[package]]
name = "exceptiongroup"
version = "1.2.1"

@@ -397,41 +358,6 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "jsonschema"
version = "4.22.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"},
{file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"},
]
[package.dependencies]
attrs = ">=22.2.0"
jsonschema-specifications = ">=2023.03.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
[[package]]
name = "jsonschema-specifications"
version = "2023.12.1"
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
{file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
]
[package.dependencies]
referencing = ">=0.31.0"
[[package]]
name = "markdown-it-py"
version = "3.0.0"

@@ -525,25 +451,6 @@ files = [
    {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
[[package]]
name = "marshmallow"
version = "3.21.3"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.8"
files = [
{file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"},
{file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"]
tests = ["pytest", "pytz", "simplejson"]
[[package]]
name = "mdurl"
version = "0.1.2"

@@ -674,41 +581,6 @@ pytest = ">=6.2.5"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "python-dotenv"
version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
]
[package.extras]
cli = ["click (>=5.0)"]
[[package]]
name = "python-json-logger"
version = "3.1.0"
description = "JSON Log Formatter for the Python Logging Package"
optional = false
python-versions = ">=3.8"
files = []
develop = false
[package.dependencies]
typing_extensions = "*"
[package.extras]
dev = ["backports.zoneinfo", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec", "mypy", "orjson", "pylint", "pytest", "tzdata", "validate-pyproject[all]"]
[package.source]
type = "git"
url = "https://github.com/nhairs/python-json-logger.git"
reference = "v3.1.0"
resolved_reference = "e2b40e420f4181fd921b01a2fd0b23ca82b5fc9b"
[[package]]
name = "pyyaml"
version = "6.0.1"

@@ -769,21 +641,6 @@ files = [
    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
name = "referencing"
version = "0.35.1"
description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
{file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
]
[package.dependencies]
attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
[[package]]
name = "resolvelib"
version = "0.8.1"

@@ -819,114 +676,6 @@ pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rpds-py"
version = "0.18.1"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.8"
files = [
{file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"},
{file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"},
{file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"},
{file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"},
{file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"},
{file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"},
{file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"},
{file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"},
{file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"},
{file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"},
{file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"},
{file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"},
{file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"},
{file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"},
{file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"},
{file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"},
{file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"},
{file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"},
{file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"},
{file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"},
{file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"},
]
[[package]]
name = "ruamel-yaml"
version = "0.18.6"

@@ -1041,6 +790,23 @@ files = [
    {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
]
[[package]]
name = "structlog"
version = "24.2.0"
description = "Structured Logging for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "structlog-24.2.0-py3-none-any.whl", hash = "sha256:983bd49f70725c5e1e3867096c0c09665918936b3db27341b41d294283d7a48a"},
{file = "structlog-24.2.0.tar.gz", hash = "sha256:0e3fe74924a6d8857d3f612739efb94c72a7417d7c7c008d12276bca3b5bf13b"},
]
[package.extras]
dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"]
tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"]
typing = ["mypy (>=1.4)", "rich", "twisted"]
[[package]]
name = "toml"
version = "0.10.2"

@@ -1063,21 +829,10 @@ files = [
    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "typing-extensions"
version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[extras]
ansible-core = ["ansible-core"]

[metadata]
lock-version = "2.0"
python-versions = "^3.9.0"
-content-hash = "f67e617787566ebf3e3bab85ddf75d97fe65a030d2ecec6d6877332d104d9cd0"
+content-hash = "bf88dff6070204056a2c1c15172585388329d54aa2eebdc2ff909cd3f8a412ab"

File: pyproject.toml

@@ -35,15 +35,13 @@ Jinja2 = "3.1.4"
 anyconfig = "0.14.0"
 appdirs = "1.4.4"
 colorama = "0.4.6"
-environs = "11.0.0"
-jsonschema = "4.22.0"
 pathspec = "0.12.1"
 python = "^3.9.0"
-python-json-logger = { git = "https://github.com/nhairs/python-json-logger.git", tag = "v3.1.0" }
 "ruamel.yaml" = "0.18.6"
 dynaconf = "3.2.5"
 gitpython = "3.1.43"
 ansible-core = { version = "2.14.17", optional = true }
+structlog = "24.2.0"

 [tool.poetry.extras]
 ansible-core = ["ansible-core"]