Mirror of https://github.com/thegeeklab/ansible-doctor.git (synced 2024-11-22 04:40:43 +00:00)

Commit 12aaab20d7 (parent ce216b41dc): add yapf as formatter
@@ -51,7 +51,7 @@ local PipelineTest = {
     PythonVersion(pyversion='3.5'),
     PythonVersion(pyversion='3.6'),
     PythonVersion(pyversion='3.7'),
-    PythonVersion(pyversion='3.8-rc'),
+    PythonVersion(pyversion='3.8'),
   ],
   depends_on: [
     'lint',
.flake8 (18 lines changed)

@@ -1,8 +1,18 @@
 [flake8]
-# Temp disable Docstring checks D101, D102, D103, D107
-ignore = D102, D103, D107, D202, W503
-max-line-length = 110
+ignore = E501, W503, F401, N813, D101, D102, D103, D107
+max-line-length = 99
 inline-quotes = double
-exclude = .git,.tox,__pycache__,build,dist,tests,*.pyc,*.egg-info,.cache,.eggs,env*
+exclude =
+    .git
+    .tox
+    __pycache__
+    build
+    dist
+    tests
+    *.pyc
+    *.egg-info
+    .cache
+    .eggs
+    env*
 application-import-names = ansiblelater
 format = ${cyan}%(path)s:%(row)d:%(col)d${reset}: ${red_bold}%(code)s${reset} %(text)s
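The reworked flake8 settings can be exercised from Python as well as from the command line. A minimal sketch using flake8's legacy API, assuming flake8 is installed and picks up the .flake8 file from the working directory; the target path is only an example, not part of this commit:

    # Sketch: run flake8 programmatically against the project sources.
    # Assumptions: flake8 is installed and reads the .flake8 file in the cwd.
    from flake8.api import legacy as flake8

    style_guide = flake8.get_style_guide()
    report = style_guide.check_files(["ansibledoctor"])  # example target path
    print("violations found:", report.total_errors)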
.github/settings.yml (vendored, 2 lines changed)

@@ -56,5 +56,3 @@ branches:
       - continuous-integration/drone/pr
   enforce_admins: null
   restrictions: null
-
-...
@@ -2,18 +2,17 @@
 """Find and parse annotations to AnnotationItem objects."""

 import json
-import pprint
 import re
 from collections import defaultdict

 import anyconfig

 from ansibledoctor.Config import SingleConfig
-from ansibledoctor.FileRegistry import Registry
 from ansibledoctor.Utils import SingleLog


 class AnnotationItem:
+    """Handle annotations."""

     # next time improve this by looping over public available attributes
     def __init__(self):
@@ -30,6 +29,8 @@ class AnnotationItem:


 class Annotation:
+    """Handle annotations."""
+
     def __init__(self, name, files_registry):
         self._all_items = defaultdict(dict)
         self._file_handler = None
@@ -62,7 +63,8 @@ class Annotation:

             if re.match(regex, line.strip()):
                 item = self._get_annotation_data(
-                    num, line, self._annotation_definition["name"], rfile)
+                    num, line, self._annotation_definition["name"], rfile
+                )
                 if item:
                     self.logger.info(str(item))
                     self._populate_item(item.get_obj().items())
@@ -166,4 +168,7 @@ class Annotation:
             return {key: json.loads(string)}
         except ValueError:
             self.log.sysexit_with_message(
-                "Json value error: Can't parse json in {}:{}:\n{}".format(rfile, str(num), line.strip()))
+                "Json value error: Can't parse json in {}:{}:\n{}".format(
+                    rfile, str(num), line.strip()
+                )
+            )
@@ -2,9 +2,6 @@
 """Entrypoint and CLI handler."""

 import argparse
-import logging
-import os
-import sys

 import ansibledoctor.Exception
 from ansibledoctor import __version__
@@ -15,6 +12,7 @@ from ansibledoctor.Utils import SingleLog


 class AnsibleDoctor:
+    """Main doctor object."""

     def __init__(self):
         self.log = SingleLog()
@@ -34,20 +32,42 @@ class AnsibleDoctor:
         """
         # TODO: add function to print to stdout instead of file
         parser = argparse.ArgumentParser(
-            description="Generate documentation from annotated Ansible roles using templates")
-        parser.add_argument("role_dir", nargs="?", help="role directory (default: current working dir)")
-        parser.add_argument("-c", "--config", dest="config_file", help="location of configuration file")
-        parser.add_argument("-o", "--output", dest="output_dir", action="store",
-                            help="output base dir")
-        parser.add_argument("-f", "--force", dest="force_overwrite", action="store_true", default=None,
-                            help="force overwrite output file")
-        parser.add_argument("-d", "--dry-run", dest="dry_run", action="store_true", default=None,
-                            help="dry run without writing")
-        parser.add_argument("-v", dest="logging.level", action="append_const", const=-1,
-                            help="increase log level")
-        parser.add_argument("-q", dest="logging.level", action="append_const",
-                            const=1, help="decrease log level")
-        parser.add_argument("--version", action="version", version="%(prog)s {}".format(__version__))
+            description="Generate documentation from annotated Ansible roles using templates"
+        )
+        parser.add_argument(
+            "role_dir", nargs="?", help="role directory (default: current working dir)"
+        )
+        parser.add_argument(
+            "-c", "--config", dest="config_file", help="location of configuration file"
+        )
+        parser.add_argument(
+            "-o", "--output", dest="output_dir", action="store", help="output base dir"
+        )
+        parser.add_argument(
+            "-f",
+            "--force",
+            dest="force_overwrite",
+            action="store_true",
+            default=None,
+            help="force overwrite output file"
+        )
+        parser.add_argument(
+            "-d",
+            "--dry-run",
+            dest="dry_run",
+            action="store_true",
+            default=None,
+            help="dry run without writing"
+        )
+        parser.add_argument(
+            "-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
+        )
+        parser.add_argument(
+            "-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
+        )
+        parser.add_argument(
+            "--version", action="version", version="%(prog)s {}".format(__version__)
+        )

         return parser.parse_args().__dict__
@@ -1,9 +1,7 @@
 #!/usr/bin/env python3
 """Global settings definition."""

-import logging
 import os
-import sys

 import anyconfig
 import environs
@@ -11,7 +9,6 @@ import jsonschema.exceptions
 import ruamel.yaml
 from appdirs import AppDirs
 from jsonschema._utils import format_as_index
-from pkg_resources import resource_filename

 import ansibledoctor.Exception
 from ansibledoctor.Utils import Singleton
@@ -116,11 +113,7 @@ class Config():
             "var": {
                 "name": "var",
                 "automatic": True,
-                "subtypes": [
-                    "value",
-                    "example",
-                    "description"
-                ]
+                "subtypes": ["value", "example", "description"]
             },
             "example": {
                 "name": "example",
@@ -192,7 +185,9 @@ class Config():
                 if '"{}" not set'.format(envname) in str(e):
                     pass
                 else:
-                    raise ansibledoctor.Exception.ConfigError("Unable to read environment variable", str(e))
+                    raise ansibledoctor.Exception.ConfigError(
+                        "Unable to read environment variable", str(e)
+                    )

         return normalized

@@ -224,7 +219,9 @@ class Config():
             s = stream.read()
             try:
                 file_dict = ruamel.yaml.safe_load(s)
-            except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
+            except (
+                ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+            ) as e:
                 message = "{} {}".format(e.context, e.problem)
                 raise ansibledoctor.Exception.ConfigError(
                     "Unable to read config file {}".format(config), message
@@ -313,4 +310,6 @@ class Config():


 class SingleConfig(Config, metaclass=Singleton):
+    """Singleton config class."""
+
     pass
@@ -1,13 +1,9 @@
 #!/usr/bin/env python3
 """Prepare output and write compiled jinja2 templates."""

-import codecs
 import glob
-import json
 import ntpath
 import os
-import pprint
-import sys
 from functools import reduce

 import jinja2.exceptions
@@ -15,8 +11,6 @@ import ruamel.yaml
 from jinja2 import Environment
 from jinja2 import FileSystemLoader
 from jinja2.filters import evalcontextfilter
-from six import binary_type
-from six import text_type

 import ansibledoctor.Exception
 from ansibledoctor.Config import SingleConfig
@@ -25,6 +19,8 @@ from ansibledoctor.Utils import SingleLog


 class Generator:
+    """Generate documentation from jinja2 templates."""
+
     def __init__(self, doc_parser):
         self.template_files = []
         self.extension = "j2"
@@ -67,7 +63,10 @@ class Generator:
         files_to_overwite = []

         for file in self.template_files:
-            doc_file = os.path.join(self.config.config.get("output_dir"), os.path.splitext(file)[0])
+            doc_file = os.path.join(
+                self.config.config.get("output_dir"),
+                os.path.splitext(file)[0]
+            )
             if os.path.isfile(doc_file):
                 files_to_overwite.append(doc_file)

@@ -95,7 +94,10 @@ class Generator:
             self.log.sysexit_with_message("Aborted...")

         for file in self.template_files:
-            doc_file = os.path.join(self.config.config.get("output_dir"), os.path.splitext(file)[0])
+            doc_file = os.path.join(
+                self.config.config.get("output_dir"),
+                os.path.splitext(file)[0]
+            )
             source_file = self.config.get_template() + "/" + file

             self.logger.debug("Writing doc output to: " + doc_file + " from: " + source_file)
@@ -109,7 +111,11 @@ class Generator:
             if data is not None:
                 try:
                     # print(json.dumps(role_data, indent=4, sort_keys=True))
-                    jenv = Environment(loader=FileSystemLoader(self.config.get_template()), lstrip_blocks=True, trim_blocks=True) # nosec
+                    jenv = Environment(
+                        loader=FileSystemLoader(self.config.get_template()),
+                        lstrip_blocks=True,
+                        trim_blocks=True
+                    ) # nosec
                     jenv.filters["to_nice_yaml"] = self._to_nice_yaml
                     jenv.filters["deep_get"] = self._deep_get
                     jenv.filters["save_join"] = self._save_join
@@ -121,12 +127,18 @@ class Generator:
                         self.logger.info("Writing to: " + doc_file)
                     else:
                         self.logger.info("Writing to: " + doc_file)
-                except (jinja2.exceptions.UndefinedError, jinja2.exceptions.TemplateSyntaxError)as e:
+                except (
+                    jinja2.exceptions.UndefinedError, jinja2.exceptions.TemplateSyntaxError
+                ) as e:
                     self.log.sysexit_with_message(
-                        "Jinja2 templating error while loading file: '{}'\n{}".format(file, str(e)))
+                        "Jinja2 templating error while loading file: '{}'\n{}".format(
+                            file, str(e)
+                        )
+                    )
                 except UnicodeEncodeError as e:
                     self.log.sysexit_with_message(
-                        "Unable to print special characters\n{}".format(str(e)))
+                        "Unable to print special characters\n{}".format(str(e))
+                    )

     def _to_nice_yaml(self, a, indent=4, *args, **kw):
         """Make verbose, human readable yaml."""
@@ -138,7 +150,10 @@ class Generator:

     def _deep_get(self, _, dictionary, keys, *args, **kw):
         default = None
-        return reduce(lambda d, key: d.get(key, default) if isinstance(d, dict) else default, keys.split("."), dictionary)
+        return reduce(
+            lambda d, key: d.get(key, default)
+            if isinstance(d, dict) else default, keys.split("."), dictionary
+        )

     @evalcontextfilter
     def _save_join(self, eval_ctx, value, d=u"", attribute=None):
@@ -2,8 +2,6 @@
 """Parse static files."""

 import fnmatch
-import json
-import os
 from collections import defaultdict

 import anyconfig
@@ -19,6 +17,8 @@ from ansibledoctor.Utils import UnsafeTag


 class Parser:
+    """Parse yaml files."""
+
     def __init__(self):
         self._annotation_objs = {}
         self._data = defaultdict(dict)
@@ -36,13 +36,21 @@ class Parser:
             if any(fnmatch.fnmatch(rfile, "*/defaults/*." + ext) for ext in YAML_EXTENSIONS):
                 with open(rfile, "r", encoding="utf8") as yaml_file:
                     try:
-                        ruamel.yaml.add_constructor(UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, constructor=ruamel.yaml.SafeConstructor)
+                        ruamel.yaml.add_constructor(
+                            UnsafeTag.yaml_tag,
+                            UnsafeTag.yaml_constructor,
+                            constructor=ruamel.yaml.SafeConstructor
+                        )
                         data = defaultdict(dict, (ruamel.yaml.safe_load(yaml_file) or {}))
                         for key, value in data.items():
                             self._data["var"][key] = {"value": {key: value}}
-                    except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                    ) as e:
                         message = "{} {}".format(e.context, e.problem)
-                        self.log.sysexit_with_message("Unable to read yaml file {}\n{}".format(rfile, message))
+                        self.log.sysexit_with_message(
+                            "Unable to read yaml file {}\n{}".format(rfile, message)
+                        )

     def _parse_meta_file(self):
         for rfile in self._files_registry.get_files():
@@ -55,12 +63,18 @@ class Parser:
                             self._data["meta"][key] = {"value": value}

                         if data.get("dependencies") is not None:
-                            self._data["meta"]["dependencies"] = {"value": data.get("dependencies")}
+                            self._data["meta"]["dependencies"] = {
+                                "value": data.get("dependencies")
+                            }

                         self._data["meta"]["name"] = {"value": self.config.config["role_name"]}
-                    except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                    ) as e:
                         message = "{} {}".format(e.context, e.problem)
-                        self.log.sysexit_with_message("Unable to read yaml file {}\n{}".format(rfile, message))
+                        self.log.sysexit_with_message(
+                            "Unable to read yaml file {}\n{}".format(rfile, message)
+                        )

     def _parse_task_tags(self):
         for rfile in self._files_registry.get_files():
@@ -68,9 +82,13 @@ class Parser:
                 with open(rfile, "r", encoding="utf8") as yaml_file:
                     try:
                         data = ruamel.yaml.safe_load(yaml_file)
-                    except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                    ) as e:
                         message = "{} {}".format(e.context, e.problem)
-                        self.log.sysexit_with_message("Unable to read yaml file {}\n{}".format(rfile, message))
+                        self.log.sysexit_with_message(
+                            "Unable to read yaml file {}\n{}".format(rfile, message)
+                        )

                 tags_found = nested_lookup("tags", data)
                 for tag in tags_found:
@@ -81,7 +99,9 @@ class Parser:
         tags = defaultdict(dict)
         for annotaion in self.config.get_annotations_names(automatic=True):
             self.logger.info("Finding annotations for: @" + annotaion)
-            self._annotation_objs[annotaion] = Annotation(name=annotaion, files_registry=self._files_registry)
+            self._annotation_objs[annotaion] = Annotation(
+                name=annotaion, files_registry=self._files_registry
+            )
             tags[annotaion] = self._annotation_objs[annotaion].get_details()

         try:
@@ -3,7 +3,6 @@

 import glob
 import os
-import sys

 import pathspec

@@ -13,6 +12,7 @@ from ansibledoctor.Utils import SingleLog


 class Registry:
+    """Register all yaml files."""

     _doc = {}
     log = None
@@ -46,7 +46,13 @@ class Registry:
             pattern = os.path.join(role_dir, "**/*." + extension)
             for filename in glob.iglob(pattern, recursive=True):
                 if not excludespec.match_file(filename):
-                    self.log.debug("Adding file to '{}': {}".format(role_name, os.path.relpath(filename, role_dir)))
+                    self.log.debug(
+                        "Adding file to '{}': {}".format(
+                            role_name, os.path.relpath(filename, role_dir)
+                        )
+                    )
                     self._doc.append(filename)
                 else:
-                    self.log.debug("Excluding file: {}".format(os.path.relpath(filename, role_dir)))
+                    self.log.debug(
+                        "Excluding file: {}".format(os.path.relpath(filename, role_dir))
+                    )
@@ -3,7 +3,6 @@

 import logging
 import os
-import pprint
 import sys
 from distutils.util import strtobool

@@ -32,6 +31,8 @@ colorama.init(autoreset=True, strip=not _should_do_markup())


 class Singleton(type):
+    """Meta singleton class."""
+
     _instances = {}

     def __call__(cls, *args, **kwargs):
@@ -61,7 +62,7 @@ class LogFilter(object):
 class MultilineFormatter(logging.Formatter):
     """Logging Formatter to reset color after newline characters."""

-    def format(self, record): # noqa
+    def format(self, record):  # noqa
         record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
         return logging.Formatter.format(self, record)

@@ -69,12 +70,14 @@ class MultilineFormatter(logging.Formatter):
 class MultilineJsonFormatter(jsonlogger.JsonFormatter):
     """Logging Formatter to remove newline characters."""

     def format(self, record): # noqa
         record.msg = record.msg.replace("\n", " ")
         return jsonlogger.JsonFormatter.format(self, record)


 class Log:
+    """Handle logging."""
+
     def __init__(self, level=logging.WARN, name="ansibledoctor", json=False):
         self.logger = logging.getLogger(name)
         self.logger.setLevel(level)
@@ -89,8 +92,11 @@ class Log:
         handler = logging.StreamHandler(sys.stderr)
         handler.setLevel(logging.ERROR)
         handler.addFilter(LogFilter(logging.ERROR))
-        handler.setFormatter(MultilineFormatter(
-            self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))
+            )
+        )

         if json:
             handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
@@ -101,8 +107,11 @@ class Log:
         handler = logging.StreamHandler(sys.stdout)
         handler.setLevel(logging.WARN)
         handler.addFilter(LogFilter(logging.WARN))
-        handler.setFormatter(MultilineFormatter(
-            self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))
+            )
+        )

         if json:
             handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
@@ -113,8 +122,11 @@ class Log:
         handler = logging.StreamHandler(sys.stdout)
         handler.setLevel(logging.INFO)
         handler.addFilter(LogFilter(logging.INFO))
-        handler.setFormatter(MultilineFormatter(
-            self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))
+            )
+        )

         if json:
             handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
@@ -125,8 +137,11 @@ class Log:
         handler = logging.StreamHandler(sys.stderr)
         handler.setLevel(logging.CRITICAL)
         handler.addFilter(LogFilter(logging.CRITICAL))
-        handler.setFormatter(MultilineFormatter(
-            self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))
+            )
+        )

         if json:
             handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
@@ -137,8 +152,11 @@ class Log:
         handler = logging.StreamHandler(sys.stderr)
         handler.setLevel(logging.DEBUG)
         handler.addFilter(LogFilter(logging.DEBUG))
-        handler.setFormatter(MultilineFormatter(
-            self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))
+            )
+        )

         if json:
             handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
@@ -188,10 +206,14 @@ class Log:


 class SingleLog(Log, metaclass=Singleton):
+    """Singleton logging class."""
+
     pass


 class UnsafeTag:
+    """Handle custom yaml unsafe tag."""
+
     yaml_tag = u"!unsafe"

     def __init__(self, value):
@@ -203,6 +225,8 @@ class UnsafeTag:


 class FileUtils:
+    """Mics static methods for file handling."""
+
     @staticmethod
     def create_path(path):
         os.makedirs(path, exist_ok=True)
setup.cfg (14 lines changed)

@@ -10,11 +10,21 @@ default_section = THIRDPARTY
 known_first_party = ansibledoctor
 sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
 force_single_line = true
-line_length = 110
-skip_glob = **/env/*,**/docs/*
+line_length = 99
+skip_glob = **/.env*,**/env/*,**/docs/*

+[yapf]
+based_on_style = google
+column_limit = 99
+dedent_closing_brackets = true
+coalesce_brackets = true
+split_before_logical_operator = true
+
 [tool:pytest]
 filterwarnings =
     ignore::FutureWarning
     ignore:.*collections.*:DeprecationWarning
     ignore:.*pep8.*:FutureWarning
+
+[coverage:run]
+omit = **/tests/*
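The new [yapf] block above is the style yapf reads when pointed at setup.cfg. A minimal sketch of applying it to one file through yapf's Python API, assuming FormatCode returns a (formatted_source, changed) tuple; the file path is only an example, not part of this commit:

    # Sketch: reformat a single source file with the style defined in setup.cfg.
    from yapf.yapflib.yapf_api import FormatCode

    with open("ansibledoctor/Utils.py") as handle:  # example path
        source = handle.read()

    formatted, changed = FormatCode(source, style_config="setup.cfg")
    if changed:
        print("file would be reformatted by yapf")

On the command line the same style should apply with `yapf --in-place --recursive ansibledoctor/`, since yapf looks for a [yapf] section in setup.cfg by default.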
setup.py (16 lines changed)

@@ -33,26 +33,28 @@ setup(
     keywords="ansible role documentation",
     author=get_property("__author__", PACKAGE_NAME),
     author_email=get_property("__email__", PACKAGE_NAME),
-    url="https://github.com/xoxys/ansible-doctor",
-    license=get_property("__url__", PACKAGE_NAME),
+    url=get_property("__url__", PACKAGE_NAME),
+    license=get_property("__license__", PACKAGE_NAME),
     long_description=get_readme(),
     long_description_content_type="text/markdown",
     packages=find_packages(exclude=["*.tests", "tests", "tests.*"]),
     include_package_data=True,
     zip_safe=False,
-    python_requires=">=3.5",
+    python_requires=">=3.5,<4",
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Environment :: Console",
+        "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
         "Intended Audience :: Developers",
         "Intended Audience :: Information Technology",
         "Intended Audience :: System Administrators",
-        "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
         "Natural Language :: English",
         "Operating System :: POSIX",
-        "Programming Language :: Python :: 3 :: Only",
-        "Topic :: System :: Installation/Setup",
-        "Topic :: System :: Systems Administration",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
         "Topic :: Utilities",
         "Topic :: Software Development",
         "Topic :: Software Development :: Documentation",
@@ -1,11 +1,8 @@
-# open issue
-# https://gitlab.com/pycqa/flake8-docstrings/issues/36
 pydocstyle<4.0.0
 flake8
 flake8-colors
 flake8-blind-except
 flake8-builtins
-flake8-colors
 flake8-docstrings<=3.0.0
 flake8-isort
 flake8-logging-format
@@ -17,3 +14,4 @@ pytest
 pytest-mock
 pytest-cov
 bandit
+yapf