Mirror of https://github.com/thegeeklab/ansible-doctor.git (synced 2024-11-21 20:30:43 +00:00)

Commit b3cd1d0978: fork; initial commit
.flake8 (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
[flake8]
|
||||
# Temp disable Docstring checks D101, D102, D103, D107
|
||||
ignore = E501, W503, F401, N813, D101, D102, D103, D107
|
||||
max-line-length = 100
|
||||
inline-quotes = double
|
||||
exclude = .git,.tox,__pycache__,build,dist,tests,*.pyc,*.egg-info,.cache,.eggs,env*
|
||||
application-import-names = ansibledoctor
|
||||
format = ${cyan}%(path)s:%(row)d:%(col)d${reset}: ${red_bold}%(code)s${reset} %(text)s
|
.gitignore (new file, vendored, 103 lines)
@@ -0,0 +1,103 @@
|
||||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# virtualenv
|
||||
.venv
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
env*/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# Ignore ide addons
|
||||
.server-script
|
||||
.on-save.json
|
||||
.vscode
|
||||
.pytest_cache
|
||||
|
||||
pip-wheel-metadata
|
LICENSE (new file, 165 lines)
@@ -0,0 +1,165 @@
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
||||
This version of the GNU Lesser General Public License incorporates
|
||||
the terms and conditions of version 3 of the GNU General Public
|
||||
License, supplemented by the additional permissions listed below.
|
||||
|
||||
0. Additional Definitions.
|
||||
|
||||
As used herein, "this License" refers to version 3 of the GNU Lesser
|
||||
General Public License, and the "GNU GPL" refers to version 3 of the GNU
|
||||
General Public License.
|
||||
|
||||
"The Library" refers to a covered work governed by this License,
|
||||
other than an Application or a Combined Work as defined below.
|
||||
|
||||
An "Application" is any work that makes use of an interface provided
|
||||
by the Library, but which is not otherwise based on the Library.
|
||||
Defining a subclass of a class defined by the Library is deemed a mode
|
||||
of using an interface provided by the Library.
|
||||
|
||||
A "Combined Work" is a work produced by combining or linking an
|
||||
Application with the Library. The particular version of the Library
|
||||
with which the Combined Work was made is also called the "Linked
|
||||
Version".
|
||||
|
||||
The "Minimal Corresponding Source" for a Combined Work means the
|
||||
Corresponding Source for the Combined Work, excluding any source code
|
||||
for portions of the Combined Work that, considered in isolation, are
|
||||
based on the Application, and not on the Linked Version.
|
||||
|
||||
The "Corresponding Application Code" for a Combined Work means the
|
||||
object code and/or source code for the Application, including any data
|
||||
and utility programs needed for reproducing the Combined Work from the
|
||||
Application, but excluding the System Libraries of the Combined Work.
|
||||
|
||||
1. Exception to Section 3 of the GNU GPL.
|
||||
|
||||
You may convey a covered work under sections 3 and 4 of this License
|
||||
without being bound by section 3 of the GNU GPL.
|
||||
|
||||
2. Conveying Modified Versions.
|
||||
|
||||
If you modify a copy of the Library, and, in your modifications, a
|
||||
facility refers to a function or data to be supplied by an Application
|
||||
that uses the facility (other than as an argument passed when the
|
||||
facility is invoked), then you may convey a copy of the modified
|
||||
version:
|
||||
|
||||
a) under this License, provided that you make a good faith effort to
|
||||
ensure that, in the event an Application does not supply the
|
||||
function or data, the facility still operates, and performs
|
||||
whatever part of its purpose remains meaningful, or
|
||||
|
||||
b) under the GNU GPL, with none of the additional permissions of
|
||||
this License applicable to that copy.
|
||||
|
||||
3. Object Code Incorporating Material from Library Header Files.
|
||||
|
||||
The object code form of an Application may incorporate material from
|
||||
a header file that is part of the Library. You may convey such object
|
||||
code under terms of your choice, provided that, if the incorporated
|
||||
material is not limited to numerical parameters, data structure
|
||||
layouts and accessors, or small macros, inline functions and templates
|
||||
(ten or fewer lines in length), you do both of the following:
|
||||
|
||||
a) Give prominent notice with each copy of the object code that the
|
||||
Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the object code with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
4. Combined Works.
|
||||
|
||||
You may convey a Combined Work under terms of your choice that,
|
||||
taken together, effectively do not restrict modification of the
|
||||
portions of the Library contained in the Combined Work and reverse
|
||||
engineering for debugging such modifications, if you also do each of
|
||||
the following:
|
||||
|
||||
a) Give prominent notice with each copy of the Combined Work that
|
||||
the Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
|
||||
b) Accompany the Combined Work with a copy of the GNU GPL and this license
|
||||
document.
|
||||
|
||||
c) For a Combined Work that displays copyright notices during
|
||||
execution, include the copyright notice for the Library among
|
||||
these notices, as well as a reference directing the user to the
|
||||
copies of the GNU GPL and this license document.
|
||||
|
||||
d) Do one of the following:
|
||||
|
||||
0) Convey the Minimal Corresponding Source under the terms of this
|
||||
License, and the Corresponding Application Code in a form
|
||||
suitable for, and under terms that permit, the user to
|
||||
recombine or relink the Application with a modified version of
|
||||
the Linked Version to produce a modified Combined Work, in the
|
||||
manner specified by section 6 of the GNU GPL for conveying
|
||||
Corresponding Source.
|
||||
|
||||
1) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (a) uses at run time
|
||||
a copy of the Library already present on the user's computer
|
||||
system, and (b) will operate properly with a modified version
|
||||
of the Library that is interface-compatible with the Linked
|
||||
Version.
|
||||
|
||||
e) Provide Installation Information, but only if you would otherwise
|
||||
be required to provide such information under section 6 of the
|
||||
GNU GPL, and only to the extent that such information is
|
||||
necessary to install and execute a modified version of the
|
||||
Combined Work produced by recombining or relinking the
|
||||
Application with a modified version of the Linked Version. (If
|
||||
you use option 4d0, the Installation Information must accompany
|
||||
the Minimal Corresponding Source and Corresponding Application
|
||||
Code. If you use option 4d1, you must provide the Installation
|
||||
Information in the manner specified by section 6 of the GNU GPL
|
||||
for conveying Corresponding Source.)
|
||||
|
||||
5. Combined Libraries.
|
||||
|
||||
You may place library facilities that are a work based on the
|
||||
Library side by side in a single library together with other library
|
||||
facilities that are not Applications and are not covered by this
|
||||
License, and convey such a combined library under terms of your
|
||||
choice, if you do both of the following:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work based
|
||||
on the Library, uncombined with any other library facilities,
|
||||
conveyed under the terms of this License.
|
||||
|
||||
b) Give prominent notice with the combined library that part of it
|
||||
is a work based on the Library, and explaining where to find the
|
||||
accompanying uncombined form of the same work.
|
||||
|
||||
6. Revised Versions of the GNU Lesser General Public License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU Lesser General Public License from time to time. Such new
|
||||
versions will be similar in spirit to the present version, but may
|
||||
differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Library as you received it specifies that a certain numbered version
|
||||
of the GNU Lesser General Public License "or any later version"
|
||||
applies to it, you have the option of following the terms and
|
||||
conditions either of that published version or of any later version
|
||||
published by the Free Software Foundation. If the Library as you
|
||||
received it does not specify a version number of the GNU Lesser
|
||||
General Public License, you may choose any version of the GNU Lesser
|
||||
General Public License ever published by the Free Software Foundation.
|
||||
|
||||
If the Library as you received it specifies that a proxy can decide
|
||||
whether future versions of the GNU Lesser General Public License shall
|
||||
apply, that proxy's public statement of acceptance of any version is
|
||||
permanent authorization for you to choose that version for the
|
||||
Library.
|
MANIFEST.in (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
include LICENSE
|
||||
recursive-include ansibledoctor/templates *
|
ansibledoctor/Annotation.py (new file, 139 lines)
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import pprint
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
import anyconfig
|
||||
import yaml
|
||||
|
||||
from ansibledoctor.Config import SingleConfig
|
||||
from ansibledoctor.FileRegistry import Registry
|
||||
from ansibledoctor.Utils import SingleLog
|
||||
|
||||
|
||||
class AnnotationItem:
|
||||
|
||||
# next time improve this by looping over public available attributes
|
||||
def __init__(self):
|
||||
self.data = defaultdict(dict)
|
||||
|
||||
def get_obj(self):
|
||||
return self.data
|
||||
|
||||
|
||||
class Annotation:
|
||||
def __init__(self, name, files_registry):
|
||||
self._all_items = defaultdict(dict)
|
||||
self._file_handler = None
|
||||
self.config = SingleConfig()
|
||||
self.log = SingleLog()
|
||||
self._files_registry = files_registry
|
||||
|
||||
self._all_annotations = self.config.get_annotations_definition()
|
||||
|
||||
if name in self._all_annotations.keys():
|
||||
self._annotation_definition = self._all_annotations[name]
|
||||
|
||||
if self._annotation_definition is not None:
|
||||
self._find_annotation()
|
||||
|
||||
def get_details(self):
|
||||
return self._all_items
|
||||
|
||||
def _find_annotation(self):
|
||||
regex = r"(\#\ *\@" + self._annotation_definition["name"] + r"\ +.*)"
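# e.g. (illustrative) for the "var" annotation this regex matches lines like "# @var my_var: some_value"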
|
||||
for rfile in self._files_registry.get_files():
|
||||
self._file_handler = open(rfile, encoding="utf8")
|
||||
|
||||
while True:
|
||||
line = self._file_handler.readline()
|
||||
if not line:
|
||||
break
|
||||
|
||||
if re.match(regex, line):
|
||||
item = self._get_annotation_data(
|
||||
line, self._annotation_definition["name"])
|
||||
if item:
|
||||
self._populate_item(item.get_obj().items())
|
||||
|
||||
self._file_handler.close()
|
||||
|
||||
def _populate_item(self, item):
|
||||
for key, value in item:
|
||||
anyconfig.merge(self._all_items[key],
|
||||
value, ac_merge=anyconfig.MS_DICTS)
|
||||
|
||||
def _get_annotation_data(self, line, name):
|
||||
"""
|
||||
Parse an annotation line and extract the relevant data.
|
||||
|
||||
:param line:
|
||||
"""
|
||||
item = AnnotationItem()
|
||||
|
||||
# step1 remove the annotation
|
||||
# reg1 = "(\#\ *\@"++"\ *)"
|
||||
reg1 = r"(\#\ *\@" + name + r"\ *)"
|
||||
line1 = re.sub(reg1, "", line).strip()
|
||||
|
||||
# step3 take the main key value from the annotation
|
||||
parts = [part.strip() for part in line1.split(":", 2)]
|
||||
key = str(parts[0])
|
||||
item.data[key] = {}
|
||||
multiline_char = [">"]
|
||||
|
||||
if len(parts) < 2:
|
||||
return
|
||||
|
||||
if len(parts) == 2:
|
||||
parts = parts[:1] + ["value"] + parts[1:]
|
||||
|
||||
if name == "var":
|
||||
try:
|
||||
content = {key: json.loads(parts[2].strip())}
|
||||
except ValueError:
|
||||
content = {key: parts[2].strip()}
|
||||
else:
|
||||
content = parts[2]
|
||||
|
||||
item.data[key][parts[1]] = content
|
||||
|
||||
# step4 check for multiline description
|
||||
if parts[2] in multiline_char:
|
||||
multiline = []
|
||||
stars_with_annotation = r"(\#\ *[\@][\w]+)"
|
||||
current_file_position = self._file_handler.tell()
|
||||
|
||||
while True:
|
||||
next_line = self._file_handler.readline()
|
||||
|
||||
if not next_line.strip():
|
||||
self._file_handler.seek(current_file_position)
|
||||
break
|
||||
|
||||
# match if annotation in line
|
||||
if re.match(stars_with_annotation, next_line):
|
||||
self._file_handler.seek(current_file_position)
|
||||
break
|
||||
# match if empty line or commented empty line
|
||||
test_line = next_line.replace("#", "").strip()
|
||||
if len(test_line) == 0:
|
||||
self._file_handler.seek(current_file_position)
|
||||
break
|
||||
|
||||
# match if does not start with comment
|
||||
test_line2 = next_line.strip()
|
||||
if test_line2[:1] != "#":
|
||||
self._file_handler.seek(current_file_position)
|
||||
break
|
||||
|
||||
final = next_line.replace("#", "").rstrip()
|
||||
if final[:1] == " ":
|
||||
final = final[1:]
|
||||
multiline.append(final)
|
||||
|
||||
item.data[key][parts[1]] = multiline
|
||||
|
||||
return item
|
ansibledoctor/Cli.py (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ansibledoctor import __version__
|
||||
from ansibledoctor.Config import SingleConfig
|
||||
from ansibledoctor.DocumentationGenerator import Generator
|
||||
from ansibledoctor.DocumentationParser import Parser
|
||||
from ansibledoctor.Utils import SingleLog
|
||||
|
||||
|
||||
class AnsibleDoctor:
|
||||
|
||||
def __init__(self):
|
||||
self.config = SingleConfig()
|
||||
self.log = SingleLog(self.config.debug_level)
|
||||
args = self._cli_args()
|
||||
self._parse_args(args)
|
||||
|
||||
doc_parser = Parser()
|
||||
doc_generator = Generator(doc_parser)
|
||||
doc_generator.render()
|
||||
|
||||
def _cli_args(self):
|
||||
"""
|
||||
Use argparse for parsing CLI arguments.
|
||||
|
||||
:return: args object
|
||||
"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate documentation from annotated playbooks and roles using templates")
|
||||
parser.add_argument("project_dir", nargs="?", default=os.getcwd(),
|
||||
help="role directory, (default: current working dir)")
|
||||
parser.add_argument("-c", "--conf", nargs="?", default="",
|
||||
help="location of configuration file")
|
||||
parser.add_argument("-o", "--output", action="store", dest="output", type=str,
|
||||
help="output base dir")
|
||||
parser.add_argument("-f", "--force", action="store_true", help="force overwrite output file")
|
||||
parser.add_argument("-d", "--dry-run", action="store_true", help="dry run without writing")
|
||||
parser.add_argument("-D", "--default", action="store_true", help="print the default configuration")
|
||||
parser.add_argument("-p", "--print", nargs="?", default="_unset_",
|
||||
help="use print template instead of writing to files")
|
||||
parser.add_argument("--version", action="version", version="%(prog)s {}".format(__version__))
|
||||
|
||||
debug_level = parser.add_mutually_exclusive_group()
|
||||
debug_level.add_argument("-v", action="store_true", help="Set debug level to info")
|
||||
debug_level.add_argument("-vv", action="store_true", help="Set debug level to debug")
|
||||
debug_level.add_argument("-vvv", action="store_true", help="Set debug level to trace")
|
||||
|
||||
return parser.parse_args()
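# illustrative invocation (not part of the original commit), using the flags defined above:
#   ansible-doctor -vv --output ./doc ~/roles/my-role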
|
||||
|
||||
def _parse_args(self, args):
|
||||
"""
|
||||
Use an args object to apply all the configuration combinations to the config object.
|
||||
|
||||
:param args:
|
||||
:return: None
|
||||
"""
|
||||
self.config.set_base_dir(os.path.abspath(args.project_dir))
|
||||
|
||||
# search for config file
|
||||
if args.conf != "":
|
||||
conf_file = os.path.abspath(args.conf)
|
||||
if os.path.isfile(conf_file) and os.path.basename(conf_file) == self.config.config_file_name:
|
||||
self.config.load_config_file(conf_file)
|
||||
# re apply log level based on config
|
||||
self.log.set_level(self.config.debug_level)
|
||||
else:
|
||||
self.log.warn("No configuration file found: " + conf_file)
|
||||
else:
|
||||
conf_file = self.config.get_base_dir() + "/" + self.config.config_file_name
|
||||
if os.path.isfile(conf_file):
|
||||
self.config.load_config_file(conf_file)
|
||||
# re apply log level based on config
|
||||
self.log.set_level(self.config.debug_level)
|
||||
|
||||
# sample configuration
|
||||
if args.default:
|
||||
print(self.config.sample_config)
|
||||
sys.exit()
|
||||
|
||||
# Debug levels
|
||||
if args.v is True:
|
||||
self.log.set_level("info")
|
||||
elif args.vv is True:
|
||||
self.log.set_level("debug")
|
||||
elif args.vvv is True:
|
||||
self.log.set_level("trace")
|
||||
|
||||
# need to send the message after the log levels have been set
|
||||
self.log.debug("using configuration file: " + conf_file)
|
||||
|
||||
# Overwrite
|
||||
if args.force is True:
|
||||
self.config.template_overwrite = True
|
||||
|
||||
# Dry run
|
||||
if args.dry_run is True:
|
||||
self.config.dry_run = True
|
||||
if self.log.log_level > 1:
|
||||
self.log.set_level(1)
|
||||
self.log.info("Running in Dry mode: Therefore setting log level at least to INFO")
|
||||
|
||||
# Print template
|
||||
if args.print == "_unset_":
|
||||
pass
|
||||
elif args.print is None:
|
||||
self.config.use_print_template = "all"
|
||||
else:
|
||||
self.config.use_print_template = args.print
|
||||
|
||||
# output dir
|
||||
if args.output is not None:
|
||||
self.config.output_dir = os.path.abspath(args.output)
|
||||
|
||||
# some debug
|
||||
self.log.debug(args)
|
||||
self.log.info("Using base dir: " + self.config.get_base_dir())
|
||||
|
||||
if self.config.is_role:
|
||||
self.log.info("This is detected as: ROLE ")
|
||||
elif self.config.is_role is not None and not self.config.is_role:
|
||||
self.log.info("This is detected as: PLAYBOOK ")
|
||||
else:
|
||||
self.log.error([
|
||||
self.config.get_base_dir() + "/tasks"
|
||||
], "No ansible role detected, checked for: ")
|
||||
sys.exit(1)
|
ansibledoctor/Config.py (new file, 210 lines)
@@ -0,0 +1,210 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
|
||||
import yaml
|
||||
|
||||
from ansibledoctor.Utils import Singleton
|
||||
|
||||
|
||||
class Config:
|
||||
sample_config = """---
|
||||
# filename: doctor.conf.yaml
|
||||
|
||||
# base directory to scan, relative dir to configuration file
|
||||
# base_dir: "./"
|
||||
|
||||
# documentation output directory, relative dir to configuration file.
|
||||
output_dir: "./doc"
|
||||
|
||||
# directory containing templates, relative dir to configuration file,
|
||||
# comment out to use the default built-in ones
|
||||
# template_dir: "./template"
|
||||
|
||||
# template directory name within template_dir
|
||||
# build in "doc_and_readme" and "readme"
|
||||
template: "readme"
|
||||
|
||||
# Overwrite documentation pages if already exist
|
||||
# this is equal to -f / --force
|
||||
# template_overwrite : False
|
||||
|
||||
# set the debug level: trace | debug | info | warn
|
||||
# see -v | -vv | -vvv
|
||||
# debug_level: "warn"
|
||||
|
||||
# when searching for yaml files in role projects,
|
||||
# exclude these paths (dirs and files) from analysis
|
||||
# default values
|
||||
excluded_roles_dirs: []
|
||||
|
||||
"""
|
||||
# path to the documentation output dir
|
||||
output_dir = ""
|
||||
|
||||
# project base directory
|
||||
_base_dir = ""
|
||||
|
||||
# current directory of this object,
|
||||
# used to get the default template directory
|
||||
script_base_dir = ""
|
||||
|
||||
# path to the directory that contains the templates
|
||||
template_dir = ""
|
||||
# default template name
|
||||
default_template = "readme"
|
||||
# template to use
|
||||
template = ""
|
||||
# flag to ask if files can be overwritten
|
||||
template_overwrite = False
|
||||
# flag to use the cli print template
|
||||
use_print_template = False
|
||||
|
||||
# don"t modify any file
|
||||
dry_run = False
|
||||
|
||||
# default debug level
|
||||
debug_level = "warn"
|
||||
|
||||
# internal flag
|
||||
is_role = None
|
||||
# internal, set when is_role is True
|
||||
project_name = ""
|
||||
|
||||
# name of the config file to search for
|
||||
config_file_name = "doctor.conf.yaml"
|
||||
# if config file is not in root of project, this is used to make output relative to config file
|
||||
_config_file_dir = ""
|
||||
|
||||
excluded_roles_dirs = []
|
||||
|
||||
# annotation search patterns
|
||||
|
||||
# for any pattern like " # @annotation: [annotation_key] # description "
|
||||
# name = annotation ( without "@" )
|
||||
# allow_multiple = True allow to repeat the same annotation, i.e. @todo
|
||||
# automatic = True: the annotation is parsed automatically based on its name, without an explicit parse call
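# illustrative example (not part of the original commit): a line in a role file such as
#   # @todo improvement: switch config handling to a schema based parser
# is collected under the "todo" annotation with the key "improvement".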
|
||||
|
||||
annotations = {
|
||||
"meta": {
|
||||
"name": "meta",
|
||||
"automatic": True
|
||||
},
|
||||
"todo": {
|
||||
"name": "todo",
|
||||
"automatic": True,
|
||||
},
|
||||
"var": {
|
||||
"name": "var",
|
||||
"automatic": True,
|
||||
},
|
||||
"example": {
|
||||
"name": "example",
|
||||
"regex": r"(\#\ *\@example\ *\: *.*)"
|
||||
},
|
||||
"tag": {
|
||||
"name": "tag",
|
||||
"automatic": True,
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
self.script_base_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
def set_base_dir(self, directory):
|
||||
self._base_dir = directory
|
||||
self._set_is_role()
|
||||
|
||||
def get_base_dir(self):
|
||||
return self._base_dir
|
||||
|
||||
def get_annotations_definition(self, automatic=True):
|
||||
annotations = {}
|
||||
|
||||
if automatic:
|
||||
for k, item in self.annotations.items():
|
||||
if "automatic" in item.keys() and item["automatic"]:
|
||||
annotations[k] = item
|
||||
|
||||
return annotations
|
||||
|
||||
def get_annotations_names(self, automatic=True):
|
||||
|
||||
annotations = []
|
||||
|
||||
if automatic:
|
||||
for k, item in self.annotations.items():
|
||||
if "automatic" in item.keys() and item["automatic"]:
|
||||
annotations.append(k)
|
||||
|
||||
return annotations
|
||||
|
||||
def _set_is_role(self):
|
||||
if os.path.isdir(self._base_dir + "/tasks"):
|
||||
self.is_role = True
|
||||
else:
|
||||
self.is_role = None
|
||||
|
||||
def get_output_dir(self):
|
||||
"""
|
||||
Get the path of the output directory for the documentation.
|
||||
|
||||
:return: str path
|
||||
"""
|
||||
if self.use_print_template:
|
||||
return ""
|
||||
if self.output_dir == "":
|
||||
return os.path.realpath(self._base_dir)
|
||||
elif os.path.isabs(self.output_dir):
|
||||
return os.path.realpath(self.output_dir)
|
||||
elif not os.path.isabs(self.output_dir):
|
||||
return os.path.realpath(self._config_file_dir + "/" + self.output_dir)
|
||||
|
||||
def get_template_base_dir(self):
|
||||
"""
|
||||
Get the base dir for the template to use.
|
||||
|
||||
:return: str abs path
|
||||
"""
|
||||
if self.use_print_template:
|
||||
return os.path.realpath(self.script_base_dir + "/templates/cliprint")
|
||||
|
||||
if self.template == "":
|
||||
template = self.default_template
|
||||
else:
|
||||
template = self.template
|
||||
|
||||
if self.template_dir == "":
|
||||
return os.path.realpath(self.script_base_dir + "/templates/" + template)
|
||||
elif os.path.isabs(self.template_dir):
|
||||
return os.path.realpath(self.template_dir + "/" + template)
|
||||
elif not os.path.isabs(self.template_dir):
|
||||
return os.path.realpath(self._config_file_dir + "/" + self.template_dir + "/" + template)
|
||||
|
||||
def load_config_file(self, file):
|
||||
|
||||
allow_to_overwrite = [
|
||||
"base_dir",
|
||||
"output_dir",
|
||||
"template_dir",
|
||||
"template",
|
||||
"template_overwrite",
|
||||
"debug_level",
|
||||
"excluded_roles_dirs",
|
||||
|
||||
]
|
||||
|
||||
with open(file, "r") as yaml_file:
|
||||
try:
|
||||
self._config_file_dir = os.path.dirname(os.path.realpath(file))
|
||||
data = yaml.safe_load(yaml_file)
|
||||
if data:
|
||||
for item_to_configure in allow_to_overwrite:
|
||||
if item_to_configure in data.keys():
|
||||
self.__setattr__(item_to_configure, data[item_to_configure])
|
||||
|
||||
except yaml.YAMLError as exc:
|
||||
print(exc)
|
||||
|
||||
|
||||
class SingleConfig(Config, metaclass=Singleton):
|
||||
pass
|
ansibledoctor/Contstants.py (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
DOCTOR_CONF_FILE = "doctor.conf.yaml"
|
||||
YAML_EXTENSIONS = ["yaml","yml"]
|
ansibledoctor/DocumentationGenerator.py (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import codecs
|
||||
import glob
|
||||
import json
|
||||
import ntpath
|
||||
import os
|
||||
import pprint
|
||||
import sys
|
||||
|
||||
import jinja2.exceptions
|
||||
import ruamel.yaml
|
||||
from jinja2 import Environment
|
||||
from jinja2 import FileSystemLoader
|
||||
from six import binary_type
|
||||
from six import text_type
|
||||
|
||||
from ansibledoctor.Config import SingleConfig
|
||||
from ansibledoctor.Utils import FileUtils
|
||||
from ansibledoctor.Utils import SingleLog
|
||||
|
||||
|
||||
class Generator:
|
||||
def __init__(self, doc_parser):
|
||||
self.template_files = []
|
||||
self.extension = "j2"
|
||||
self._parser = None
|
||||
self.config = SingleConfig()
|
||||
self.log = SingleLog()
|
||||
self.log.info("Using template dir: " + self.config.get_template_base_dir())
|
||||
self._parser = doc_parser
|
||||
self._scan_template()
|
||||
|
||||
def _scan_template(self):
|
||||
"""
|
||||
Search for Jinja2 (.j2) files to apply to the destination.
|
||||
|
||||
:return: None
|
||||
"""
|
||||
base_dir = self.config.get_template_base_dir()
|
||||
|
||||
for file in glob.iglob(base_dir + "/**/*." + self.extension, recursive=True):
|
||||
|
||||
relative_file = file[len(base_dir) + 1:]
|
||||
if ntpath.basename(file)[:1] != "_":
|
||||
self.log.trace("[GENERATOR] found template file: " + relative_file)
|
||||
self.template_files.append(relative_file)
|
||||
else:
|
||||
self.log.debug("[GENERATOR] ignoring template file: " + relative_file)
|
||||
|
||||
def _create_dir(self, directory):
|
||||
if not self.config.dry_run:
|
||||
os.makedirs(directory, exist_ok=True)
|
||||
else:
|
||||
self.log.info("[GENERATOR][DRY] Creating dir: " + dir)
|
||||
|
||||
def _write_doc(self):
|
||||
files_to_overwite = []
|
||||
|
||||
for file in self.template_files:
|
||||
doc_file = self.config.get_output_dir() + "/" + file[:-len(self.extension) - 1]
|
||||
if os.path.isfile(doc_file):
|
||||
files_to_overwite.append(doc_file)
|
||||
|
||||
if len(files_to_overwite) > 0 and self.config.template_overwrite is False:
|
||||
SingleLog.print("This files will be overwritten:", files_to_overwite)
|
||||
if not self.config.dry_run:
|
||||
resulst = FileUtils.query_yes_no("Do you want to continue?")
|
||||
if resulst != "yes":
|
||||
sys.exit()
|
||||
|
||||
for file in self.template_files:
|
||||
doc_file = self.config.get_output_dir() + "/" + file[:-len(self.extension) - 1]
|
||||
source_file = self.config.get_template_base_dir() + "/" + file
|
||||
|
||||
self.log.trace("[GENERATOR] Writing doc output to: " + doc_file + " from: " + source_file)
|
||||
|
||||
# make sure the directory exists
|
||||
self._create_dir(os.path.dirname(os.path.realpath(doc_file)))
|
||||
|
||||
if os.path.exists(source_file) and os.path.isfile(source_file):
|
||||
with open(source_file, "r") as template:
|
||||
data = template.read()
|
||||
if data is not None:
|
||||
try:
|
||||
print(json.dumps(self._parser.get_data(), indent=4, sort_keys=True))
|
||||
jenv = Environment(loader=FileSystemLoader(self.config.get_template_base_dir()), lstrip_blocks=True, trim_blocks=True, autoescape=True)
|
||||
jenv.filters["to_nice_yaml"] = self._to_nice_yaml
|
||||
data = jenv.from_string(data).render(self._parser.get_data(), role=self._parser.get_data())
|
||||
if not self.config.dry_run:
|
||||
with open(doc_file, "w") as outfile:
|
||||
outfile.write(data)
|
||||
self.log.info("Writing to: " + doc_file)
|
||||
else:
|
||||
self.log.info("[GENERATOR][DRY] Writing to: " + doc_file)
|
||||
except jinja2.exceptions.UndefinedError as e:
|
||||
self.log.error("Jinja2 templating error: <" + str(e) + "> when loading file: '" + file + "', run in debug mode to see full except")
|
||||
if self.log.log_level < 1:
|
||||
raise
|
||||
except UnicodeEncodeError as e:
|
||||
self.log.error("At the moment I'm unable to print special chars: <" + str(e) + ">, run in debug mode to see full except")
|
||||
if self.log.log_level < 1:
|
||||
raise
|
||||
sys.exit()
|
||||
|
||||
def _to_nice_yaml(self, a, indent=4, *args, **kw):
|
||||
"""Make verbose, human readable yaml."""
|
||||
yaml = ruamel.yaml.YAML()
|
||||
yaml.indent(mapping=indent, sequence=(indent * 2), offset=indent)
|
||||
stream = ruamel.yaml.compat.StringIO()
|
||||
yaml.dump(a, stream, **kw)
|
||||
return stream.getvalue().rstrip()
|
||||
|
||||
def print_to_cli(self):
|
||||
for file in self.template_files:
|
||||
source_file = self.config.get_template_base_dir() + "/" + file
|
||||
with open(source_file, "r") as template:
|
||||
data = template.read()
|
||||
|
||||
if data is not None:
|
||||
try:
|
||||
data = Environment(loader=FileSystemLoader(self.config.get_template_base_dir()), lstrip_blocks=True, trim_blocks=True, autoescape=True).from_string(data).render(self._parser.get_data(), r=self._parser)
|
||||
print(data)
|
||||
except jinja2.exceptions.UndefinedError as e:
|
||||
self.log.error("Jinja2 templating error: <" + str(e) + "> when loading file: '" + file + "', run in debug mode to see full except")
|
||||
if self.log.log_level < 1:
|
||||
raise
|
||||
except UnicodeEncodeError as e:
|
||||
self.log.error("At the moment I'm unable to print special chars: <" + str(e) + ">, run in debug mode to see full except")
|
||||
if self.log.log_level < 1:
|
||||
raise
|
||||
except Exception:
|
||||
print("Unexpected error:", sys.exc_info()[0])
|
||||
raise
|
||||
|
||||
def render(self):
|
||||
if self.config.use_print_template:
|
||||
self.print_to_cli()
|
||||
else:
|
||||
self.log.info("Using output dir: " + self.config.get_output_dir())
|
||||
self._write_doc()
|
ansibledoctor/DocumentationParser.py (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import fnmatch
|
||||
import json
|
||||
import os
|
||||
from collections import defaultdict
|
||||
|
||||
import anyconfig
|
||||
import yaml
|
||||
|
||||
from ansibledoctor.Annotation import Annotation
|
||||
from ansibledoctor.Config import SingleConfig
|
||||
from ansibledoctor.Contstants import YAML_EXTENSIONS
|
||||
from ansibledoctor.FileRegistry import Registry
|
||||
from ansibledoctor.Utils import SingleLog
|
||||
|
||||
|
||||
class Parser:
|
||||
def __init__(self):
|
||||
self._annotation_objs = {}
|
||||
self._data = defaultdict(dict)
|
||||
self.config = SingleConfig()
|
||||
self.log = SingleLog()
|
||||
self._files_registry = Registry()
|
||||
self._parse_meta_file()
|
||||
self._parse_vars_file()
|
||||
self._populate_doc_data()
|
||||
|
||||
def _parse_vars_file(self):
|
||||
extensions = YAML_EXTENSIONS
|
||||
|
||||
for rfile in self._files_registry.get_files():
|
||||
if any(fnmatch.fnmatch(rfile, "*/defaults/*." + ext) for ext in extensions):
|
||||
with open(rfile, "r", encoding="utf8") as yaml_file:
|
||||
try:
|
||||
data = defaultdict(dict, yaml.load(yaml_file, Loader=yaml.SafeLoader))
|
||||
for key, value in data.items():
|
||||
self._data["var"][key] = {"value": {key: value}}
|
||||
except yaml.YAMLError as exc:
|
||||
print(exc)
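# e.g. (illustrative) a defaults entry "my_port: 8080" ends up as
# self._data["var"]["my_port"] = {"value": {"my_port": 8080}}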
|
||||
|
||||
def _parse_meta_file(self):
|
||||
extensions = YAML_EXTENSIONS
|
||||
|
||||
for rfile in self._files_registry.get_files():
|
||||
if any("meta/main." + ext in rfile for ext in extensions):
|
||||
with open(rfile, "r", encoding="utf8") as yaml_file:
|
||||
try:
|
||||
data = defaultdict(dict, yaml.load(yaml_file, Loader=yaml.SafeLoader))
|
||||
if data.get("galaxy_info"):
|
||||
for key, value in data.get("galaxy_info").items():
|
||||
self._data["meta"][key] = {"value": value}
|
||||
except yaml.YAMLError as exc:
|
||||
print(exc)
|
||||
|
||||
def _populate_doc_data(self):
|
||||
"""Generate the documentation data object."""
|
||||
tags = defaultdict(dict)
|
||||
for annotaion in self.config.get_annotations_names(automatic=True):
|
||||
self.log.info("Finding annotations for: @" + annotaion)
|
||||
self._annotation_objs[annotaion] = Annotation(name=annotaion, files_registry=self._files_registry)
|
||||
tags[annotaion] = self._annotation_objs[annotaion].get_details()
|
||||
# print(json.dumps(tags, indent=4, sort_keys=True))
|
||||
anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)
|
||||
|
||||
def get_data(self):
|
||||
return self._data
|
||||
|
||||
def cli_print_section(self):
|
||||
return self.config.use_print_template
|
||||
|
||||
def cli_left_space(self, item1="", left=25):
|
||||
item1 = item1.ljust(left)
|
||||
return item1
|
||||
|
||||
def test(self):
|
||||
return "test()"
|
ansibledoctor/FileRegistry.py (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env python3
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ansibledoctor.Config import SingleConfig
|
||||
from ansibledoctor.Contstants import YAML_EXTENSIONS
|
||||
from ansibledoctor.Utils import SingleLog
|
||||
|
||||
|
||||
class Registry:
|
||||
|
||||
_doc = {}
|
||||
log = None
|
||||
config = None
|
||||
|
||||
def __init__(self):
|
||||
self._doc = []
|
||||
self.config = SingleConfig()
|
||||
self.log = SingleLog()
|
||||
self._scan_for_yamls()
|
||||
|
||||
def get_files(self):
|
||||
return self._doc
|
||||
|
||||
def _scan_for_yamls(self):
|
||||
"""
|
||||
Search for the yaml files in each project/role root and append to the corresponding object.
|
||||
|
||||
:param base: directory in which we are searching
|
||||
:return: None
|
||||
"""
|
||||
extensions = YAML_EXTENSIONS
|
||||
base_dir = self.config.get_base_dir()
|
||||
|
||||
self.log.debug("Scan for files: " + base_dir)
|
||||
|
||||
for extension in extensions:
|
||||
for filename in glob.iglob(base_dir + "/**/*." + extension, recursive=True):
|
||||
if self._is_excluded_yaml_file(filename, base_dir):
|
||||
self.log.trace("Excluding: " + filename)
|
||||
else:
|
||||
self.log.trace("Adding to role:" + base_dir + " => " + filename)
|
||||
self._doc.append(filename)
|
||||
|
||||
def _is_excluded_yaml_file(self, file, role_base_dir=None):
|
||||
"""
|
||||
Handle file exclusions based on whether the file path starts with an excluded directory.
|
||||
|
||||
:param file:
|
||||
:param role_base_dir:
|
||||
:return:
|
||||
"""
|
||||
base_dir = role_base_dir
|
||||
excluded = self.config.excluded_roles_dirs.copy()
|
||||
|
||||
is_filtered = False
|
||||
for excluded_dir in excluded:
|
||||
if file.startswith(base_dir + "/" + excluded_dir):
|
||||
is_filtered = True
|
||||
|
||||
return is_filtered
|
ansibledoctor/Utils.py (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import pprint
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
class Singleton(type):
|
||||
_instances = {}
|
||||
|
||||
def __call__(cls, *args, **kwargs):
|
||||
if cls not in cls._instances:
|
||||
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
|
||||
return cls._instances[cls]
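# e.g. SingleLog (below) and SingleConfig (in Config.py) use this metaclass, so repeated
# calls return the same shared instance within a process.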
|
||||
|
||||
|
||||
class Log:
|
||||
levels = {
|
||||
"trace": -1,
|
||||
"debug": 0,
|
||||
"info": 1,
|
||||
"warn": 2,
|
||||
"error": 3,
|
||||
}
|
||||
log_level = 1
|
||||
|
||||
def __init__(self, level=1):
|
||||
self.set_level(level)
|
||||
|
||||
def set_level(self, s):
|
||||
|
||||
if isinstance(s, str):
|
||||
for level, v in self.levels.items():
|
||||
if level == s:
|
||||
self.log_level = v
|
||||
elif isinstance(s, int):
|
||||
if s in range(4):
|
||||
self.log_level = s
|
||||
|
||||
def trace(self, msg, h=""):
|
||||
if self.log_level <= -1:
|
||||
self._p("*TRACE*: " + h, msg)
|
||||
|
||||
def debug(self, msg, h=""):
|
||||
if self.log_level <= 0:
|
||||
self._p("*DEBUG*: " + h, msg)
|
||||
|
||||
def info(self, msg, h=""):
|
||||
if self.log_level <= 1:
|
||||
self._p("*INFO*: " + h, msg)
|
||||
|
||||
def warn(self, msg, h=""):
|
||||
if self.log_level <= 2:
|
||||
self._p("*WARN*: " + h, msg)
|
||||
|
||||
def error(self, msg, h=""):
|
||||
if self.log_level <= 3:
|
||||
self._p("*ERROR*: " + h, msg)
|
||||
|
||||
@staticmethod
|
||||
def _p(head, msg, print_type=True):
|
||||
|
||||
if isinstance(msg, list):
|
||||
t = " <list>" if print_type else ""
|
||||
print(head + t)
|
||||
i = 0
|
||||
for line in msg:
|
||||
print(" [" + str(i) + "]: " + str(line))
|
||||
i += 1
|
||||
|
||||
elif isinstance(msg, dict):
|
||||
t = " <dict>" if print_type else ""
|
||||
print(head + t)
|
||||
pprint.pprint(msg)
|
||||
else:
|
||||
print(head + str(msg))
|
||||
|
||||
@staticmethod
|
||||
def print(msg, data):
|
||||
Log._p(msg, data, False)
|
||||
|
||||
|
||||
class SingleLog(Log, metaclass=Singleton):
|
||||
pass
|
||||
|
||||
|
||||
class FileUtils:
|
||||
@staticmethod
|
||||
def create_path(path):
|
||||
os.makedirs(path, exist_ok=True)
|
||||
|
||||
# http://code.activestate.com/recipes/577058/
|
||||
@staticmethod
|
||||
def query_yes_no(question, default="yes"):
|
||||
"""Ask a yes/no question via input() and return their answer.
|
||||
|
||||
"question" is a string that is presented to the user.
|
||||
"default" is the presumed answer if the user just hits <Enter>.
|
||||
It must be "yes" (the default), "no" or None (meaning
|
||||
an answer is required of the user).
|
||||
|
||||
The "answer" return value is one of "yes" or "no".
|
||||
"""
|
||||
valid = {"yes": "yes", "y": "yes", "ye": "yes",
|
||||
"no": "no", "n": "no"}
|
||||
if default is None:
|
||||
prompt = " [y/n] "
|
||||
elif default == "yes":
|
||||
prompt = " [Y/n] "
|
||||
elif default == "no":
|
||||
prompt = " [y/N] "
|
||||
else:
|
||||
raise ValueError("Invalid default answer: '%s'" % default)
|
||||
|
||||
while 1:
|
||||
choice = input(question + prompt).lower()
|
||||
if default is not None and choice == "":
|
||||
return default
|
||||
elif choice in valid.keys():
|
||||
return valid[choice]
|
||||
else:
|
||||
sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
|
ansibledoctor/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
"""Default package."""
|
||||
|
||||
__author__ = "Robert Kaussow"
|
||||
__project__ = "ansible-doctor"
|
||||
__version__ = "0.1.0"
|
||||
__license__ = "LGPLv3"
|
||||
__maintainer__ = "Robert Kaussow"
|
||||
__email__ = "mail@geeklabor.de"
|
||||
__status__ = "Production"
|
ansibledoctor/__main__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Main program."""
|
||||
|
||||
from ansibledoctor.Cli import AnsibleDoctor
|
||||
|
||||
def main():
|
||||
doc = AnsibleDoctor()
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
ansibledoctor/templates/cliprint/_action.j2 (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
# ============================================================
|
||||
# Actions (variables: action)
|
||||
# ============================================================
|
||||
{% for role in r.get_roles(False) %}
|
||||
{% if r.get_type("action",role) %}
|
||||
{{ r.capitalize(r.fprn(role)) }}:
|
||||
{% endif %}
|
||||
{##}
|
||||
{% for key , values in r.get_multi_type("action",role) %}
|
||||
{{ r.capitalize(key) }}:
|
||||
{% for item in values %}
|
||||
* {{ item.desc }}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
ansibledoctor/templates/cliprint/_description.j2 (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
# ============================================================
|
||||
# Project Description
|
||||
# ============================================================
|
||||
{% for role in r.get_roles(False) %}
|
||||
{{ r.capitalize(r.fprn(role)) }}:
|
||||
{% for item in r.get_type("meta",role) %}
|
||||
{{ r.cli_left_space(r.capitalize(item.key),25) }} {{ item.value }}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
|
ansibledoctor/templates/cliprint/_tags.j2 (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
# ============================================================
|
||||
# Tags (variable: tag)
|
||||
# ============================================================
|
||||
{% for role in r.get_roles(False) %}
|
||||
{{ r.capitalize(r.fprn(role)) }}:
|
||||
{% for item in r.get_type("tag",role) %}
|
||||
{{ r.cli_left_space(" * "+item.key,25) }} {{ r.capitalize(item.desc) }}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
|
||||
{#{{ tag | pprint }}#}
|
||||
Duplicate Tags:
|
||||
{% for k,v in r.get_duplicates("tag") %}
|
||||
{{ " * "+k }} in files:
|
||||
{% for item in v %}
|
||||
{{ item.file }} {% if item.line != "" %}(line: {{ item.line }}) {% endif %}
|
||||
|
||||
{% endfor %}
|
||||
{% endfor %}
|
ansibledoctor/templates/cliprint/_todo.j2 (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
# ============================================================
|
||||
# Todo (variables: todo)
|
||||
# ============================================================
|
||||
{% for role in r.get_roles(False) %}
|
||||
{% if r.get_type("todo",role) %}
|
||||
|
||||
{{ r.capitalize(r.fprn(role)) }}:
|
||||
{% endif %}
|
||||
{##}
|
||||
{% for key , values in r.get_multi_type("todo",role) %}
|
||||
{% if key == "_unset_" %}
|
||||
Todos without section:
|
||||
{% for item in values %}
|
||||
* {{ item.desc }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% for key , values in r.get_multi_type("todo",role) %}
|
||||
{% if key != "_unset_" %}
|
||||
{{ r.capitalize(key) }}:
|
||||
{% for item in values %}
|
||||
* {{ item.desc }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
ansibledoctor/templates/cliprint/_var.j2 (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
# ============================================================
|
||||
# Variables (variable: var)
|
||||
# ============================================================
|
||||
{% for role in r.get_roles(False) %}
|
||||
{% if r.get_type("var",role) %}
|
||||
{{ r.capitalize(r.fprn(role)) }}:
|
||||
{% endif %}
|
||||
{% for item in r.get_type("var",role) %}
|
||||
{{ r.cli_left_space(" * "+ item.key+": "+item.value,35) }} {{ item.desc }}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
|
||||
Duplicate Vars:
|
||||
{% for k,v in r.get_duplicates("var") %}
|
||||
{{ " * "+k }} in files:
|
||||
{% for item in v %}
|
||||
{{ item.file }} {% if item.line != "" %}(line: {{ item.line }}) {% endif %}
|
||||
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
|
||||
{#{{ var | pprint }}#}
|
ansibledoctor/templates/cliprint/print_to_cli.j2 (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
### CLI template ###
|
||||
|
||||
{% if r.cli_print_section() == "all" or r.cli_print_section() == "info" %}
|
||||
{% include '_description.j2' %}
|
||||
{% endif %}
|
||||
|
||||
{% if r.cli_print_section() == "all" or r.cli_print_section() == "action" %}
|
||||
{% include '_action.j2' %}
|
||||
{% endif %}
|
||||
|
||||
{% if r.cli_print_section() == "all" or r.cli_print_section() == "tag" %}
|
||||
{% include '_tags.j2' %}
|
||||
{% endif %}
|
||||
|
||||
{% if r.cli_print_section() == "all" or r.cli_print_section() == "todo" %}
|
||||
{% include '_todo.j2' %}
|
||||
{% endif %}
|
||||
|
||||
{% if r.cli_print_section() == "all" or r.cli_print_section() == "var" %}
|
||||
{% include '_var.j2' %}
|
||||
{% endif %}
|
ansibledoctor/templates/readme/README.md.j2 (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
{% set meta = role.meta | default({}) %}
|
||||
# {{ (meta.name | default({"value": "_undefined_"})).value }}
|
||||
|
||||
{% if meta.description is defined %}
|
||||
{{ meta.description.value }}
|
||||
{% endif %}
|
||||
|
||||
{# Vars #}
|
||||
{% include '_vars.j2' %}
|
ansibledoctor/templates/readme/_dev_var_dump.txt.j2 (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
#============================================================================================================
|
||||
# This is a dump of the documentation variable : tags
|
||||
#============================================================================================================
|
||||
{{ tag | pprint }}
|
ansibledoctor/templates/readme/_vars.j2 (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
{% set var = role.var | default({}) %}
|
||||
{% if var %}
|
||||
## Default Variables
|
||||
|
||||
{% for key, item in var.items() %}
|
||||
|
||||
### {{ key }}
|
||||
{% if item.description is defined and item.description %}
|
||||
|
||||
{% for desc_line in item.description %}
|
||||
{{ desc_line }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
#### Default value
|
||||
|
||||
```YAML
|
||||
{{ item.value | to_nice_yaml(indent=2) }}
|
||||
```
|
||||
|
||||
{% if item.example is defined and item.example %}
|
||||
|
||||
#### Example usage
|
||||
|
||||
```YAML
|
||||
{% if item.example is mapping %}
|
||||
{{ item.example | to_nice_yaml(indent=2) }}
|
||||
{% else %}
|
||||
{% for ex_line in item.example %}
|
||||
{{ ex_line }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
```
|
||||
|
||||
{% endif %}
|
||||
|
||||
---
|
||||
{% endfor %}
|
||||
{% endif %}
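{# illustrative example (not part of the original commit): a default "my_port: 8080" combined
   with an annotation block starting "# @var my_port: description: >" renders a "### my_port"
   section with the collected description lines and the default value as a YAML block. #}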
|
bin/ansible-doctor (new executable file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
|
||||
import ansibledoctor.__main__
|
||||
|
||||
sys.exit(ansibledoctor.__main__.main())
|
setup.cfg (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
[metadata]
|
||||
description-file = README.md
|
||||
license_file = LICENSE
|
||||
|
||||
[bdist_wheel]
|
||||
universal = 1
|
||||
|
||||
[isort]
|
||||
default_section = THIRDPARTY
|
||||
known_first_party = ansibledoctor
|
||||
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
|
||||
force_single_line = true
|
||||
line_length = 120
|
||||
skip_glob = **/env/*
|
||||
|
||||
[tool:pytest]
|
||||
filterwarnings =
|
||||
ignore::FutureWarning
|
||||
ignore:.*collections.*:DeprecationWarning
|
||||
ignore:.*pep8.*:FutureWarning
|
setup.py (new executable file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/python3
|
||||
"""Setup script for the package."""
|
||||
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
PACKAGE_NAME = "ansibledoctor"
|
||||
|
||||
|
||||
def get_property(prop, project):
|
||||
current_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
result = re.search(
|
||||
r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
|
||||
open(os.path.join(current_dir, project, "__init__.py")).read())
|
||||
return result.group(1)
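# e.g. get_property("__version__", "ansibledoctor") reads "0.1.0" from ansibledoctor/__init__.py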
|
||||
|
||||
|
||||
def get_readme(filename="README.md"):
|
||||
this = os.path.abspath(os.path.dirname(__file__))
|
||||
with io.open(os.path.join(this, filename), encoding="utf-8") as f:
|
||||
long_description = f.read()
|
||||
return long_description
|
||||
|
||||
|
||||
setup(
|
||||
name=get_property("__project__", PACKAGE_NAME),
|
||||
version=get_property("__version__", PACKAGE_NAME),
|
||||
description="Generate documentation from annotated Ansible roles using templates",
|
||||
keywords="ansible role documentation",
|
||||
author=get_property("__author__", PACKAGE_NAME),
|
||||
author_email=get_property("__email__", PACKAGE_NAME),
|
||||
url="https://github.com/xoxys/ansible-doctor",
|
||||
license=get_property("__license__", PACKAGE_NAME),
|
||||
long_description=get_readme(),
|
||||
long_description_content_type="text/markdown",
|
||||
packages=find_packages(exclude=["*.tests", "tests", "tests.*"]),
|
||||
python_requires=">=3.5",
|
||||
classifiers=[
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: Information Technology",
|
||||
"Intended Audience :: System Administrators",
|
||||
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
|
||||
"Natural Language :: English",
|
||||
"Operating System :: POSIX",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Topic :: System :: Installation/Setup",
|
||||
"Topic :: System :: Systems Administration",
|
||||
"Topic :: Utilities",
|
||||
"Topic :: Software Development",
|
||||
"Topic :: Software Development :: Documentation",
|
||||
],
|
||||
install_requires=[
|
||||
"pyyaml",
|
||||
"ruamel.yaml",
|
||||
"appdirs",
|
||||
"colorama",
|
||||
"anyconfig",
|
||||
"python-json-logger",
|
||||
"jsonschema",
|
||||
"jinja2"
|
||||
],
|
||||
entry_points={
|
||||
"console_scripts": [
|
||||
"ansible-doctor = ansibledoctor.__main__:main"
|
||||
]
|
||||
},
|
||||
test_suite="tests"
|
||||
)
|
test-requirements.txt (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
# open issue
|
||||
# https://gitlab.com/pycqa/flake8-docstrings/issues/36
|
||||
pydocstyle<4.0.0
|
||||
flake8
|
||||
flake8-colors
|
||||
flake8-blind-except
|
||||
flake8-builtins
|
||||
flake8-docstrings<=3.0.0
|
||||
flake8-isort
|
||||
flake8-logging-format
|
||||
flake8-polyfill
|
||||
flake8-quotes
|
||||
pep8-naming
|
||||
wheel
|
||||
pytest
|
||||
pytest-mock
|
||||
pytest-cov
|
||||
bandit
|