Compare commits


125 Commits
v3.1.1 ... main

Author SHA1 Message Date
8e22e87a31
refactor: replace logger by structlog (#718)
BREAKING CHANGE: Replace the custom logger and `python-json-logger` with
`structlog`. This will also change the layout and general structure of
the log messages.

The original `python-json-logger` package is unmaintained and has caused
some issues. Using https://github.com/nhairs/python-json-logger.git
instead has fixed the logging issues but prevents PyPI package
uploads...

```
HTTP Error 400: Can't have direct dependency: python-json-logger@ git+https://github.com/nhairs/python-json-logger.git@v3.1.0. See https://packaging.python.org/specifications/core-metadata for more information.
```
2024-06-17 13:51:03 +02:00
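
For orientation, a minimal sketch of the structured, key/value style logging structlog produces after this change. The processor chain below is an illustrative assumption; the project's exact configuration appears in the settings-module diff further down.

```
import logging

import structlog

# Illustrative processor chain (an assumption for this sketch, not the
# project's exact setup).
structlog.configure(
    processors=[
        structlog.contextvars.merge_contextvars,
        structlog.processors.add_log_level,
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
        structlog.dev.ConsoleRenderer(),
    ],
    wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
)

log = structlog.get_logger()
# Bound context variables show up as key/value pairs on every log line.
structlog.contextvars.bind_contextvars(role="demo_role")
log.info("Ansible role detected")
```
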
renovate[bot]
ed113e37ea
chore(deps): lock file maintenance (#721)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-06-17 08:43:13 +02:00
renovate[bot]
c33738c8a1 chore(deps): update dependency ruff to v0.4.9 2024-06-17 03:15:24 +02:00
renovate[bot]
d5e4e6ba33 chore(docker): update python:3.12-alpine docker digest to a982997 2024-06-15 04:27:03 +02:00
08e2178333
feat: add option to load templates from remote git sources (#717) 2024-06-12 20:59:55 +02:00
renovate[bot]
42892daa74
chore(deps): lock file maintenance (#716)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-06-11 10:19:07 +02:00
renovate[bot]
ab6c988869
chore(deps): update quay.io/thegeeklab/hugo docker tag to v0.127.0 (#713)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-06-10 07:27:41 +02:00
renovate[bot]
eb2cf69468 chore(deps): update devdeps non-major 2024-06-10 06:05:31 +02:00
renovate[bot]
abee343056 chore(deps): update dependency thegeeklab/hugo-geekdoc to v0.46.0 2024-06-10 03:08:33 +02:00
renovate[bot]
29320b6b96 chore(docker): update python:3.12-alpine docker digest to d24ed56 2024-06-08 03:37:15 +02:00
8e042c739e
feat: migrate to dynaconf to handle multi-source configuration (#708) 2024-06-07 21:51:10 +02:00
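
A minimal sketch of the Dynaconf-based loading this commit introduces (keys and file names are taken from the config diff further down, trimmed for illustration): defaults, YAML config files, and `ANSIBLE_DOCTOR_*` environment variables are merged into one validated settings object.

```
import os

from dynaconf import Dynaconf, Validator

# Settings are merged from defaults, YAML config files, and environment
# variables prefixed with ANSIBLE_DOCTOR_ (e.g. ANSIBLE_DOCTOR_RECURSIVE=true).
settings = Dynaconf(
    envvar_prefix="ANSIBLE_DOCTOR",
    settings_files=[".ansibledoctor.yml", ".ansibledoctor.yaml"],
    merge_enabled=True,
    core_loaders=["YAML"],
    validators=[
        Validator("base_dir", default=os.getcwd(), is_type_of=str),
        Validator("recursive", default=False, is_type_of=bool),
    ],
)

# Fail early if any value has the wrong type.
settings.validators.validate_all()
print(settings.base_dir, settings.recursive)
```
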
9b20c11660
fix: exclude tags from exclude_tags during rendering (#711) 2024-06-07 21:30:10 +02:00
renovate[bot]
5760ee0832 chore(docker): update python:3.12-alpine docker digest to 32385e6 2024-06-06 03:51:22 +02:00
renovate[bot]
db94c07396
chore(deps): lock file maintenance (#707)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-06-05 09:26:04 +02:00
renovate[bot]
73bbd746d3 chore(deps): update dependency ruff to v0.4.7 2024-06-03 03:31:33 +02:00
ab0372bef5
chore: unify jinja template syntax and add linting (#704) 2024-06-02 09:00:07 +02:00
3df7e465db
docs: add documentation for tabulated vars option (#705) 2024-06-02 00:08:33 +02:00
89c6a11be4
fix: fix sysexit with custom error (#703) 2024-06-02 00:08:25 +02:00
Chip Selden
4051d2915d
feat: add option for tabulating variables (#693) 2024-06-01 22:05:16 +02:00
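
A hypothetical Jinja2 snippet (not the project's shipped template) showing how a `tabulate_variables` option like the one added here can switch variable output between a list and a table:

```
from jinja2 import Environment

# Hypothetical template: render the same variable data either as a bullet
# list or as a Markdown table, switched by options.tabulate_variables.
template = Environment().from_string(
    "{% if options.tabulate_variables %}"
    "| Variable | Default |\n| --- | --- |\n"
    "{% for name, value in vars.items() %}| {{ name }} | {{ value }} |\n{% endfor %}"
    "{% else %}"
    "{% for name, value in vars.items() %}- {{ name }}: {{ value }}\n{% endfor %}"
    "{% endif %}"
)
print(template.render(vars={"demo_var": "1.0"}, options={"tabulate_variables": True}))
```
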
renovate[bot]
fe4e4e5f7a
chore(deps): lock file maintenance (#702)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-06-01 13:18:57 +02:00
renovate[bot]
172e4f4380 chore(deps): update dependency ruff to v0.4.5 2024-05-27 04:31:24 +02:00
renovate[bot]
fada900568
fix(deps): update dependency ansible-core to v2.14.17 (#698)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-05-24 09:44:19 +02:00
renovate[bot]
013f760c8a chore(docker): update python:3.12-alpine docker digest to 5365725 2024-05-23 05:49:37 +02:00
renovate[bot]
00adc389a2 chore(deps): update dependency pytest to v8.2.1 2024-05-20 03:47:49 +02:00
renovate[bot]
84fdc06315
chore(deps): lock file maintenance (#695)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-05-15 11:06:23 +02:00
renovate[bot]
db68e80372
chore(deps): update quay.io/thegeeklab/hugo docker tag to v0.125.7 (#696)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-05-14 14:24:17 +02:00
renovate[bot]
af702628eb chore(deps): update dependency ruff to v0.4.4 2024-05-13 03:05:31 +02:00
renovate[bot]
81d4e97af6
fix(deps): update dependency jinja2 to v3.1.4 (#692)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-05-06 21:29:22 +02:00
renovate[bot]
a33f3c53bb chore(deps): update dependency ruff to v0.4.3 2024-05-06 02:37:46 +02:00
renovate[bot]
ccc2d249f8
fix(deps): update dependency jsonschema to v4.22.0 (#689)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-05-01 12:36:45 +02:00
renovate[bot]
f7ff5fd624
chore(deps): lock file maintenance (#683)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-29 10:28:49 +02:00
renovate[bot]
acee6e1285 chore(deps): update devdeps non-major 2024-04-29 04:15:22 +02:00
Julien Rottenberg
2375ad118d
fix: install extra group when using pre-commit (#687) 2024-04-24 08:54:48 +02:00
renovate[bot]
a2f02527d9
fix(deps): update dependency ansible-core to v2.14.16 (#686)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-23 21:18:13 +02:00
renovate[bot]
0bf59ac34f chore(deps): update dependency ruff to v0.4.1 2024-04-22 03:46:05 +02:00
renovate[bot]
94ec1a632b chore(deps): update dependency ruff to v0.3.7 2024-04-15 02:40:29 +02:00
renovate[bot]
075e1f91ca
fix(deps): update dependency ansible-core to v2.14.15 (#681)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-12 09:59:00 +02:00
4cf63bf2fe
separate minor-patch for ansible deps 2024-04-12 09:23:00 +02:00
renovate[bot]
e3f797d5e3
chore(deps): lock file maintenance (#677)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-12 09:18:07 +02:00
renovate[bot]
3a0c5ae35f chore(docker): update python:3.12-alpine docker digest to ef09762 2024-04-11 04:03:51 +02:00
renovate[bot]
9f7f943c93 chore(deps): update dependency ruff to v0.3.5 2024-04-08 03:24:24 +02:00
renovate[bot]
b38c4aa2b8 chore(deps): update dependency thegeeklab/hugo-geekdoc to v0.45.0 2024-04-08 03:24:09 +02:00
renovate[bot]
ed167d1443 chore(deps): update dependency thegeeklab/hugo-geekdoc to v0.44.3 2024-04-01 03:24:41 +02:00
renovate[bot]
da6fd26c6d
chore(deps): update quay.io/thegeeklab/wp-docker-buildx docker tag to v4 (#674)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-27 08:42:10 +01:00
renovate[bot]
522c21f8fc chore(docker): update python:3.12-alpine docker digest to c7eb5c9 2024-03-27 01:21:36 +01:00
renovate[bot]
28e39055e3
chore(deps): lock file maintenance (#669)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-26 21:39:41 +01:00
renovate[bot]
894965286b
chore(deps): update dependency pytest-cov to v5 (#671)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-25 08:34:20 +01:00
renovate[bot]
c2e0f787ce chore(deps): update devdeps non-major 2024-03-25 01:35:05 +01:00
renovate[bot]
d524537fd3
chore(deps): update quay.io/thegeeklab/hugo docker tag to v0.124.1 (#670)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-21 08:48:14 +01:00
renovate[bot]
a559b654ca chore(deps): update dependency ruff to v0.3.3 2024-03-18 03:00:26 +01:00
renovate[bot]
aa78adf912 chore(docker): update python:3.12-alpine docker digest to 25a82f6 2024-03-17 01:49:43 +01:00
6e88c18375
ci: fix deprecated ruff command 2024-03-12 20:52:52 +01:00
7b9ba09f1d fix linting 2024-03-11 09:44:16 +01:00
renovate[bot]
08883952c1 chore(deps): update devdeps non-major 2024-03-11 09:44:16 +01:00
renovate[bot]
e1ef4937dd chore(deps): update dependency thegeeklab/hugo-geekdoc to v0.44.2 2024-03-11 02:43:51 +01:00
renovate[bot]
571222f6f5
chore(deps): lock file maintenance (#663)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-07 11:04:59 +01:00
renovate[bot]
6d50525021
fix(deps): update dependency environs to v11 (#664)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-07 11:04:46 +01:00
renovate[bot]
3ade4698e7 chore(deps): update devdeps non-major 2024-03-04 02:18:33 +01:00
renovate[bot]
ee81c8ee73
chore(deps): lock file maintenance (#659)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-26 10:26:11 +01:00
renovate[bot]
03df5bd79b
chore(deps): lock file maintenance (#657)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-26 10:14:18 +01:00
renovate[bot]
1e32a8f87a chore(deps): update dependency pytest to v8.0.2 2024-02-26 02:21:25 +01:00
renovate[bot]
dab9bc8691 chore(deps): update devdeps non-major 2024-02-19 02:20:31 +01:00
e2eaa81c4f
[skip ci] revert renovate automerge config 2024-02-15 12:23:07 +01:00
renovate[bot]
732f588aa9
chore(deps): update quay.io/thegeeklab/hugo docker tag to v0.122.0 (#655)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-11 16:53:09 +01:00
6619351fbd
enable renovate on automerge branches 2024-02-09 23:08:29 +01:00
renovate[bot]
8f6f444931 chore(docker): update python:3.12-alpine docker digest to 1a05012 2024-02-09 05:43:21 +01:00
renovate[bot]
49b861cabc
fix(deps): update dependency ruamel.yaml to v0.18.6 (#653)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-07 22:36:59 +01:00
renovate[bot]
59b497d745
fix(deps): update dependency ansible-core to v2.14.14 (#652)
This CVE does not affect ansible-doctor and is therefore not treated as a security update.

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-07 08:38:43 +01:00
b2f4fd2bd8
chore: bump ruff to v0.2.1 (#651) 2024-02-06 09:34:17 +01:00
renovate[bot]
864af95606
chore(deps): update dependency pytest to v8 (#648)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-31 09:05:49 +01:00
renovate[bot]
758c87ee80 chore(docker): update python:3.12-alpine docker digest to 14cfc61 2024-01-29 06:35:05 +01:00
renovate[bot]
9b0edda70a
chore(deps): update quay.io/thegeeklab/wp-docker-buildx docker tag to v3 (#647)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-22 11:27:39 +01:00
renovate[bot]
1d32f7548a chore(deps): update dependency ruff to v0.1.14 2024-01-22 02:14:07 +01:00
renovate[bot]
704cdb9d7c
fix(deps): update dependency jsonschema to v4.21.1 (#645)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-20 15:13:09 +01:00
renovate[bot]
c7f3fe57a0 chore(docker): update python:3.12-alpine docker digest to 801b54e 2024-01-19 23:32:46 +01:00
renovate[bot]
1270d7cb7d chore(docker): update python:3.12-alpine docker digest to 4a156f7 2024-01-19 05:23:41 +01:00
renovate[bot]
461eeb2d74 chore(docker): update python:3.12-alpine docker digest to 67990ec 2024-01-19 02:54:58 +01:00
renovate[bot]
0817646004
fix(deps): update dependency jsonschema to v4.21.0 (#641)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-18 10:21:51 +01:00
renovate[bot]
5242fd882a chore(deps): update dependency thegeeklab/hugo-geekdoc to v0.44.1 2024-01-16 00:19:56 +01:00
renovate[bot]
052c668d92
fix(deps): update dependency anyconfig to v0.14.0 (#638)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-15 21:56:46 +01:00
1e8a8beef7 fix linting 2024-01-15 15:24:52 +01:00
renovate[bot]
c124460c11 chore(deps): update dependency ruff to v0.1.13 2024-01-15 15:24:52 +01:00
renovate[bot]
38bd53f7bc
fix(deps): update dependency environs to v10.3.0 (#637)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-12 08:30:19 +01:00
renovate[bot]
dbf9c979ac
fix(deps): update dependency jinja2 to v3.1.3 (#636)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-11 08:29:35 +01:00
renovate[bot]
fe12548387
fix(deps): update dependency ansible-core to v2.14.12 [security] (#633)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-10 10:09:14 +01:00
ae14704b74
chore: drop support for python 3.8 (#634)
BREAKING CHANGE: The support for python 3.8 was removed to bundle `ansible-core` v2.14.x by default.
2024-01-10 09:07:45 +01:00
renovate[bot]
2232a12bc8
fix(deps): update dependency environs to v10.2.0 (#632)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-10 08:48:16 +01:00
78d4c5f44b
fix: make ansible-core an optional extra-dependency (#631) 2024-01-10 08:32:57 +01:00
c3068a573f
re-generate poetry lockfile 2024-01-09 15:27:07 +01:00
danielpodwysocki
505f9b58cc
fix: allow ansible-core versions newer than 2.13
ansible-core 2.13 and the corresponding ansible 6.0.0 have been deprecated. This allows users to run ansible-doctor with any sufficiently modern version of ansible without breaking legacy setups.
2024-01-09 15:19:10 +01:00
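
Together with the optional `ansible-core` extra from #631, the package now guards the import at startup; the snippet below reproduces the `__init__.py` change shown in the diffs further down.

```
import sys

# Guard from the __init__.py change in this diff: with ansible-core moved to
# an optional extra, a missing import becomes a readable error instead of a
# traceback later on.
try:
    import ansible  # noqa
except ImportError:
    sys.exit("ERROR: Python requirements are missing: 'ansible-core' not found.")
```
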
renovate[bot]
593df92d32
chore(deps): lock file maintenance (#618)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-09 15:17:43 +01:00
renovate[bot]
568b91654d
fix(deps): update dependency environs to v10.1.0 (#627)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-09 15:03:32 +01:00
renovate[bot]
4afabc4284 chore(deps): update dependency ruff to v0.1.11 2024-01-08 02:07:30 +01:00
renovate[bot]
10ff283ec2
chore(deps): update quay.io/thegeeklab/hugo docker tag to v0.121.2 (#625)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-07 20:42:02 +01:00
renovate[bot]
1d0ff92bf3 chore(deps): update dependency pytest to v7.4.4 2024-01-01 03:06:14 +01:00
renovate[bot]
7245a4149c
fix(deps): update dependency environs to v10 (#616)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-12-27 11:07:48 +01:00
renovate[bot]
df155dcf8a chore(deps): update dependency ruff to v0.1.9 2023-12-25 01:59:40 +01:00
2ce29b2bff
use exact package name match 2023-12-24 00:01:40 +01:00
1dc53d1970
disable renovate for python test matrix in ci 2023-12-23 23:40:38 +01:00
2f1f42318b
use list style syntax and cleanup (#619) 2023-12-23 23:24:56 +01:00
renovate[bot]
69d682df79 chore(deps): update dependency ruff to v0.1.8 2023-12-18 02:34:28 +01:00
223b1d8814
cleanup unused env vars in ci 2023-12-17 14:07:58 +01:00
renovate[bot]
70539cc9a2
chore(deps): lock file maintenance (#615)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-12-12 08:55:43 +01:00
renovate[bot]
982e2db2df
fix(deps): update dependency pathspec to v0.12.1 (#613)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-12-12 08:55:06 +01:00
renovate[bot]
787c09a741 chore(deps): update dependency ruff to v0.1.7 2023-12-11 02:16:03 +01:00
renovate[bot]
95c2a6aeaf chore(docker): update python:3.12-alpine docker digest to c793b92 2023-12-09 07:35:33 +01:00
renovate[bot]
855f48894a chore(docker): update python:3.12-alpine docker digest to 401aa10 2023-12-09 04:23:04 +01:00
2270051d0d
ci: exclude dockerhub from linkcheck due to rate limiting 2023-12-07 09:08:22 +01:00
renovate[bot]
c0f100b70e
chore(deps): update quay.io/thegeeklab/wp-docker-buildx docker tag to v2 (#610)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-12-07 08:40:11 +01:00
dependabot[bot]
41ed10270d
chore(deps): bump cryptography from 41.0.5 to 41.0.6 (#609)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-05 10:16:25 +01:00
renovate[bot]
eef09f4a42
chore(deps): lock file maintenance (#607)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-12-05 10:05:27 +01:00
renovate[bot]
0460f09627 chore(docker): update python:3.12-alpine docker digest to 09f18c1 2023-12-04 20:02:12 +00:00
6a7ae3011d
fix settings for required status checks 2023-12-04 21:01:49 +01:00
0db500b0a8
fix: replace log by message for yes/no prompt (#606) 2023-11-23 10:33:36 +01:00
dfa10dd209
fix: skip missing yaml file (#605) 2023-11-23 08:58:32 +01:00
renovate[bot]
82398da75d
chore(deps): lock file maintenance (#603)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-11-20 09:01:31 +01:00
renovate[bot]
0257b874e9
fix(deps): update dependency jsonschema to v4.20.0 (#601)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-11-20 09:00:46 +01:00
renovate[bot]
40b96ae285 chore(deps): update dependency ruff to v0.1.6 2023-11-20 03:19:00 +01:00
0f65f50e06
chore: disable logging in pre-commit (#600) 2023-11-15 08:25:54 +01:00
renovate[bot]
9476810896 chore(deps): update dependency ruff to v0.1.5 2023-11-13 02:26:47 +01:00
renovate[bot]
2eb9aad213
chore(deps): lock file maintenance (#593)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-11-12 21:52:22 +01:00
cacc92f831
fix: parse taskfiles as ansible tasks (#597) 2023-11-12 21:39:41 +01:00
9536cd400d
fix: replace deprecated ruamel.yaml.safe_load (#596) 2023-11-12 14:46:24 +01:00
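
For reference, a short sketch of the non-deprecated ruamel.yaml API this commit switches to (the file name is only an example): instead of the module-level `safe_load`, an explicit `YAML` instance with the safe loader is used.

```
import ruamel.yaml

# Replacement for the deprecated module-level ruamel.yaml.safe_load():
# instantiate a YAML object with the safe loader and call load() on it.
yaml = ruamel.yaml.YAML(typ="safe")
with open(".ansibledoctor.yml", encoding="utf8") as stream:  # example file, assumed
    data = yaml.load(stream)
print(data)
```
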
593a90ff10
chore: drop yapf in favor of the ruff formatter (#595) 2023-11-10 14:50:50 +01:00
renovate[bot]
a1e3e669d4 chore(deps): update dependency ruff to v0.1.4 2023-11-06 01:48:11 +01:00
39 changed files with 1624 additions and 1530 deletions

1
.lycheeignore Normal file
View File

@ -0,0 +1 @@
https://hub.docker.com/r/thegeeklab/*

View File

@ -5,3 +5,6 @@ MD041: False
MD024: False MD024: False
MD004: MD004:
style: dash style: dash
MD033:
allowed_elements:
- "br"

View File

@ -2,7 +2,9 @@
- id: ansible-doctor - id: ansible-doctor
name: ansible-doctor name: ansible-doctor
description: Create annotation based documentation for your Ansible roles. description: Create annotation based documentation for your Ansible roles.
entry: ansible-doctor -f entry: ansible-doctor -f -qqq
language: python language: python
pass_filenames: False pass_filenames: False
always_run: True always_run: True
additional_dependencies:
- .[ansible-core]

View File

@ -6,15 +6,14 @@ when:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
steps: steps:
build: - name: build
image: docker.io/library/python:3.12 image: docker.io/library/python:3.12
commands: commands:
- git fetch --depth=2147483647
- pip install poetry poetry-dynamic-versioning -qq - pip install poetry poetry-dynamic-versioning -qq
- poetry build - poetry build
dryrun: - name: dryrun
image: quay.io/thegeeklab/wp-docker-buildx:1 image: quay.io/thegeeklab/wp-docker-buildx:4
settings: settings:
containerfile: Containerfile.multiarch containerfile: Containerfile.multiarch
dry_run: true dry_run: true
@ -26,9 +25,9 @@ steps:
when: when:
- event: [pull_request] - event: [pull_request]
publish-dockerhub: - name: publish-dockerhub
image: quay.io/thegeeklab/wp-docker-buildx:4
group: container group: container
image: quay.io/thegeeklab/wp-docker-buildx:1
settings: settings:
auto_tag: true auto_tag: true
containerfile: Containerfile.multiarch containerfile: Containerfile.multiarch
@ -47,9 +46,9 @@ steps:
branch: branch:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
publish-quay: - name: publish-quay
image: quay.io/thegeeklab/wp-docker-buildx:4
group: container group: container
image: quay.io/thegeeklab/wp-docker-buildx:1
settings: settings:
auto_tag: true auto_tag: true
containerfile: Containerfile.multiarch containerfile: Containerfile.multiarch

View File

@ -6,27 +6,25 @@ when:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
steps: steps:
build: - name: build
image: docker.io/library/python:3.12 image: docker.io/library/python:3.12
commands: commands:
- git fetch --depth=2147483647
- pip install poetry poetry-dynamic-versioning -qq - pip install poetry poetry-dynamic-versioning -qq
- poetry build - poetry build
checksum: - name: checksum
image: quay.io/thegeeklab/alpine-tools image: quay.io/thegeeklab/alpine-tools
commands: commands:
- cd dist/ && sha256sum * > ../sha256sum.txt - cd dist/ && sha256sum * > ../sha256sum.txt
changelog: - name: changelog
image: quay.io/thegeeklab/git-sv image: quay.io/thegeeklab/git-sv
commands: commands:
- git fetch --depth=2147483647
- git sv current-version - git sv current-version
- git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md - git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
- cat CHANGELOG.md - cat CHANGELOG.md
publish-github: - name: publish-github
image: docker.io/plugins/github-release image: docker.io/plugins/github-release
settings: settings:
api_key: api_key:
@ -40,15 +38,14 @@ steps:
when: when:
- event: [tag] - event: [tag]
publish-pypi: - name: publish-pypi
image: docker.io/library/python:3.12 image: docker.io/library/python:3.12
secrets: secrets:
- source: pypi_password - source: pypi_password
target: POETRY_HTTP_BASIC_PYPI_PASSWORD target: POETRY_HTTP_BASIC_PYPI_PASSWORD
- source: pypi_username - source: pypi_username
target: POETRY_HTTP_BASIC_PYPI_USERNAME target: POETRY_HTTP_BASIC_PYPI_USERNAME
commands: commands:
- git fetch --depth=2147483647
- pip install poetry poetry-dynamic-versioning -qq - pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n - poetry publish -n
when: when:

View File

@ -6,51 +6,44 @@ when:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
steps: steps:
assets: - name: assets
image: quay.io/thegeeklab/alpine-tools image: quay.io/thegeeklab/alpine-tools
commands: commands:
- make doc - make doc
markdownlint: - name: markdownlint
image: quay.io/thegeeklab/markdownlint-cli image: quay.io/thegeeklab/markdownlint-cli
group: test group: test
commands: commands:
- markdownlint 'README.md' 'CONTRIBUTING.md' - markdownlint 'README.md' 'CONTRIBUTING.md'
spellcheck: - name: spellcheck
image: quay.io/thegeeklab/alpine-tools image: quay.io/thegeeklab/alpine-tools
group: test group: test
commands: commands:
- spellchecker --files '_docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls - spellchecker --files 'docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls
environment: environment:
FORCE_COLOR: "true" FORCE_COLOR: "true"
NPM_CONFIG_LOGLEVEL: "error"
link-validation: - name: link-validation
image: docker.io/lycheeverse/lychee image: docker.io/lycheeverse/lychee
group: test group: test
commands: commands:
- lychee --no-progress --format detailed docs/content README.md - lychee --no-progress --format detailed docs/content README.md
testbuild: - name: build
image: quay.io/thegeeklab/hugo:0.115.2 image: quay.io/thegeeklab/hugo:0.127.0
commands:
- hugo --panicOnWarning -s docs/ -b http://localhost:8000/
build:
image: quay.io/thegeeklab/hugo:0.115.2
commands: commands:
- hugo --panicOnWarning -s docs/ - hugo --panicOnWarning -s docs/
beautify: - name: beautify
image: quay.io/thegeeklab/alpine-tools image: quay.io/thegeeklab/alpine-tools
commands: commands:
- html-beautify -r -f 'docs/public/**/*.html' - html-beautify -r -f 'docs/public/**/*.html'
environment: environment:
FORCE_COLOR: "true" FORCE_COLOR: "true"
NPM_CONFIG_LOGLEVEL: error
publish: - name: publish
image: quay.io/thegeeklab/wp-s3-action image: quay.io/thegeeklab/wp-s3-action
settings: settings:
access_key: access_key:
@ -69,15 +62,15 @@ steps:
- event: [push, manual] - event: [push, manual]
branch: branch:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
status: [success] status: [success, failure]
pushrm-dockerhub: - name: pushrm-dockerhub
image: docker.io/chko/docker-pushrm:1 image: docker.io/chko/docker-pushrm:1
secrets: secrets:
- source: docker_password - source: docker_password
target: DOCKER_PASS target: DOCKER_PASS
- source: docker_username - source: docker_username
target: DOCKER_USER target: DOCKER_USER
environment: environment:
PUSHRM_FILE: README.md PUSHRM_FILE: README.md
PUSHRM_SHORT: Annotation based documentation for your Ansible roles PUSHRM_SHORT: Annotation based documentation for your Ansible roles
@ -88,7 +81,7 @@ steps:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
status: [success] status: [success]
pushrm-quay: - name: pushrm-quay
image: docker.io/chko/docker-pushrm:1 image: docker.io/chko/docker-pushrm:1
secrets: secrets:
- source: quay_token - source: quay_token

View File

@ -6,22 +6,29 @@ when:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
steps: steps:
check-format: - name: check-format
image: docker.io/library/python:3.12 image: docker.io/library/python:3.12
commands: commands:
- git fetch --depth=2147483647
- pip install poetry poetry-dynamic-versioning -qq - pip install poetry poetry-dynamic-versioning -qq
- poetry install - poetry install -E ansible-core
- poetry run yapf -dr ./${CI_REPO_NAME//-/} - poetry run ruff format --check --diff ./${CI_REPO_NAME//-/}
environment: environment:
PY_COLORS: "1" PY_COLORS: "1"
check-coding: - name: check-coding
image: docker.io/library/python:3.12 image: docker.io/library/python:3.12
commands: commands:
- git fetch --depth=2147483647
- pip install poetry poetry-dynamic-versioning -qq - pip install poetry poetry-dynamic-versioning -qq
- poetry install - poetry install -E ansible-core
- poetry run ruff ./${CI_REPO_NAME//-/} - poetry run ruff check ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"
- name: check-jinja
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run j2lint ansibledoctor/templates/ -i jinja-statements-indentation jinja-statements-delimiter
environment: environment:
PY_COLORS: "1" PY_COLORS: "1"

View File

@ -8,7 +8,7 @@ when:
runs_on: [success, failure] runs_on: [success, failure]
steps: steps:
matrix: - name: matrix
image: quay.io/thegeeklab/wp-matrix image: quay.io/thegeeklab/wp-matrix
settings: settings:
homeserver: homeserver:

View File

@ -5,22 +5,30 @@ when:
branch: branch:
- ${CI_REPO_DEFAULT_BRANCH} - ${CI_REPO_DEFAULT_BRANCH}
matrix: variables:
PYTHON_VERSION: - &pytest_base
- "3.8" group: pytest
- "3.9"
- "3.10"
- "3.11"
- "3.12"
steps:
pytest:
image: docker.io/library/python:${PYTHON_VERSION}
commands: commands:
- git fetch --depth=2147483647
- pip install poetry poetry-dynamic-versioning -qq - pip install poetry poetry-dynamic-versioning -qq
- poetry install - poetry install -E ansible-core
- poetry version - poetry version
- poetry run ${CI_REPO_NAME} --help - poetry run ${CI_REPO_NAME} --help
environment: environment:
PY_COLORS: "1" PY_COLORS: "1"
steps:
- name: python-312
image: docker.io/library/python:3.12
<<: *pytest_base
- name: python-311
image: docker.io/library/python:3.11
<<: *pytest_base
- name: python-310
image: docker.io/library/python:3.10
<<: *pytest_base
- name: python-39
image: docker.io/library/python:3.9
<<: *pytest_base

View File

@ -1,4 +1,4 @@
FROM python:3.12-alpine@sha256:a5d1738d6abbdff3e81c10b7f86923ebcb340ca536e21e8c5ee7d938d263dba1 FROM python:3.12-alpine@sha256:a982997504b8ec596f553d78f4de4b961bbdf5254e0177f6e99bb34f4ef16f95
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>" LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>" LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
@ -12,9 +12,9 @@ ENV TZ=UTC
ADD dist/ansible_doctor-*.whl / ADD dist/ansible_doctor-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \ RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev git && \
pip install --upgrade --no-cache-dir pip && \ pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \ pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl")[ansible-core] && \
rm -f ansible_doctor-*.whl && \ rm -f ansible_doctor-*.whl && \
rm -rf /var/cache/apk/* && \ rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/ rm -rf /root/.cache/

View File

@ -1,5 +1,5 @@
# renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc # renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc
THEME_VERSION := v0.44.0 THEME_VERSION := v0.46.0
THEME := hugo-geekdoc THEME := hugo-geekdoc
BASEDIR := docs BASEDIR := docs
THEMEDIR := $(BASEDIR)/themes THEMEDIR := $(BASEDIR)/themes

View File

@ -1,3 +1,10 @@
"""Provide version information.""" """Provide version information."""
__version__ = "0.0.0" __version__ = "0.0.0"
import sys
try:
import ansible # noqa
except ImportError:
sys.exit("ERROR: Python requirements are missing: 'ansible-core' not found.")

View File

@ -6,9 +6,10 @@ import re
from collections import defaultdict from collections import defaultdict
import anyconfig import anyconfig
import structlog
from ansibledoctor.config import SingleConfig from ansibledoctor.config import SingleConfig
from ansibledoctor.utils import SingleLog, _split_string from ansibledoctor.utils import _split_string, sysexit_with_message
class AnnotationItem: class AnnotationItem:
@ -37,8 +38,7 @@ class Annotation:
self._all_items = defaultdict(dict) self._all_items = defaultdict(dict)
self._file_handler = None self._file_handler = None
self.config = SingleConfig() self.config = SingleConfig()
self.log = SingleLog() self.log = structlog.get_logger()
self.logger = self.log.logger
self._files_registry = files_registry self._files_registry = files_registry
self._all_annotations = self.config.get_annotations_definition() self._all_annotations = self.config.get_annotations_definition()
@ -67,7 +67,7 @@ class Annotation:
num, line, self._annotation_definition["name"], rfile num, line, self._annotation_definition["name"], rfile
) )
if item: if item:
self.logger.info(str(item)) self.log.info(f"Found {item!s}")
self._populate_item( self._populate_item(
item.get_obj().items(), self._annotation_definition["name"] item.get_obj().items(), self._annotation_definition["name"]
) )
@ -85,7 +85,7 @@ class Annotation:
try: try:
anyconfig.merge(self._all_items[key], value, ac_merge=anyconfig.MS_DICTS) anyconfig.merge(self._all_items[key], value, ac_merge=anyconfig.MS_DICTS)
except ValueError as e: except ValueError as e:
self.log.sysexit_with_message(f"Unable to merge annotation values:\n{e}") sysexit_with_message("Failed to merge annotation values", error=e)
def _get_annotation_data(self, num, line, name, rfile): def _get_annotation_data(self, num, line, name, rfile):
""" """
@ -171,15 +171,15 @@ class Annotation:
if parts[2].startswith("$"): if parts[2].startswith("$"):
source = "".join([x.strip() for x in multiline]) source = "".join([x.strip() for x in multiline])
multiline = self._str_to_json(key, source, rfile, num, line) multiline = self._str_to_json(key, source, rfile, num)
item.data[key][parts[1]] = multiline item.data[key][parts[1]] = multiline
return item return item
def _str_to_json(self, key, string, rfile, num, line): def _str_to_json(self, key, string, rfile, num):
try: try:
return {key: json.loads(string)} return {key: json.loads(string)}
except ValueError: except ValueError:
self.log.sysexit_with_message( sysexit_with_message(
f"Json value error: Can't parse json in {rfile}:{num!s}:\n{line.strip()}" f"ValueError: Failed to parse json in {rfile}:{num!s}", file=rfile
) )

View File

@ -4,25 +4,32 @@
import argparse import argparse
import os import os
import structlog
import ansibledoctor.exception import ansibledoctor.exception
from ansibledoctor import __version__ from ansibledoctor import __version__
from ansibledoctor.config import SingleConfig from ansibledoctor.config import SingleConfig
from ansibledoctor.doc_generator import Generator from ansibledoctor.doc_generator import Generator
from ansibledoctor.doc_parser import Parser from ansibledoctor.doc_parser import Parser
from ansibledoctor.utils import SingleLog from ansibledoctor.utils import sysexit_with_message
class AnsibleDoctor: class AnsibleDoctor:
"""Create main object.""" """Create main object."""
def __init__(self): log = structlog.get_logger()
self.log = SingleLog()
self.logger = self.log.logger
self.args = self._cli_args()
self.config = self._get_config()
self._execute()
def _cli_args(self): def __init__(self):
try:
self.config = SingleConfig()
self.config.load(args=self._parse_args())
self._execute()
except ansibledoctor.exception.DoctorError as e:
sysexit_with_message(e)
except KeyboardInterrupt:
sysexit_with_message("Aborted...")
def _parse_args(self):
""" """
Use argparse for parsing CLI arguments. Use argparse for parsing CLI arguments.
@ -33,90 +40,102 @@ class AnsibleDoctor:
description="Generate documentation from annotated Ansible roles using templates" description="Generate documentation from annotated Ansible roles using templates"
) )
parser.add_argument( parser.add_argument(
"base_dir", nargs="?", help="base directory (default: current working directory)" "base_dir",
nargs="?",
default=self.config.config.base_dir,
help="base directory (default: current working directory)",
) )
parser.add_argument( parser.add_argument(
"-c", "--config", dest="config_file", help="path to configuration file" "-c",
"--config",
dest="config_file",
help="path to configuration file",
) )
parser.add_argument( parser.add_argument(
"-o", "--output", dest="output_dir", action="store", help="output directory" "-o",
"--output",
dest="renderer__dest",
action="store",
default=self.config.config.renderer.dest,
help="output directory",
metavar="OUTPUT_DIR",
) )
parser.add_argument( parser.add_argument(
"-r", "-r",
"--recursive", "--recursive",
dest="recursive", dest="recursive",
action="store_true", action="store_true",
default=None, default=self.config.config.recursive,
help="run recursively over the base directory subfolders" help="run recursively over the base directory subfolders",
) )
parser.add_argument( parser.add_argument(
"-f", "-f",
"--force", "--force",
dest="force_overwrite", dest="renderer.force_overwrite",
action="store_true", action="store_true",
default=None, default=self.config.config.renderer.force_overwrite,
help="force overwrite output file" help="force overwrite output file",
) )
parser.add_argument( parser.add_argument(
"-d", "-d",
"--dry-run", "--dry-run",
dest="dry_run", dest="dry_run",
action="store_true", action="store_true",
default=None, default=self.config.config.dry_run,
help="dry run without writing" help="dry run without writing",
) )
parser.add_argument( parser.add_argument(
"-n", "-n",
"--no-role-detection", "--no-role-detection",
dest="role_detection", dest="role_detection",
action="store_false", action="store_false",
default=None, default=self.config.config.role.autodetect,
help="disable automatic role detection" help="disable automatic role detection",
) )
parser.add_argument( parser.add_argument(
"-v", dest="logging.level", action="append_const", const=-1, help="increase log level" "-v",
dest="logging.level",
action="append_const",
const=-1,
help="increase log level",
) )
parser.add_argument( parser.add_argument(
"-q", dest="logging.level", action="append_const", const=1, help="decrease log level" "-q",
dest="logging.level",
action="append_const",
const=1,
help="decrease log level",
)
parser.add_argument(
"--version",
action="version",
version=f"%(prog)s {__version__}",
) )
parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
return parser.parse_args().__dict__ return parser.parse_args().__dict__
def _get_config(self):
try:
config = SingleConfig(args=self.args)
except ansibledoctor.exception.ConfigError as e:
self.log.sysexit_with_message(e)
return config
def _execute(self): def _execute(self):
cwd = self.config.base_dir cwd = os.path.abspath(self.config.config.base_dir)
walkdirs = [cwd] walkdirs = [cwd]
if self.config.recursive: if self.config.config.recursive:
walkdirs = [f.path for f in os.scandir(cwd) if f.is_dir()] walkdirs = [f.path for f in os.scandir(cwd) if f.is_dir()]
for item in walkdirs: for item in walkdirs:
os.chdir(item) os.chdir(item)
self.config.load(root_path=os.getcwd())
self.config.set_config(base_dir=os.getcwd()) self.log.debug("Switch working directory", path=item)
try: self.log.info("Lookup config file", path=self.config.config_files)
self.log.set_level(self.config.config["logging"]["level"])
except ValueError as e:
self.log.sysexit_with_message(f"Can not set log level.\n{e!s}")
self.logger.info(f"Using config file: {self.config.config_file}")
self.logger.debug(f"Using working dir: {item}") if self.config.config.role.autodetect:
if self.config.is_role():
if self.config.config["role_detection"]: structlog.contextvars.bind_contextvars(role=self.config.config.role_name)
if self.config.is_role: self.log.info("Ansible role detected")
self.logger.info(f"Ansible role detected: {self.config.config['role_name']}")
else: else:
self.log.sysexit_with_message("No Ansible role detected") sysexit_with_message("No Ansible role detected")
else: else:
self.logger.info("Ansible role detection disabled") self.log.info("Ansible role detection disabled")
doc_parser = Parser() doc_parser = Parser()
doc_generator = Generator(doc_parser) doc_generator = Generator(doc_parser)

View File

@ -1,337 +1,226 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Global settings definition.""" """Global settings definition."""
import logging
import os import os
import re
from io import StringIO
import anyconfig import colorama
import environs import structlog
import jsonschema.exceptions
import ruamel.yaml
from appdirs import AppDirs from appdirs import AppDirs
from jsonschema._utils import format_as_index from dynaconf import Dynaconf, ValidationError, Validator
import ansibledoctor.exception import ansibledoctor.exception
from ansibledoctor.utils import Singleton from ansibledoctor.utils import Singleton
config_dir = AppDirs("ansible-doctor").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")
default_envs_prefix = "ANSIBLE_DOCTOR_"
class Config: class Config:
""" """Create configuration object."""
Create an object with all necessary settings.
Settings are loade from multiple locations in defined order (last wins):
- default settings defined by `self._get_defaults()`
- yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
- provides cli parameters
"""
SETTINGS = {
"config_file": {
"default": default_config_file,
"env": "CONFIG_FILE",
"type": environs.Env().str
},
"base_dir": {
"default": os.getcwd(),
"refresh": os.getcwd,
"env": "BASE_DIR",
"type": environs.Env().str
},
"role_name": {
"default": "",
"env": "ROLE_NAME",
"type": environs.Env().str
},
"dry_run": {
"default": False,
"env": "DRY_RUN",
"file": True,
"type": environs.Env().bool
},
"logging.level": {
"default": "WARNING",
"env": "LOG_LEVEL",
"file": True,
"type": environs.Env().str
},
"logging.json": {
"default": False,
"env": "LOG_JSON",
"file": True,
"type": environs.Env().bool
},
"output_dir": {
"default": os.getcwd(),
"refresh": os.getcwd,
"env": "OUTPUT_DIR",
"file": True,
"type": environs.Env().str
},
"recursive": {
"default": False,
"env": "RECURSIVE",
"type": environs.Env().bool
},
"template_dir": {
"default": os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates"),
"env": "TEMPLATE_DIR",
"file": True,
"type": environs.Env().str
},
"template": {
"default": "readme",
"env": "TEMPLATE",
"file": True,
"type": environs.Env().str
},
"template_autotrim": {
"default": True,
"env": "TEMPLATE_AUTOTRIM",
"file": True,
"type": environs.Env().bool
},
"force_overwrite": {
"default": False,
"env": "FORCE_OVERWRITE",
"file": True,
"type": environs.Env().bool
},
"custom_header": {
"default": "",
"env": "CUSTOM_HEADER",
"file": True,
"type": environs.Env().str
},
"exclude_files": {
"default": [],
"env": "EXCLUDE_FILES",
"file": True,
"type": environs.Env().list
},
"exclude_tags": {
"default": [],
"env": "EXCLUDE_TAGS",
"file": True,
"type": environs.Env().list
},
"role_detection": {
"default": True,
"env": "ROLE_DETECTION",
"file": True,
"type": environs.Env().bool
},
}
ANNOTATIONS = { ANNOTATIONS = {
"meta": { "meta": {
"name": "meta", "name": "meta",
"automatic": True, "automatic": True,
"subtypes": ["value"], "subtypes": ["value"],
"allow_multiple": False "allow_multiple": False,
}, },
"todo": { "todo": {
"name": "todo", "name": "todo",
"automatic": True, "automatic": True,
"subtypes": ["value"], "subtypes": ["value"],
"allow_multiple": True "allow_multiple": True,
}, },
"var": { "var": {
"name": "var", "name": "var",
"automatic": True, "automatic": True,
"subtypes": ["value", "example", "description", "type", "deprecated"], "subtypes": ["value", "example", "description", "type", "deprecated"],
"allow_multiple": False "allow_multiple": False,
}, },
"example": { "example": {
"name": "example", "name": "example",
"automatic": True, "automatic": True,
"subtypes": [], "subtypes": [],
"allow_multiple": False "allow_multiple": False,
}, },
"tag": { "tag": {
"name": "tag", "name": "tag",
"automatic": True, "automatic": True,
"subtypes": ["value", "description"], "subtypes": ["value", "description"],
"allow_multiple": False "allow_multiple": False,
}, },
} }
def __init__(self, args=None): def __init__(self):
""" self.config_files = [
Initialize a new settings class. os.path.join(AppDirs("ansible-doctor").user_config_dir, "config.yml"),
".ansibledoctor",
".ansibledoctor.yml",
".ansibledoctor.yaml",
]
self.config_merge = True
self.args = {}
self.load()
:param args: An optional dict of options, arguments and commands from the CLI. def load(self, root_path=None, args=None):
:param config_file: An optional path to a yaml config file. tmpl_src = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
:returns: None tmpl_provider = ["local", "git"]
""" if args:
if args is None: if args.get("config_file"):
self._args = {} self.config_merge = False
else: self.config_files = [os.path.abspath(args.get("config_file"))]
self._args = args args.pop("config_file")
self._schema = None
self.config = None
self.is_role = False
self.set_config()
def _get_args(self, args): self.args = args
cleaned = dict(filter(lambda item: item[1] is not None, args.items()))
normalized = {} self.config = Dynaconf(
for key, value in cleaned.items(): envvar_prefix="ANSIBLE_DOCTOR",
normalized = self._add_dict_branch(normalized, key.split("."), value) merge_enabled=self.config_merge,
core_loaders=["YAML"],
root_path=root_path,
settings_files=self.config_files,
fresh_vars=["base_dir", "output_dir"],
validators=[
Validator(
"base_dir",
default=os.getcwd(),
apply_default_on_none=True,
is_type_of=str,
),
Validator(
"dry_run",
default=False,
is_type_of=bool,
),
Validator(
"recursive",
default=False,
is_type_of=bool,
),
Validator(
"exclude_files",
default=[],
is_type_of=list,
),
Validator(
"exclude_tags",
default=[],
is_type_of=list,
),
Validator(
"role.name",
is_type_of=str,
),
Validator(
"role.autodetect",
default=True,
is_type_of=bool,
),
Validator(
"logging.level",
default="WARNING",
is_in=[
"DEBUG",
"INFO",
"WARNING",
"ERROR",
"CRITICAL",
"debug",
"info",
"warning",
"error",
"critical",
],
),
Validator(
"logging.json",
default=False,
is_type_of=bool,
),
Validator(
"recursive",
default=False,
is_type_of=bool,
),
Validator(
"template.src",
default=f"local>{tmpl_src}",
is_type_of=str,
condition=lambda x: re.match(r"^(local|git)\s*>\s*", x),
messages={
"condition": f"Template provider must be one of {tmpl_provider}.",
},
),
Validator(
"template.name",
default="readme",
is_type_of=str,
),
Validator(
"template.options.tabulate_variables",
default=False,
is_type_of=bool,
),
Validator(
"renderer.autotrim",
default=True,
is_type_of=bool,
),
Validator(
"renderer.include_header",
default="",
is_type_of=str,
),
Validator(
"renderer.dest",
default=os.path.relpath(os.getcwd()),
is_type_of=str,
),
Validator(
"renderer.force_overwrite",
default=False,
is_type_of=bool,
),
],
)
self.validate()
# Override correct log level from argparse # Override correct log level from argparse
levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
log_level = levels.index(self.SETTINGS["logging.level"]["default"]) log_level = levels.index(self.config.logging.level.upper())
if normalized.get("logging"): if self.args.get("logging.level") and isinstance(self.args["logging.level"], list):
for adjustment in normalized["logging"]["level"]: for lvl in self.args["logging.level"]:
log_level = min(len(levels) - 1, max(log_level + adjustment, 0)) log_level = min(len(levels) - 1, max(log_level + lvl, 0))
normalized["logging"]["level"] = levels[log_level]
return normalized self.args["logging__level"] = levels[log_level]
def _get_defaults(self): if root_path:
normalized = {} self.args["base_dir"] = root_path
for key, item in self.SETTINGS.items():
if item.get("refresh"):
item["default"] = item["refresh"]()
normalized = self._add_dict_branch(normalized, key.split("."), item["default"])
self.schema = anyconfig.gen_schema(normalized) self.config.update(self.args)
return normalized self.validate()
def _get_envs(self): self._init_logger()
normalized = {}
for key, item in self.SETTINGS.items():
if item.get("env"):
envname = f"{default_envs_prefix}{item['env']}"
try:
value = item["type"](envname)
normalized = self._add_dict_branch(normalized, key.split("."), value)
except environs.EnvError as e:
if f'"{envname}" not set' in str(e):
pass
else:
raise ansibledoctor.exception.ConfigError(
"Unable to read environment variable", str(e)
) from e
return normalized def validate(self):
def set_config(self, base_dir=None):
args = self._get_args(self._args)
envs = self._get_envs()
defaults = self._get_defaults()
self.recursive = defaults.get("recursive")
if envs.get("recursive"):
self.recursive = envs.get("recursive")
if args.get("recursive"):
self.recursive = args.get("recursive")
if "recursive" in defaults:
defaults.pop("recursive")
self.config_file = defaults.get("config_file")
if envs.get("config_file"):
self.config_file = self._normalize_path(envs.get("config_file"))
if args.get("config_file"):
self.config_file = self._normalize_path(args.get("config_file"))
if "config_file" in defaults:
defaults.pop("config_file")
self.base_dir = defaults.get("base_dir")
if envs.get("base_dir"):
self.base_dir = self._normalize_path(envs.get("base_dir"))
if args.get("base_dir"):
self.base_dir = self._normalize_path(args.get("base_dir"))
if base_dir:
self.base_dir = base_dir
if "base_dir" in defaults:
defaults.pop("base_dir")
self.is_role = os.path.isdir(os.path.join(self.base_dir, "tasks"))
# compute role_name default
defaults["role_name"] = os.path.basename(self.base_dir)
source_files = []
source_files.append((self.config_file, False))
source_files.append((os.path.join(os.getcwd(), ".ansibledoctor"), True))
source_files.append((os.path.join(os.getcwd(), ".ansibledoctor.yml"), True))
source_files.append((os.path.join(os.getcwd(), ".ansibledoctor.yaml"), True))
for (config, first_found) in source_files:
if config and os.path.exists(config):
with open(config, encoding="utf8") as stream:
s = stream.read()
try:
file_dict = ruamel.yaml.safe_load(s)
except (
ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
) as e:
message = f"{e.context} {e.problem}"
raise ansibledoctor.exception.ConfigError(
f"Unable to read config file: {config}", message
) from e
if self._validate(file_dict):
anyconfig.merge(defaults, file_dict, ac_merge=anyconfig.MS_DICTS)
defaults["logging"]["level"] = defaults["logging"]["level"].upper()
self.config_file = config
if first_found:
break
if self._validate(envs):
anyconfig.merge(defaults, envs, ac_merge=anyconfig.MS_DICTS)
if self._validate(args):
anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)
fix_files = ["output_dir", "template_dir", "custom_header"]
for filename in fix_files:
if defaults[filename] and defaults[filename] != "":
defaults[filename] = self._normalize_path(defaults[filename])
defaults["logging"]["level"] = defaults["logging"]["level"].upper()
self.config = defaults
def _normalize_path(self, path):
if not os.path.isabs(path):
base = os.path.join(os.getcwd(), path)
return os.path.abspath(os.path.expanduser(os.path.expandvars(base)))
return path
def _validate(self, config):
try: try:
anyconfig.validate(config, self.schema, ac_schema_safe=False) self.config.validators.validate_all()
except jsonschema.exceptions.ValidationError as e: except ValidationError as e:
schema_error = "Failed validating '{validator}' in schema{schema}\n{message}".format( raise ansibledoctor.exception.ConfigError("Configuration error", e.message) from e
validator=e.validator,
schema=format_as_index(list(e.relative_schema_path)[:-1]),
message=e.message
)
raise ansibledoctor.exception.ConfigError("Configuration error", schema_error) from e
return True def is_role(self):
self.config.role_name = self.config.get(
def _add_dict_branch(self, tree, vector, value): "role_name", os.path.basename(self.config.base_dir)
key = vector[0] )
tree[key] = value \ return os.path.isdir(os.path.join(self.config.base_dir, "tasks"))
if len(vector) == 1 \
else self._add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
return tree
def get_annotations_definition(self, automatic=True): def get_annotations_definition(self, automatic=True):
annotations = {} annotations = {}
if automatic: if automatic:
for k, item in self.ANNOTATIONS.items(): for k, item in self.ANNOTATIONS.items():
if "automatic" in item and item["automatic"]: if item.get("automatic"):
annotations[k] = item annotations[k] = item
return annotations return annotations
@ -339,19 +228,84 @@ class Config:
annotations = [] annotations = []
if automatic: if automatic:
for k, item in self.ANNOTATIONS.items(): for k, item in self.ANNOTATIONS.items():
if "automatic" in item and item["automatic"]: if item.get("automatic"):
annotations.append(k) annotations.append(k)
return annotations return annotations
def get_template(self): def _init_logger(self):
""" styles = structlog.dev.ConsoleRenderer.get_default_level_styles()
Get the base dir for the template to use. styles["debug"] = colorama.Fore.BLUE
:return: str abs path processors = [
""" structlog.contextvars.merge_contextvars,
template_dir = self.config.get("template_dir") structlog.processors.add_log_level,
template = self.config.get("template") structlog.processors.StackInfoRenderer(),
return os.path.realpath(os.path.join(template_dir, template)) structlog.dev.set_exc_info,
structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
]
if self.config.logging.json:
processors.append(ErrorStringifier())
processors.append(structlog.processors.JSONRenderer())
else:
processors.append(MultilineConsoleRenderer(level_styles=styles))
try:
structlog.configure(
processors=processors,
wrapper_class=structlog.make_filtering_bound_logger(
logging.getLevelName(self.config.get("logging.level")),
),
)
structlog.contextvars.unbind_contextvars()
except KeyError as e:
raise ansibledoctor.exception.ConfigError(f"Can not set log level: {e!s}") from e
class ErrorStringifier:
"""A processor that converts exceptions to a string representation."""
def __call__(self, _, __, event_dict):
if "error" not in event_dict:
return event_dict
err = event_dict.get("error")
if isinstance(err, Exception):
event_dict["error"] = f"{err.__class__.__name__}: {err}"
return event_dict
class MultilineConsoleRenderer(structlog.dev.ConsoleRenderer):
"""A processor for printing multiline strings."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __call__(self, _, __, event_dict):
err = None
if "error" in event_dict:
err = event_dict.pop("error")
event_dict = super().__call__(_, __, event_dict)
if not err:
return event_dict
sio = StringIO()
sio.write(event_dict)
if isinstance(err, Exception):
sio.write(
f"\n{colorama.Fore.RED}{err.__class__.__name__}:"
f"{colorama.Style.RESET_ALL} {str(err).strip()}"
)
else:
sio.write(f"\n{err.strip()}")
return sio.getvalue()
class SingleConfig(Config, metaclass=Singleton): class SingleConfig(Config, metaclass=Singleton):

View File

@ -1,75 +1,52 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Prepare output and write compiled jinja2 templates.""" """Prepare output and write compiled jinja2 templates."""
import glob
import ntpath
import os import os
import re import re
from functools import reduce from functools import reduce
import jinja2.exceptions import jinja2.exceptions
import ruamel.yaml import ruamel.yaml
import structlog
from jinja2 import Environment, FileSystemLoader from jinja2 import Environment, FileSystemLoader
from jinja2.filters import pass_eval_context from jinja2.filters import pass_eval_context
import ansibledoctor.exception
from ansibledoctor.config import SingleConfig from ansibledoctor.config import SingleConfig
from ansibledoctor.utils import FileUtils, SingleLog from ansibledoctor.template import Template
from ansibledoctor.utils import FileUtils, sysexit_with_message
class Generator: class Generator:
"""Generate documentation from jinja2 templates.""" """Generate documentation from jinja2 templates."""
def __init__(self, doc_parser): def __init__(self, doc_parser):
self.template_files = [] self.log = structlog.get_logger()
self.extension = "j2"
self._parser = None
self.config = SingleConfig() self.config = SingleConfig()
self.log = SingleLog() self.template = Template(
-self.logger = self.log.logger
+self.config.config.get("template.name"),
+self.config.config.get("template.src"),
+)
 self._parser = doc_parser
-self._scan_template()
-def _scan_template(self):
-"""
-Search for Jinja2 (.j2) files to apply to the destination.
-:return: None
-"""
-template_dir = self.config.get_template()
-if os.path.isdir(template_dir):
-self.logger.info(f"Using template dir: {template_dir}")
-else:
-self.log.sysexit_with_message(f"Can not open template dir {template_dir}")
-for file in glob.iglob(template_dir + "/**/*." + self.extension, recursive=True):
-relative_file = file[len(template_dir) + 1:]
-if ntpath.basename(file)[:1] != "_":
-self.logger.debug(f"Found template file: {relative_file}")
-self.template_files.append(relative_file)
-else:
-self.logger.debug(f"Ignoring template file: {relative_file}")
 def _create_dir(self, directory):
 if not self.config.config["dry_run"] and not os.path.isdir(directory):
 try:
 os.makedirs(directory, exist_ok=True)
-self.logger.info(f"Creating dir: {directory}")
+self.log.info(f"Creating dir: {directory}")
 except FileExistsError as e:
-self.log.sysexit_with_message(str(e))
+sysexit_with_message(e)
 def _write_doc(self):
 files_to_overwite = []
-for file in self.template_files:
+for tf in self.template.files:
 doc_file = os.path.join(
-self.config.config.get("output_dir"),
-os.path.splitext(file)[0]
+self.config.config.get("renderer.dest"), os.path.splitext(tf)[0]
 )
 if os.path.isfile(doc_file):
 files_to_overwite.append(doc_file)
-header_file = self.config.config.get("custom_header")
+header_file = self.config.config.get("renderer.include_header")
 role_data = self._parser.get_data()
 header_content = ""
 if bool(header_file):
@@ -78,70 +55,69 @@ class Generator:
 with open(header_file) as a:
 header_content = a.read()
 except FileNotFoundError as e:
-self.log.sysexit_with_message(f"Can not open custom header file\n{e!s}")
+sysexit_with_message("Can not open custom header file", path=header_file, error=e)
 if (
-len(files_to_overwite) > 0 and self.config.config.get("force_overwrite") is False
+len(files_to_overwite) > 0
+and self.config.config.get("renderer.force_overwrite") is False
 and not self.config.config["dry_run"]
 ):
 files_to_overwite_string = "\n".join(files_to_overwite)
-self.logger.warning(f"This files will be overwritten:\n{files_to_overwite_string}")
+prompt = f"These files will be overwritten:\n{files_to_overwite_string}".replace(
+"\n", "\n... "
+)
 try:
-if not FileUtils.query_yes_no("Do you want to continue?"):
-self.log.sysexit_with_message("Aborted...")
-except ansibledoctor.exception.InputError as e:
-self.logger.debug(str(e))
-self.log.sysexit_with_message("Aborted...")
+if not FileUtils.query_yes_no(f"{prompt}\nDo you want to continue?"):
+sysexit_with_message("Aborted...")
+except KeyboardInterrupt:
+sysexit_with_message("Aborted...")
-for file in self.template_files:
+for tf in self.template.files:
 doc_file = os.path.join(
-self.config.config.get("output_dir"),
-os.path.splitext(file)[0]
+self.config.config.get("renderer.dest"), os.path.splitext(tf)[0]
 )
-source_file = self.config.get_template() + "/" + file
-self.logger.debug(f"Writing doc output to: {doc_file} from: {source_file}")
+template = os.path.join(self.template.path, tf)
+self.log.debug("Writing renderer output", path=doc_file, src=os.path.dirname(template))
 # make sure the directory exists
 self._create_dir(os.path.dirname(doc_file))
-if os.path.exists(source_file) and os.path.isfile(source_file):
-with open(source_file) as template:
+if os.path.exists(template) and os.path.isfile(template):
+with open(template) as template:
 data = template.read()
 if data is not None:
 try:
 jenv = Environment(  # nosec
-loader=FileSystemLoader(self.config.get_template()),
+loader=FileSystemLoader(self.template.path),
 lstrip_blocks=True,
 trim_blocks=True,
-autoescape=jinja2.select_autoescape()
+autoescape=jinja2.select_autoescape(),
 )
 jenv.filters["to_nice_yaml"] = self._to_nice_yaml
 jenv.filters["deep_get"] = self._deep_get
 jenv.filters["safe_join"] = self._safe_join
 # keep the old name of the function to not break custom templates.
 jenv.filters["save_join"] = self._safe_join
-data = jenv.from_string(data).render(role_data, role=role_data)
+template_options = self.config.config.get("template.options")
+data = jenv.from_string(data).render(
+role_data, role=role_data, options=template_options
+)
 if not self.config.config["dry_run"]:
 with open(doc_file, "wb") as outfile:
 outfile.write(header_content.encode("utf-8"))
 outfile.write(data.encode("utf-8"))
-self.logger.info(f"Writing to: {doc_file}")
-else:
-self.logger.info(f"Writing to: {doc_file}")
 except (
 jinja2.exceptions.UndefinedError,
 jinja2.exceptions.TemplateSyntaxError,
-jinja2.exceptions.TemplateRuntimeError
+jinja2.exceptions.TemplateRuntimeError,
 ) as e:
-self.log.sysexit_with_message(
-f"Jinja2 templating error while loading file: '{file}'\n{e!s}"
-)
+sysexit_with_message(
+"Jinja2 template error while loading file", path=tf, error=e
+)
 except UnicodeEncodeError as e:
-self.log.sysexit_with_message(
-f"Unable to print special characters\n{e!s}"
-)
+sysexit_with_message("Failed to print special characters", error=e)
 def _to_nice_yaml(self, a, indent=4, **kw):
 """Make verbose, human readable yaml."""
@@ -154,8 +130,9 @@ class Generator:
 def _deep_get(self, _, dictionary, keys):
 default = None
 return reduce(
-lambda d, key: d.get(key, default)
-if isinstance(d, dict) else default, keys.split("."), dictionary
+lambda d, key: d.get(key, default) if isinstance(d, dict) else default,
+keys.split("."),
+dictionary,
 )
 @pass_eval_context
@@ -165,12 +142,11 @@ class Generator:
 normalized = jinja2.filters.do_join(eval_ctx, value, d, attribute=None)
-if self.config.config["template_autotrim"]:
+if self.config.config.renderer.autotrim:
 for s in [r" +(\n|\t| )", r"(\n|\t) +"]:
 normalized = re.sub(s, "\\1", normalized)
 return jinja2.filters.do_mark_safe(normalized)
 def render(self):
-self.logger.info(f"Using output dir: {self.config.config.get('output_dir')}")
 self._write_doc()

View File

@@ -3,17 +3,17 @@
 import fnmatch
 from collections import defaultdict
-from contextlib import suppress
 import anyconfig
-import ruamel.yaml
-from nested_lookup import nested_lookup
+import structlog
 from ansibledoctor.annotation import Annotation
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.contstants import YAML_EXTENSIONS
+from ansibledoctor.exception import YAMLError
 from ansibledoctor.file_registry import Registry
-from ansibledoctor.utils import SingleLog, UnsafeTag, flatten
+from ansibledoctor.utils import flatten, sysexit_with_message
+from ansibledoctor.utils.yamlhelper import parse_yaml, parse_yaml_ansible
 class Parser:
@@ -23,110 +23,73 @@ class Parser:
 self._annotation_objs = {}
 self._data = defaultdict(dict)
 self.config = SingleConfig()
-self.log = SingleLog()
-self.logger = SingleLog().logger
+self.log = structlog.get_logger()
 self._files_registry = Registry()
 self._parse_meta_file()
 self._parse_var_files()
 self._parse_task_tags()
 self._populate_doc_data()
-def _yaml_remove_comments(self, d):
-if isinstance(d, dict):
-for k, v in d.items():
-self._yaml_remove_comments(k)
-self._yaml_remove_comments(v)
-elif isinstance(d, list):
-for elem in d:
-self._yaml_remove_comments(elem)
-with suppress(AttributeError):
-attr = "comment" if isinstance(
-d, ruamel.yaml.scalarstring.ScalarString
-) else ruamel.yaml.comments.Comment.attrib
-delattr(d, attr)
 def _parse_var_files(self):
 for rfile in self._files_registry.get_files():
 if any(fnmatch.fnmatch(rfile, "*/defaults/*." + ext) for ext in YAML_EXTENSIONS):
-with open(rfile, encoding="utf8") as yaml_file:
+with open(rfile, encoding="utf8") as yamlfile:
 try:
-ruamel.yaml.add_constructor(
-UnsafeTag.yaml_tag,
-UnsafeTag.yaml_constructor,
-constructor=ruamel.yaml.SafeConstructor
-)
-raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
-self._yaml_remove_comments(raw)
-data = defaultdict(dict, raw or {})
-for key, value in data.items():
-self._data["var"][key] = {"value": {key: value}}
-except (
-ruamel.yaml.composer.ComposerError,
-ruamel.yaml.scanner.ScannerError,
-ruamel.yaml.constructor.ConstructorError,
-ruamel.yaml.constructor.DuplicateKeyError,
-) as e:
-message = f"{e.context} {e.problem}"
-self.log.sysexit_with_message(
-f"Unable to read yaml file {rfile}\n{message}"
-)
+raw = parse_yaml(yamlfile)
+except YAMLError as e:
+sysexit_with_message("Failed to read yaml file", path=rfile, error=e)
+data = defaultdict(dict, raw or {})
+for key, value in data.items():
+self._data["var"][key] = {"value": {key: value}}
 def _parse_meta_file(self):
 self._data["meta"]["name"] = {"value": self.config.config["role_name"]}
 for rfile in self._files_registry.get_files():
 if any("meta/main." + ext in rfile for ext in YAML_EXTENSIONS):
-with open(rfile, encoding="utf8") as yaml_file:
+with open(rfile, encoding="utf8") as yamlfile:
 try:
-raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
-self._yaml_remove_comments(raw)
+raw = parse_yaml(yamlfile)
+except YAMLError as e:
+sysexit_with_message("Failed to read yaml file", path=rfile, error=e)
 data = defaultdict(dict, raw)
 if data.get("galaxy_info"):
 for key, value in data.get("galaxy_info").items():
 self._data["meta"][key] = {"value": value}
 if data.get("dependencies") is not None:
-self._data["meta"]["dependencies"] = {
-"value": data.get("dependencies")
-}
-except (
-ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
-) as e:
-message = f"{e.context} {e.problem}"
-self.log.sysexit_with_message(
-f"Unable to read yaml file {rfile}\n{message}"
-)
+self._data["meta"]["dependencies"] = {"value": data.get("dependencies")}
 def _parse_task_tags(self):
 for rfile in self._files_registry.get_files():
 if any(fnmatch.fnmatch(rfile, "*/tasks/*." + ext) for ext in YAML_EXTENSIONS):
-with open(rfile, encoding="utf8") as yaml_file:
+with open(rfile, encoding="utf8") as yamlfile:
 try:
-raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
-self._yaml_remove_comments(raw)
-tags = list(set(flatten(nested_lookup("tags", raw))))
-for tag in [
-x for x in tags if x not in self.config.config["exclude_tags"]
-]:
-self._data["tag"][tag] = {"value": tag}
-except (
-ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
-) as e:
-message = f"{e.context} {e.problem}"
-self.log.sysexit_with_message(
-f"Unable to read yaml file {rfile}\n{message}"
-)
+raw = parse_yaml_ansible(yamlfile)
+except YAMLError as e:
+sysexit_with_message("Failed to read yaml file", path=rfile, error=e)
+tags = []
+for task in raw:
+task_tags = task.get("tags")
+if isinstance(task_tags, str):
+task_tags = [task_tags]
+for tag in task_tags:
+if tag not in self.config.config["exclude_tags"]:
+tags.append(tag)
+for tag in flatten(tags):
+self._data["tag"][tag] = {"value": tag}
 def _populate_doc_data(self):
 """Generate the documentation data object."""
 tags = defaultdict(dict)
 for annotation in self.config.get_annotations_names(automatic=True):
-self.logger.info(f"Finding annotations for: @{annotation}")
+self.log.info(f"Lookup annotation @{annotation}")
 self._annotation_objs[annotation] = Annotation(
 name=annotation, files_registry=self._files_registry
 )
@@ -135,7 +98,7 @@ class Parser:
 try:
 anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)
 except ValueError as e:
-self.log.sysexit_with_message(f"Unable to merge annotation values:\n{e}")
+sysexit_with_message("Failed to merge annotation values", error=e)
 def get_data(self):
 return self._data

View File

@@ -10,13 +10,17 @@ class DoctorError(Exception):
 self.original_exception = original_exception
+class YAMLError(DoctorError):
+"""Errors while reading a yaml file."""
+pass
 class ConfigError(DoctorError):
 """Errors related to config file handling."""
 pass
-class InputError(DoctorError):
-"""Errors related to config file handling."""
+class TemplateError(DoctorError):
+"""Errors related to template file handling."""
 pass

View File

@@ -5,10 +5,10 @@ import glob
 import os
 import pathspec
+import structlog
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.contstants import YAML_EXTENSIONS
-from ansibledoctor.utils import SingleLog
 class Registry:
@@ -21,7 +21,7 @@ class Registry:
 def __init__(self):
 self._doc = []
 self.config = SingleConfig()
-self.log = SingleLog().logger
+self.log = structlog.get_logger()
 self._scan_for_yamls()
 def get_files(self):
@@ -35,20 +35,17 @@ Registry:
 :return: None
 """
 extensions = YAML_EXTENSIONS
-base_dir = self.config.base_dir
-role_name = os.path.basename(base_dir)
+base_dir = self.config.config.base_dir
 excludes = self.config.config.get("exclude_files")
 excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)
-self.log.debug(f"Scan for files: {base_dir}")
+self.log.debug("Lookup role files", path=base_dir)
 for extension in extensions:
 pattern = os.path.join(base_dir, "**/*." + extension)
 for filename in glob.iglob(pattern, recursive=True):
 if not excludespec.match_file(filename):
-self.log.debug(
-f"Adding file to '{role_name}': {os.path.relpath(filename, base_dir)}"
-)
+self.log.debug("Found role file", path=os.path.relpath(filename, base_dir))
 self._doc.append(filename)
 else:
-self.log.debug(f"Excluding file: {os.path.relpath(filename, base_dir)}")
+self.log.debug("Skippped role file", path=os.path.relpath(filename, base_dir))

ansibledoctor/template.py (new file, 113 lines)
View File

@@ -0,0 +1,113 @@
"""Module for handling templates."""
import atexit
import glob
import ntpath
import os
import shutil
import tempfile
import structlog
from git import GitCommandError, Repo
import ansibledoctor.exception
from ansibledoctor.utils import sysexit_with_message
class Template:
"""
Represents a template that can be used to generate content.
Templates can be sourced from a local file or a Git repository. The `Template` class handles
the initialization and setup of a template, including cloning a Git repository if necessary.
Args:
----
name (str): The name of the template.
src (str): The source of the template, in the format `<provider>><path>`.
Supported providers are `local` and `git`.
Raises:
------
ansibledoctor.exception.TemplateError
"""
def __init__(self, name, src):
self.log = structlog.get_logger()
self.name = name
self.src = src
try:
provider, path = self.src.split(">", 1)
except ValueError as e:
raise ansibledoctor.exception.TemplateError(
"Error reading template src", str(e)
) from e
self.provider = provider.strip().lower()
self.path = path.strip()
if self.provider == "local":
self.path = os.path.realpath(os.path.join(self.path, self.name))
elif self.provider == "git":
repo_url, branch_or_tag = (
self.path.split("#", 1) if "#" in self.path else (self.path, None)
)
temp_dir = self._clone_repo(repo_url, branch_or_tag)
self.path = os.path.join(temp_dir, self.name)
else:
raise ansibledoctor.exception.TemplateError(
f"Unsupported template provider: {provider}"
)
self.files = self._scan_files()
def _clone_repo(self, repo_url, branch_or_tag=None):
temp_dir = tempfile.mkdtemp(prefix="ansibledoctor-")
atexit.register(self._cleanup_temp_dir, temp_dir)
try:
self.log.debug("Cloning template repo", src=repo_url)
repo = Repo.clone_from(repo_url, temp_dir)
if branch_or_tag:
self.log.debug(f"Checking out branch or tag: {branch_or_tag}")
try:
repo.git.checkout(branch_or_tag)
except GitCommandError as e:
raise ansibledoctor.exception.TemplateError(
f"Error checking out branch or tag: {branch_or_tag}: {e}"
) from e
return temp_dir
except GitCommandError as e:
msg = e.stderr.strip("'").strip()
msg = msg.removeprefix("stderr: ")
raise ansibledoctor.exception.TemplateError(
f"Error cloning Git repository: {msg}"
) from e
def _scan_files(self):
"""Search for Jinja2 (.j2) files to apply to the destination."""
template_files = []
if os.path.isdir(self.path):
self.log.info("Lookup template files", src=self.src)
else:
sysexit_with_message("Can not open template directory", path=self.path)
for file in glob.iglob(self.path + "/**/*.j2", recursive=True):
relative_file = file[len(self.path) + 1 :]
if ntpath.basename(file)[:1] != "_":
self.log.debug("Found template file", path=relative_file)
template_files.append(relative_file)
else:
self.log.debug("Skipped template file", path=relative_file)
return template_files
@staticmethod
def _cleanup_temp_dir(temp_dir):
if temp_dir and os.path.exists(temp_dir):
shutil.rmtree(temp_dir)
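For orientation, a minimal usage sketch of the new `Template` class listed above. The base path and the `#main` ref are placeholder assumptions; the `local>` and `git>` prefixes select the provider, and git sources are cloned into a temporary directory that is removed at exit.

```python
from ansibledoctor.template import Template

# Local provider: the template name is appended to the given base path.
# "/tmp/custom_templates" is only an illustrative location.
tpl = Template("readme", "local>/tmp/custom_templates")
print(tpl.path)   # resolved template directory
print(tpl.files)  # relative paths of all discovered *.j2 files

# Git provider: clones the repository first; an optional branch or tag
# can be appended after '#'.
# tpl = Template(
#     "ansibledoctor/templates/readme",
#     "git>https://github.com/thegeeklab/ansible-doctor#main",
# )
```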

View File

@@ -2,10 +2,12 @@
 {% set var = role.var | default({}) %}
 {% if var %}
 - [Default Variables](#default-variables)
+{% if not options.tabulate_vars %}
 {% for key, item in var | dictsort %}
 - [{{ key }}](#{{ key }})
 {% endfor %}
 {% endif %}
+{% endif %}
 {% if tag %}
 - [Discovered Tags](#discovered-tags)
 {% endif %}

View File

@@ -0,0 +1,49 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables
{% set columns = ["variable", "default", "description", "type", "deprecated", "example"] %}
{% set found_columns = ["variable", "default"] + var.values() | map("list") | sum(start=["key"]) | unique | list %}
{% for c in columns %}
{% if c in found_columns %}
|{{ c | capitalize -}}
{% endif %}
{% endfor %}
|
{% for c in columns %}
{% if c in found_columns %}
|{{ "-" * (c | length) -}}
{% endif %}
{% endfor %}
|
{% for key, item in var | dictsort %}
|{{ key -}}
|{{ (item.value | default({}))[key] | default -}}
{% if "description" in found_columns %}
|{{ item.description | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
{% if "type" in found_columns %}
|{{ item.type | default([]) | join("<br />") -}}
{% endif %}
{% if "deprecated" in found_columns %}
|
{%- if "deprecated" in found_columns %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n", "<br />") | safe_join("<br />") %}
{% if deprecated_string -%}
{{ deprecated_string }}
{%- else -%}
True
{%- endif %}
{%- else -%}
False
{%- endif %}
{% endif %}
{% endif %}
{% if "example" in found_columns %}
|{{ item.example | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
|
{% endfor %}
{% endif %}

View File

@@ -23,7 +23,11 @@ summary: {{ meta.summary.value | safe_join(" ") }}
 {% include '_requirements.j2' %}
 {# Vars #}
+{% if options.tabulate_vars %}
+{% include '_vars_tabulated.j2' %}
+{% else %}
 {% include '_vars.j2' %}
+{% endif %}
 {# Tag #}
 {% include '_tag.j2' %}

View File

@@ -15,7 +15,11 @@
 {% include '_requirements.j2' %}
 {# Vars #}
+{% if options.tabulate_vars %}
+{% include '_vars_tabulated.j2' %}
+{% else %}
 {% include '_vars.j2' %}
+{% endif %}
 {# Tag #}
 {% include '_tag.j2' %}

View File

@@ -4,10 +4,12 @@
 {% set var = role.var | default({}) %}
 {% if var %}
 - [Default Variables](#default-variables)
+{% if not options.tabulate_vars %}
 {% for key, item in var | dictsort %}
 - [{{ key }}](#{{ key }})
 {% endfor %}
 {% endif %}
+{% endif %}
 {% if tag %}
 - [Discovered Tags](#discovered-tags)
 {% endif %}

View File

@@ -0,0 +1,49 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables
{% set columns = ["variable", "default", "description", "type", "deprecated", "example"] %}
{% set found_columns = ["variable", "default"] + var.values() | map("list") | sum(start=["key"]) | unique | list %}
{% for c in columns %}
{% if c in found_columns %}
|{{ c | capitalize -}}
{% endif %}
{% endfor %}
|
{% for c in columns %}
{% if c in found_columns %}
|{{ "-" * (c | length) -}}
{% endif %}
{% endfor %}
|
{% for key, item in var | dictsort %}
|{{ key -}}
|{{ (item.value | default({}))[key] | default -}}
{% if "description" in found_columns %}
|{{ item.description | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
{% if "type" in found_columns %}
|{{ item.type | default([]) | join("<br />") -}}
{% endif %}
{% if "deprecated" in found_columns %}
|
{%- if "deprecated" in found_columns %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n", "<br />") | safe_join("<br />") %}
{% if deprecated_string -%}
{{ deprecated_string }}
{%- else -%}
True
{%- endif %}
{%- else -%}
False
{%- endif %}
{% endif %}
{% endif %}
{% if "example" in found_columns %}
|{{ item.example | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
|
{% endfor %}
{% endif %}

View File

@@ -1,339 +0,0 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""
import logging
import os
import sys
from collections.abc import Iterable
import colorama
from pythonjsonlogger import jsonlogger
import ansibledoctor.exception
CONSOLE_FORMAT = "{}{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"
def strtobool(value):
"""Convert a string representation of truth to true or false."""
_map = {
"y": True,
"yes": True,
"t": True,
"true": True,
"on": True,
"1": True,
"n": False,
"no": False,
"f": False,
"false": False,
"off": False,
"0": False
}
try:
return _map[str(value).lower()]
except KeyError as err:
raise ValueError(f'"{value}" is not a valid bool value') from err
def to_bool(string):
return bool(strtobool(str(string)))
def flatten(items):
for x in items:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
yield from flatten(x)
else:
yield x
def _should_do_markup():
py_colors = os.environ.get("PY_COLORS", None)
if py_colors is not None:
return to_bool(py_colors)
return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"
def _split_string(string, delimiter, escape, maxsplit=None):
result = []
current_element = []
iterator = iter(string)
count_split = 0
skip_split = False
for character in iterator:
if maxsplit and count_split >= maxsplit:
skip_split = True
if character == escape and not skip_split:
try:
next_character = next(iterator)
if next_character != delimiter and next_character != escape:
# Do not copy the escape character if it is intended to escape either the
# delimiter or the escape character itself. Copy the escape character
# if it is not used to escape either of these characters.
current_element.append(escape)
current_element.append(next_character)
count_split += 1
except StopIteration:
current_element.append(escape)
elif character == delimiter and not skip_split:
result.append("".join(current_element))
current_element = []
count_split += 1
else:
current_element.append(character)
result.append("".join(current_element))
return result
colorama.init(autoreset=True, strip=not _should_do_markup())
class Singleton(type):
"""Meta singleton class."""
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super().__call__(*args, **kwargs)
return cls._instances[cls]
class LogFilter:
"""Exclude log messages above the logged level."""
def __init__(self, level):
"""
Initialize a new custom log filter.
:param level: Log level limit
:returns: None
"""
self.__level = level
def filter(self, logRecord): # noqa
# https://docs.python.org/3/library/logging.html#logrecord-attributes
return logRecord.levelno <= self.__level
class MultilineFormatter(logging.Formatter):
"""Reset color after newline characters."""
def format(self, record): # noqa
record.msg = record.msg.replace("\n", f"\n{colorama.Style.RESET_ALL}... ")
return logging.Formatter.format(self, record)
class MultilineJsonFormatter(jsonlogger.JsonFormatter):
"""Remove newline characters."""
def format(self, record): # noqa
record.msg = record.msg.replace("\n", " ")
return jsonlogger.JsonFormatter.format(self, record)
class Log:
"""Handle logging."""
def __init__(self, level=logging.WARNING, name="ansibledoctor", json=False):
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.addHandler(self._get_error_handler(json=json))
self.logger.addHandler(self._get_warning_handler(json=json))
self.logger.addHandler(self._get_info_handler(json=json))
self.logger.addHandler(self._get_critical_handler(json=json))
self.logger.addHandler(self._get_debug_handler(json=json))
self.logger.propagate = False
def _get_error_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.ERROR)
handler.addFilter(LogFilter(logging.ERROR))
handler.setFormatter(
MultilineFormatter(
self.error(
CONSOLE_FORMAT.format(
colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_warning_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.WARNING)
handler.addFilter(LogFilter(logging.WARNING))
handler.setFormatter(
MultilineFormatter(
self.warning(
CONSOLE_FORMAT.format(
colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_info_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
handler.addFilter(LogFilter(logging.INFO))
handler.setFormatter(
MultilineFormatter(
self.info(
CONSOLE_FORMAT.format(
colorama.Fore.CYAN, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_critical_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.CRITICAL)
handler.addFilter(LogFilter(logging.CRITICAL))
handler.setFormatter(
MultilineFormatter(
self.critical(
CONSOLE_FORMAT.format(
colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def _get_debug_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
handler.addFilter(LogFilter(logging.DEBUG))
handler.setFormatter(
MultilineFormatter(
self.critical(
CONSOLE_FORMAT.format(
colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
)
)
)
if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
return handler
def set_level(self, s):
self.logger.setLevel(s)
def debug(self, msg):
"""Format info messages and return string."""
return msg
def critical(self, msg):
"""Format critical messages and return string."""
return msg
def error(self, msg):
"""Format error messages and return string."""
return msg
def warning(self, msg):
"""Format warning messages and return string."""
return msg
def info(self, msg):
"""Format info messages and return string."""
return msg
def _color_text(self, color, msg):
"""
Colorize strings.
:param color: colorama color settings
:param msg: string to colorize
:returns: string
"""
return f"{color}{msg}{colorama.Style.RESET_ALL}"
def sysexit(self, code=1):
sys.exit(code)
def sysexit_with_message(self, msg, code=1):
self.logger.critical(str(msg))
self.sysexit(code)
class SingleLog(Log, metaclass=Singleton):
"""Singleton logging class."""
pass
class UnsafeTag:
"""Handle custom yaml unsafe tag."""
yaml_tag = "!unsafe"
def __init__(self, value):
self.unsafe = value
@staticmethod
def yaml_constructor(loader, node):
return loader.construct_scalar(node)
class FileUtils:
"""Mics static methods for file handling."""
@staticmethod
def create_path(path):
os.makedirs(path, exist_ok=True)
@staticmethod
def query_yes_no(question, default=True):
"""
Ask a yes/no question via input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
"""
prompt = "[Y/n]" if default else "[N/y]"
try:
# input method is safe in python3
choice = input(f"{question} {prompt} ") or default # nosec
return to_bool(choice)
except (KeyboardInterrupt, ValueError) as e:
raise ansibledoctor.exception.InputError("Error while reading input", e) from e

View File

@@ -0,0 +1,130 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""
import os
import sys
from collections.abc import Iterable
import structlog
def strtobool(value):
"""Convert a string representation of truth to true or false."""
_map = {
"y": True,
"yes": True,
"t": True,
"true": True,
"on": True,
"1": True,
"n": False,
"no": False,
"f": False,
"false": False,
"off": False,
"0": False,
}
try:
return _map[str(value).lower()]
except KeyError as err:
raise ValueError(f'"{value}" is not a valid bool value') from err
def to_bool(string):
return bool(strtobool(str(string)))
def flatten(items):
for x in items:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
yield from flatten(x)
else:
yield x
def _split_string(string, delimiter, escape, maxsplit=None):
result = []
current_element = []
iterator = iter(string)
count_split = 0
skip_split = False
for character in iterator:
if maxsplit and count_split >= maxsplit:
skip_split = True
if character == escape and not skip_split:
try:
next_character = next(iterator)
if next_character != delimiter and next_character != escape:
# Do not copy the escape character if it is intended to escape either the
# delimiter or the escape character itself. Copy the escape character
# if it is not used to escape either of these characters.
current_element.append(escape)
current_element.append(next_character)
count_split += 1
except StopIteration:
current_element.append(escape)
elif character == delimiter and not skip_split:
result.append("".join(current_element))
current_element = []
count_split += 1
else:
current_element.append(character)
result.append("".join(current_element))
return result
def sysexit(code=1):
sys.exit(code)
def sysexit_with_message(msg, code=1, **kwargs):
structlog.get_logger().critical(str(msg).strip(), **kwargs)
sysexit(code)
class Singleton(type):
"""Meta singleton class."""
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super().__call__(*args, **kwargs)
return cls._instances[cls]
class FileUtils:
"""Mics static methods for file handling."""
@staticmethod
def create_path(path):
os.makedirs(path, exist_ok=True)
@staticmethod
def query_yes_no(question, default=True):
"""
Ask a yes/no question via input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
"""
prompt = "[Y/n]" if default else "[N/y]"
while True:
try:
# input method is safe in python3
choice = input(f"{question} {prompt} ") or default # nosec
return to_bool(choice)
except ValueError:
print("Invalid input. Please enter 'y' or 'n'.") # noqa: T201
except KeyboardInterrupt as e:
raise e
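A short illustrative sketch of the replacement for the old `SingleLog.sysexit_with_message`: the message becomes a structlog critical event, any keyword arguments are attached as structured context, and the process exits with the given code. The file name below is only an example.

```python
from ansibledoctor.utils import sysexit_with_message

# Hypothetical call site mirroring how the parser and generator use the helper.
try:
    with open("defaults/main.yml", encoding="utf8") as yamlfile:
        yamlfile.read()
except OSError as e:
    sysexit_with_message("Failed to read yaml file", path="defaults/main.yml", error=e)
```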

View File

@@ -0,0 +1,78 @@
"""Utils for YAML file operations."""
from collections import defaultdict
from contextlib import suppress
import ruamel.yaml
import yaml
from ansible.parsing.yaml.loader import AnsibleLoader
import ansibledoctor.exception
class UnsafeTag:
"""Handle custom yaml unsafe tag."""
yaml_tag = "!unsafe"
def __init__(self, value):
self.unsafe = value
@staticmethod
def yaml_constructor(loader, node):
return loader.construct_scalar(node)
def parse_yaml_ansible(yamlfile):
try:
loader = AnsibleLoader(yamlfile)
data = loader.get_single_data() or []
except (
yaml.parser.ParserError,
yaml.scanner.ScannerError,
yaml.constructor.ConstructorError,
yaml.composer.ComposerError,
) as e:
raise ansibledoctor.exception.YAMLError(e) from e
return data
def parse_yaml(yamlfile):
try:
ruamel.yaml.add_constructor(
UnsafeTag.yaml_tag,
UnsafeTag.yaml_constructor,
constructor=ruamel.yaml.SafeConstructor,
)
data = ruamel.yaml.YAML(typ="rt").load(yamlfile)
_yaml_remove_comments(data)
data = defaultdict(dict, data or {})
except (
ruamel.yaml.parser.ParserError,
ruamel.yaml.scanner.ScannerError,
ruamel.yaml.constructor.ConstructorError,
ruamel.yaml.composer.ComposerError,
) as e:
raise ansibledoctor.exception.YAMLError(e) from e
return data
def _yaml_remove_comments(d):
if isinstance(d, dict):
for k, v in d.items():
_yaml_remove_comments(k)
_yaml_remove_comments(v)
elif isinstance(d, list):
for elem in d:
_yaml_remove_comments(elem)
with suppress(AttributeError):
attr = (
"comment"
if isinstance(d, ruamel.yaml.scalarstring.ScalarString)
else ruamel.yaml.comments.Comment.attrib
)
delattr(d, attr)
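To illustrate the error path of the new helpers, a small sketch with a made-up, intentionally broken defaults snippet: the underlying ruamel.yaml scanner and parser exceptions are re-raised as the project's `YAMLError`, which callers such as `Parser` then hand to `sysexit_with_message`.

```python
import io

from ansibledoctor.exception import YAMLError
from ansibledoctor.utils.yamlhelper import parse_yaml

# Unterminated flow sequence, guaranteed to fail parsing.
broken = io.StringIO("demo_role_enabled: [true\n")
try:
    parse_yaml(broken)
except YAMLError as e:
    print(f"Failed to read yaml file: {e}")
```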

View File

@@ -4,11 +4,12 @@ title: Using pip
 ```Shell
 # From PyPI as unprivileged user
-$ pip install ansible-doctor --user
+$ pip install ansible-doctor[ansible-core] --user
 # .. or as root
-$ sudo pip install ansible-doctor
+$ sudo pip install ansible-doctor[ansible-core]
 # From Wheel file
-$ pip install https://github.com/thegeeklab/ansible-doctor/releases/download/v0.1.1/ansible_doctor-0.1.1-py2.py3-none-any.whl
+# Please check first whether a newer version is available.
+$ pip install https://github.com/thegeeklab/ansible-doctor/releases/download/v3.1.4/ansible_doctor-3.1.4-py2.py3-none-any.whl[ansible-core]
 ```

View File

@@ -20,36 +20,17 @@ Configuration options can be set in different places, which are processed in the
 ---
 # Default is the current working directory.
 base_dir:
-# Default is the basename of 'role_name'.
-role_name:
-# Auto-detect if the given directory is a role, can be disabled
-# to parse loose files instead.
-role_detection: True
-# Don't write anything to file system
+role:
+  # Default is the basename of 'role_name'.
+  name:
+  # Auto-detect if the given directory is a role, can be disabled
+  # to parse loose files instead.
+  autodetect: True
+# Don't write anything to file system.
 dry_run: False
-logging:
-  # Possible options debug | info | warning | error | critical
-  level: "warning"
-  # Json logging can be enabled if a parsable output is required
-  json: False
-# Path to write rendered template file. Default is the current working directory.
-output_dir:
-# Default is built-in templates directory.
-template_dir:
-template: readme
-# By default, double spaces, spaces before and after line breaks or tab characters, etc.
-# are automatically removed before the template is rendered. As a result, indenting
-# with spaces does not work. If you want to use spaces to indent text, you must disable
-# this option.
-template_autotrim: True
-# Don't ask to overwrite if output file exists.
-force_overwrite: False
-# Load custom header from given file and append template output to it before write.
-custom_header: ""
 exclude_files: []
 # Examples
 # exclude_files:
@@ -59,6 +40,62 @@ exclude_files: []
 # Exclude tags from automatic detection. Configured tags are only skipped
 # if the tag is not used in an annotation.
 exclude_tags: []
+logging:
+  # Possible options: debug|info|warning|error|critical
+  level: "warning"
+  # JSON logging can be enabled if a parsable output is required.
+  json: False
+template:
+  # Name of the template to be used. In most cases, this is the name of a directory that is
+  # attached to the `src` path or Git repo (see example below).
+  name: readme
+  # Template provider source. Currently supported providers are `local|git`.
+  # The `local` provider loads templates from the local file system. This provider
+  # is used by default and uses the built-in templates.
+  #
+  # Examples:
+  # template:
+  #   name: readme
+  #   src: local>/tmp/custom_templates/
+  #
+  # The `git` provider allows templates to be loaded from a git repository. At the moment
+  # the functions of this provider are limited and only public repositories are supported.
+  #
+  # Examples:
+  # template:
+  #   src: git>https://github.com/thegeeklab/ansible-doctor
+  #   name: ansibledoctor/templates/readme
+  #
+  # template:
+  #   src: git>git@github.com:thegeeklab/ansible-doctor.git
+  #   name: ansibledoctor/templates/readme
+  #
+  # template:
+  #   src: git>git@github.com:thegeeklab/ansible-doctor.git#branch-or-tag
+  #   name: ansibledoctor/templates/readme
+  src:
+  options:
+    # Configures whether to tabulate variables in the output. When set to `True`,
+    # variables will be displayed in a tabular format instead of plain markdown sections.
+    # NOTE: This option does not support rendering multiline code blocks.
+    tabulate_vars: False
+renderer:
+  # By default, double spaces, spaces before and after line breaks or tab characters, etc.
+  # are automatically removed before the template is rendered. As a result, indenting
+  # with spaces does not work. If you want to use spaces to indent text, you must disable
+  # this option.
+  autotrim: True
+  # Load custom header from given file and append template output to it before write.
+  include_header: ""
+  # Path to write rendered template file. Default is the current working directory.
+  dest:
+  # Don't ask to overwrite if output file exists.
+  force_overwrite: False
 ```
 ## CLI
@@ -91,22 +128,25 @@ options:
 ## Environment Variables
 ```Shell
-ANSIBLE_DOCTOR_CONFIG_FILE=
-ANSIBLE_DOCTOR_ROLE_DETECTION=true
 ANSIBLE_DOCTOR_BASE_DIR=
-ANSIBLE_DOCTOR_RECURSIVE=false
-ANSIBLE_DOCTOR_ROLE_NAME=
-ANSIBLE_DOCTOR_DRY_RUN=false
-ANSIBLE_DOCTOR_LOG_LEVEL=warning
-ANSIBLE_DOCTOR_LOG_JSON=false
-ANSIBLE_DOCTOR_OUTPUT_DIR=
-ANSIBLE_DOCTOR_TEMPLATE_DIR=
-ANSIBLE_DOCTOR_TEMPLATE=readme
-ANSIBLE_DOCTOR_TEMPLATE_AUTOTRIM=true
-ANSIBLE_DOCTOR_FORCE_OVERWRITE=false
-ANSIBLE_DOCTOR_CUSTOM_HEADER=
+ANSIBLE_DOCTOR_DRY_RUN=False
 ANSIBLE_DOCTOR_EXCLUDE_FILES=
-ANSIBLE_DOCTOR_EXCLUDE_FILES=molecule/,files/**/*.py
+ANSIBLE_DOCTOR_EXCLUDE_TAGS=
+ANSIBLE_DOCTOR_ROLE__NAME=
+ANSIBLE_DOCTOR_ROLE__AUTODETECT=True
+ANSIBLE_DOCTOR_LOGGING__LEVEL="warning"
+ANSIBLE_DOCTOR_LOGGING__JSON=False
+ANSIBLE_DOCTOR_TEMPLATE__NAME=readme
+ANSIBLE_DOCTOR_TEMPLATE__SRC=
+ANSIBLE_DOCTOR_TEMPLATE__OPTIONS__TABULATE_VARS=False
+ANSIBLE_DOCTOR_RENDERER__AUTOTRIM=True
+ANSIBLE_DOCTOR_RENDERER__INCLUDE_HEADER=
+ANSIBLE_DOCTOR_RENDERER__DEST=
+ANSIBLE_DOCTOR_RENDERER__FORCE_OVERWRITE=False
 ```
 ## Pre-Commit setup
@@ -119,8 +159,8 @@ To use _ansible-doctor_ with the [pre-commit](https://pre-commit.com/) framework
 {{< highlight yaml "linenos=table" >}}
 - repo: https://github.com/thegeeklab/ansible-doctor
-  # change ref to the latest release from https://github.com/thegeeklab/ansible-doctor/releases
-  rev: v1.4.8
+  # update version with `pre-commit autoupdate`
+  rev: v4.0.4
 hooks:
 - id: ansible-doctor
 {{< /highlight >}}

View File

@@ -1,5 +1,10 @@
 ---
-custom_header: HEADER.md
 logging:
   level: debug
-template: readme
+template:
+  src: git>https://github.com/thegeeklab/ansible-doctor
+  name: ansibledoctor/templates/readme
+renderer:
+  include_header: HEADER.md

View File

@@ -204,7 +204,6 @@ demo_role_unset: some_value
 ## Dependencies
-- role1
 - role2
 ## License

View File

@@ -18,6 +18,5 @@ galaxy_info:
 - documentation
 dependencies:
-- role1
 - role: role2
 - name: namespace.role3

View File

@@ -11,6 +11,8 @@
 - name: Demo task with a tag list
   debug:
     msg: "Demo message"
+    tags:
+      - module-tag
   tags:
     - role-tag1
     - role-tag2

View File

@@ -1,5 +1,9 @@
 ---
-custom_header: HEADER.md
 logging:
   level: debug
-template: readme
+template:
+  name: readme
+renderer:
+  include_header: HEADER.md

poetry.lock (generated, 1019 lines changed)

File diff suppressed because it is too large

View File

@@ -10,7 +10,6 @@ classifiers = [
 "Natural Language :: English",
 "Operating System :: POSIX",
 "Programming Language :: Python :: 3",
-"Programming Language :: Python :: 3.8",
 "Programming Language :: Python :: 3.9",
 "Programming Language :: Python :: 3.10",
 "Programming Language :: Python :: 3.11",
@@ -22,42 +21,42 @@ classifiers = [
 description = "Generate documentation from annotated Ansible roles using templates."
 documentation = "https://ansible-doctor.geekdocs.de/"
 homepage = "https://ansible-doctor.geekdocs.de/"
-include = [
-"LICENSE",
-]
+include = ["LICENSE"]
 keywords = ["ansible", "role", "documentation"]
 license = "GPL-3.0-only"
 name = "ansible-doctor"
-packages = [
-{include = "ansibledoctor"},
-]
+packages = [{ include = "ansibledoctor" }]
 readme = "README.md"
 repository = "https://github.com/thegeeklab/ansible-doctor/"
 version = "0.0.0"
 [tool.poetry.dependencies]
-Jinja2 = "3.1.2"
-anyconfig = "0.13.0"
+Jinja2 = "3.1.4"
+anyconfig = "0.14.0"
 appdirs = "1.4.4"
 colorama = "0.4.6"
-environs = "9.5.0"
-jsonschema = "4.19.2"
-nested-lookup = "0.2.25"
-pathspec = "0.11.2"
-python = "^3.8.0"
-python-json-logger = "2.0.7"
-"ruamel.yaml" = "0.18.5"
+pathspec = "0.12.1"
+python = "^3.9.0"
+"ruamel.yaml" = "0.18.6"
+dynaconf = "3.2.5"
+gitpython = "3.1.43"
+ansible-core = { version = "2.14.17", optional = true }
+structlog = "24.2.0"
+[tool.poetry.extras]
+ansible-core = ["ansible-core"]
 [tool.poetry.scripts]
 ansible-doctor = "ansibledoctor.cli:main"
 [tool.poetry.group.dev.dependencies]
-ruff = "0.1.3"
-pytest = "7.4.3"
-pytest-mock = "3.12.0"
-pytest-cov = "4.1.0"
+ruff = "0.4.9"
+pytest = "8.2.2"
+pytest-mock = "3.14.0"
+pytest-cov = "5.0.0"
 toml = "0.10.2"
-yapf = "0.40.2"
+j2lint = "1.1.0"
 [tool.poetry-dynamic-versioning]
 enable = true
@@ -81,17 +80,22 @@ requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
 [tool.ruff]
 exclude = [
 ".git",
 "__pycache__",
 "build",
 "dist",
 "test",
 "*.pyc",
 "*.egg-info",
 ".cache",
 ".eggs",
 "env*",
 ]
+line-length = 99
+indent-width = 4
+[tool.ruff.lint]
 # Explanation of errors
 #
 # D102: Missing docstring in public method
@@ -102,47 +106,39 @@ exclude = [
 # D203: One blank line required before class docstring
 # D212: Multi-line docstring summary should start at the first line
 ignore = [
 "D102",
 "D103",
 "D105",
 "D107",
 "D202",
 "D203",
 "D212",
 "UP038",
 "RUF012",
 ]
-line-length = 99
 select = [
 "D",
 "E",
 "F",
 "Q",
 "W",
 "I",
 "S",
 "BLE",
 "N",
 "UP",
 "B",
 "A",
 "C4",
 "T20",
 "SIM",
 "RET",
 "ARG",
 "ERA",
 "RUF",
 ]
-[tool.ruff.flake8-quotes]
-inline-quotes = "double"
-[tool.yapf]
-based_on_style = "google"
-column_limit = 99
-dedent_closing_brackets = true
-coalesce_brackets = true
-split_before_logical_operator = true
-indent_dictionary_value = true
-allow_split_before_dict_value = false
+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+line-ending = "lf"

View File

@@ -1,4 +1,17 @@
 {
 "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-"extends": ["github>thegeeklab/renovate-presets"]
+"extends": ["github>thegeeklab/renovate-presets"],
+"packageRules": [
+  {
+    "description": "Ansible base dependencies",
+    "matchPackageNames": ["ansible-core"],
+    "separateMinorPatch": true
+  },
+  {
+    "matchManagers": ["woodpecker"],
+    "matchFileNames": [".woodpecker/test.yml"],
+    "matchPackageNames": ["docker.io/library/python"],
+    "enabled": false
+  }
+]
 }