Mirror of https://github.com/thegeeklab/ansible-later.git, synced 2024-11-16 01:50:39 +00:00
Compare commits
624 Commits
Commit list (624 commits): only abbreviated commit SHAs survived in this export; the author, date, and message columns are empty, so the table rows are not reproduced here.
23 CHANGELOG.tpl.md
@ -1,23 +0,0 @@
# Changelog

{{ range .Versions -}}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})

{{ range .CommitGroups -}}
### {{ .Title }}

{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ (regexReplaceAll "(Co-\\w*-by.*)" .Subject "") | trim }}
{{ end }}
{{- end -}}

{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}

{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
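For orientation: the deleted template above is a git-chglog layout. Rendered against a tag it would emit a Markdown heading such as `## [v1.2.3](https://github.com/thegeeklab/ansible-later/compare/v1.2.2...v1.2.3) (2023-01-01)` followed by `### Features` / `### Bug Fixes` groups listing the matching commit subjects; the version numbers and date in this example are placeholders, not values taken from the repository history.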
@ -1,25 +0,0 @@
style: github
template: CHANGELOG.tpl.md
info:
  title: CHANGELOG
  repository_url: https://github.com/thegeeklab/ansible-later
options:
  commit_groups:
    title_maps:
      feat: Features
      fix: Bug Fixes
      perf: Performance Improvements
      refactor: Code Refactoring
      chore: Others
      test: Testing
      ci: CI Pipeline
      docs: Documentation
  header:
    pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
    pattern_maps:
      - Type
      - Scope
      - Subject
  notes:
    keywords:
      - BREAKING CHANGE
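The header pattern in the deleted git-chglog configuration above splits a Conventional-Commits-style subject into the three pattern_maps fields. As an illustrative example (not a commit from this history), `fix(parser): handle empty playbooks` would map to Type `fix`, Scope `parser`, and Subject `handle empty playbooks`, and would be grouped under "Bug Fixes" by the title_maps table.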
@ -18,8 +18,9 @@ HostVars
 Rolesfile
 Makefile
 Jinja2
-ANSIBLE([0-9]{4})
-LINT([0-9]{4})
+ANS([0-9]{3})
+YML([0-9]{3})
 SCM
 bools
 Check[A-Z].+
+(P|p)re-(C|c)ommit
510 .drone.jsonnet
@ -1,510 +0,0 @@
local PythonVersion(pyversion='3.7') = {
  name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
  image: 'python:' + pyversion,
  environment: {
    PY_COLORS: 1,
  },
  commands: [
    'pip install poetry poetry-dynamic-versioning -qq',
    'poetry config experimental.new-installer false',
    'poetry install -E ansible-core',
    'poetry run pytest',
    'poetry version',
    'poetry run ansible-later --help',
  ],
  depends_on: [
    'fetch',
  ],
};

local PipelineLint = {
  kind: 'pipeline',
  name: 'lint',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'yapf',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry config experimental.new-installer false',
        'poetry install',
        'poetry run yapf -dr ./ansiblelater',
      ],
    },
    {
      name: 'flake8',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry install -E ansible-core',
        'poetry run flake8 ./ansiblelater',
      ],
    },
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineTest = {
  kind: 'pipeline',
  name: 'test',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'fetch',
      image: 'python:3.10',
      commands: [
        'git fetch -tq',
      ],
    },
    PythonVersion(pyversion='3.7'),
    PythonVersion(pyversion='3.8'),
    PythonVersion(pyversion='3.9'),
    PythonVersion(pyversion='3.10'),
    {
      name: 'codecov',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
        CODECOV_TOKEN: { from_secret: 'codecov_token' },
      },
      commands: [
        'pip install codecov -qq',
        'codecov --required -X gcov',
      ],
      depends_on: [
        'python37-pytest',
        'python38-pytest',
        'python39-pytest',
        'python310-pytest',
      ],
    },
  ],
  depends_on: [
    'lint',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineSecurity = {
  kind: 'pipeline',
  name: 'security',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'bandit',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry install -E ansible-core',
        'poetry run bandit -r ./ansiblelater -x ./ansiblelater/test',
      ],
    },
  ],
  depends_on: [
    'test',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineBuildPackage = {
  kind: 'pipeline',
  name: 'build-package',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'build',
      image: 'python:3.10',
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry build',
      ],
    },
    {
      name: 'checksum',
      image: 'alpine',
      commands: [
        'cd dist/ && sha256sum * > ../sha256sum.txt',
      ],
    },
    {
      name: 'changelog-generate',
      image: 'thegeeklab/git-chglog',
      commands: [
        'git fetch -tq',
        'git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}',
      ],
    },
    {
      name: 'changelog-format',
      image: 'thegeeklab/alpine-tools',
      commands: [
        'prettier CHANGELOG.md',
        'prettier -w CHANGELOG.md',
      ],
    },
    {
      name: 'publish-github',
      image: 'plugins/github-release',
      settings: {
        overwrite: true,
        api_key: { from_secret: 'github_token' },
        files: ['dist/*', 'sha256sum.txt'],
        title: '${DRONE_TAG}',
        note: 'CHANGELOG.md',
      },
      when: {
        ref: ['refs/tags/**'],
      },
    },
    {
      name: 'publish-pypi',
      image: 'python:3.10',
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry publish -n',
      ],
      environment: {
        POETRY_HTTP_BASIC_PYPI_USERNAME: { from_secret: 'pypi_username' },
        POETRY_HTTP_BASIC_PYPI_PASSWORD: { from_secret: 'pypi_password' },
      },
      when: {
        ref: ['refs/tags/**'],
      },
    },
  ],
  depends_on: [
    'security',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineBuildContainer(arch='amd64') = {
  local build = if arch == 'arm' then [{
    name: 'build',
    image: 'python:3.10-alpine',
    commands: [
      'apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo',
      'git fetch -tq',
      'pip install poetry poetry-dynamic-versioning -qq',
      'poetry build',
    ],
    environment: {
      CARGO_NET_GIT_FETCH_WITH_CLI: true,
    },
  }] else [{
    name: 'build',
    image: 'python:3.10',
    commands: [
      'git fetch -tq',
      'pip install poetry poetry-dynamic-versioning -qq',
      'poetry build',
    ],
  }],

  kind: 'pipeline',
  name: 'build-container-' + arch,
  platform: {
    os: 'linux',
    arch: arch,
  },
  steps: build + [
    {
      name: 'dryrun',
      image: 'thegeeklab/drone-docker:19',
      settings: {
        dry_run: true,
        dockerfile: 'docker/Dockerfile.' + arch,
        repo: 'thegeeklab/${DRONE_REPO_NAME}',
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
      },
      depends_on: ['build'],
      when: {
        ref: ['refs/pull/**'],
      },
    },
    {
      name: 'publish-dockerhub',
      image: 'thegeeklab/drone-docker:19',
      settings: {
        auto_tag: true,
        auto_tag_suffix: arch,
        dockerfile: 'docker/Dockerfile.' + arch,
        repo: 'thegeeklab/${DRONE_REPO_NAME}',
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
      },
      when: {
        ref: ['refs/heads/main', 'refs/tags/**'],
      },
      depends_on: ['dryrun'],
    },
    {
      name: 'publish-quay',
      image: 'thegeeklab/drone-docker:19',
      settings: {
        auto_tag: true,
        auto_tag_suffix: arch,
        dockerfile: 'docker/Dockerfile.' + arch,
        registry: 'quay.io',
        repo: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
        username: { from_secret: 'quay_username' },
        password: { from_secret: 'quay_password' },
      },
      when: {
        ref: ['refs/heads/main', 'refs/tags/**'],
      },
      depends_on: ['dryrun'],
    },
  ],
  depends_on: [
    'security',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineDocs = {
  kind: 'pipeline',
  name: 'docs',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  concurrency: {
    limit: 1,
  },
  steps: [
    {
      name: 'assets',
      image: 'thegeeklab/alpine-tools',
      commands: [
        'make doc',
      ],
    },
    {
      name: 'markdownlint',
      image: 'thegeeklab/markdownlint-cli',
      commands: [
        "markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'",
      ],
    },
    {
      name: 'spellcheck',
      image: 'node:lts-alpine',
      commands: [
        'npm install -g spellchecker-cli',
        "spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions",
      ],
      environment: {
        FORCE_COLOR: true,
        NPM_CONFIG_LOGLEVEL: 'error',
      },
    },
    {
      name: 'testbuild',
      image: 'thegeeklab/hugo:0.83.1',
      commands: [
        'hugo -s docs/ -b http://localhost/',
      ],
    },
    {
      name: 'link-validation',
      image: 'thegeeklab/link-validator',
      commands: [
        'link-validator -ro',
      ],
      environment: {
        LINK_VALIDATOR_BASE_DIR: 'docs/public',
      },
    },
    {
      name: 'build',
      image: 'thegeeklab/hugo:0.83.1',
      commands: [
        'hugo -s docs/',
      ],
    },
    {
      name: 'beautify',
      image: 'node:lts-alpine',
      commands: [
        'npm install -g js-beautify',
        "html-beautify -r -f 'docs/public/**/*.html'",
      ],
      environment: {
        FORCE_COLOR: true,
        NPM_CONFIG_LOGLEVEL: 'error',
      },
    },
    {
      name: 'publish',
      image: 'plugins/s3-sync',
      settings: {
        access_key: { from_secret: 's3_access_key' },
        bucket: 'geekdocs',
        delete: true,
        endpoint: 'https://sp.rknet.org',
        path_style: true,
        secret_key: { from_secret: 's3_secret_access_key' },
        source: 'docs/public/',
        strip_prefix: 'docs/public/',
        target: '/${DRONE_REPO_NAME}',
      },
      when: {
        ref: ['refs/heads/main', 'refs/tags/**'],
      },
    },
  ],
  depends_on: [
    'build-package',
    'build-container-amd64',
    'build-container-arm64',
    'build-container-arm',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineNotifications = {
  kind: 'pipeline',
  name: 'notifications',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      image: 'plugins/manifest',
      name: 'manifest-dockerhub',
      settings: {
        ignore_missing: true,
        auto_tag: true,
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
        spec: 'docker/manifest.tmpl',
      },
      when: {
        status: ['success'],
      },
    },
    {
      image: 'plugins/manifest',
      name: 'manifest-quay',
      settings: {
        ignore_missing: true,
        auto_tag: true,
        username: { from_secret: 'quay_username' },
        password: { from_secret: 'quay_password' },
        spec: 'docker/manifest-quay.tmpl',
      },
      when: {
        status: ['success'],
      },
    },
    {
      name: 'pushrm-dockerhub',
      pull: 'always',
      image: 'chko/docker-pushrm:1',
      environment: {
        DOCKER_PASS: {
          from_secret: 'docker_password',
        },
        DOCKER_USER: {
          from_secret: 'docker_username',
        },
        PUSHRM_FILE: 'README.md',
        PUSHRM_SHORT: 'Another best practice scanner for Ansible roles and playbooks',
        PUSHRM_TARGET: 'thegeeklab/${DRONE_REPO_NAME}',
      },
      when: {
        status: ['success'],
      },
    },
    {
      name: 'pushrm-quay',
      pull: 'always',
      image: 'chko/docker-pushrm:1',
      environment: {
        APIKEY__QUAY_IO: {
          from_secret: 'quay_token',
        },
        PUSHRM_FILE: 'README.md',
        PUSHRM_TARGET: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
      },
      when: {
        status: ['success'],
      },
    },
    {
      name: 'matrix',
      image: 'thegeeklab/drone-matrix',
      settings: {
        homeserver: { from_secret: 'matrix_homeserver' },
        roomid: { from_secret: 'matrix_roomid' },
        template: 'Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}',
        username: { from_secret: 'matrix_username' },
        password: { from_secret: 'matrix_password' },
      },
      when: {
        status: ['success', 'failure'],
      },
    },
  ],
  depends_on: [
    'docs',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**'],
    status: ['success', 'failure'],
  },
};

[
  PipelineLint,
  PipelineTest,
  PipelineSecurity,
  PipelineBuildPackage,
  PipelineBuildContainer(arch='amd64'),
  PipelineBuildContainer(arch='arm64'),
  PipelineBuildContainer(arch='arm'),
  PipelineDocs,
  PipelineNotifications,
]
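The deleted .drone.yml that follows appears to be the YAML these Jsonnet definitions render to: the same lint, test, security, build-package, build-container, docs, and notifications pipelines, plus the Drone signature block.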
652 .drone.yml
@ -1,652 +0,0 @@
---
kind: pipeline
name: lint

platform:
  os: linux
  arch: amd64

steps:
  - name: yapf
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry run yapf -dr ./ansiblelater
    environment:
      PY_COLORS: 1

  - name: flake8
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run flake8 ./ansiblelater
    environment:
      PY_COLORS: 1

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

---
kind: pipeline
name: test

platform:
  os: linux
  arch: amd64

steps:
  - name: fetch
    image: python:3.10
    commands:
      - git fetch -tq

  - name: python37-pytest
    image: python:3.7
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install -E ansible-core
      - poetry run pytest
      - poetry version
      - poetry run ansible-later --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: python38-pytest
    image: python:3.8
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install -E ansible-core
      - poetry run pytest
      - poetry version
      - poetry run ansible-later --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: python39-pytest
    image: python:3.9
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install -E ansible-core
      - poetry run pytest
      - poetry version
      - poetry run ansible-later --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: python310-pytest
    image: python:3.10
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install -E ansible-core
      - poetry run pytest
      - poetry version
      - poetry run ansible-later --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: codecov
    image: python:3.10
    commands:
      - pip install codecov -qq
      - codecov --required -X gcov
    environment:
      CODECOV_TOKEN:
        from_secret: codecov_token
      PY_COLORS: 1
    depends_on:
      - python37-pytest
      - python38-pytest
      - python39-pytest
      - python310-pytest

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - lint

---
kind: pipeline
name: security

platform:
  os: linux
  arch: amd64

steps:
  - name: bandit
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run bandit -r ./ansiblelater -x ./ansiblelater/test
    environment:
      PY_COLORS: 1

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - test

---
kind: pipeline
name: build-package

platform:
  os: linux
  arch: amd64

steps:
  - name: build
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: checksum
    image: alpine
    commands:
      - cd dist/ && sha256sum * > ../sha256sum.txt

  - name: changelog-generate
    image: thegeeklab/git-chglog
    commands:
      - git fetch -tq
      - git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}

  - name: changelog-format
    image: thegeeklab/alpine-tools
    commands:
      - prettier CHANGELOG.md
      - prettier -w CHANGELOG.md

  - name: publish-github
    image: plugins/github-release
    settings:
      api_key:
        from_secret: github_token
      files:
        - dist/*
        - sha256sum.txt
      note: CHANGELOG.md
      overwrite: true
      title: ${DRONE_TAG}
    when:
      ref:
        - refs/tags/**

  - name: publish-pypi
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry publish -n
    environment:
      POETRY_HTTP_BASIC_PYPI_PASSWORD:
        from_secret: pypi_password
      POETRY_HTTP_BASIC_PYPI_USERNAME:
        from_secret: pypi_username
    when:
      ref:
        - refs/tags/**

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: build-container-amd64

platform:
  os: linux
  arch: amd64

steps:
  - name: build
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: dryrun
    image: thegeeklab/drone-docker:19
    settings:
      dockerfile: docker/Dockerfile.amd64
      dry_run: true
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/pull/**
    depends_on:
      - build

  - name: publish-dockerhub
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: amd64
      dockerfile: docker/Dockerfile.amd64
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

  - name: publish-quay
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: amd64
      dockerfile: docker/Dockerfile.amd64
      password:
        from_secret: quay_password
      registry: quay.io
      repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: quay_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: build-container-arm64

platform:
  os: linux
  arch: arm64

steps:
  - name: build
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: dryrun
    image: thegeeklab/drone-docker:19
    settings:
      dockerfile: docker/Dockerfile.arm64
      dry_run: true
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/pull/**
    depends_on:
      - build

  - name: publish-dockerhub
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm64
      dockerfile: docker/Dockerfile.arm64
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

  - name: publish-quay
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm64
      dockerfile: docker/Dockerfile.arm64
      password:
        from_secret: quay_password
      registry: quay.io
      repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: quay_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: build-container-arm

platform:
  os: linux
  arch: arm

steps:
  - name: build
    image: python:3.10-alpine
    commands:
      - apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build
    environment:
      CARGO_NET_GIT_FETCH_WITH_CLI: true

  - name: dryrun
    image: thegeeklab/drone-docker:19
    settings:
      dockerfile: docker/Dockerfile.arm
      dry_run: true
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/pull/**
    depends_on:
      - build

  - name: publish-dockerhub
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm
      dockerfile: docker/Dockerfile.arm
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

  - name: publish-quay
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm
      dockerfile: docker/Dockerfile.arm
      password:
        from_secret: quay_password
      registry: quay.io
      repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: quay_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: docs

platform:
  os: linux
  arch: amd64

concurrency:
  limit: 1

steps:
  - name: assets
    image: thegeeklab/alpine-tools
    commands:
      - make doc

  - name: markdownlint
    image: thegeeklab/markdownlint-cli
    commands:
      - markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'

  - name: spellcheck
    image: node:lts-alpine
    commands:
      - npm install -g spellchecker-cli
      - spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions
    environment:
      FORCE_COLOR: true
      NPM_CONFIG_LOGLEVEL: error

  - name: testbuild
    image: thegeeklab/hugo:0.83.1
    commands:
      - hugo -s docs/ -b http://localhost/

  - name: link-validation
    image: thegeeklab/link-validator
    commands:
      - link-validator -ro
    environment:
      LINK_VALIDATOR_BASE_DIR: docs/public

  - name: build
    image: thegeeklab/hugo:0.83.1
    commands:
      - hugo -s docs/

  - name: beautify
    image: node:lts-alpine
    commands:
      - npm install -g js-beautify
      - html-beautify -r -f 'docs/public/**/*.html'
    environment:
      FORCE_COLOR: true
      NPM_CONFIG_LOGLEVEL: error

  - name: publish
    image: plugins/s3-sync
    settings:
      access_key:
        from_secret: s3_access_key
      bucket: geekdocs
      delete: true
      endpoint: https://sp.rknet.org
      path_style: true
      secret_key:
        from_secret: s3_secret_access_key
      source: docs/public/
      strip_prefix: docs/public/
      target: /${DRONE_REPO_NAME}
    when:
      ref:
        - refs/heads/main
        - refs/tags/**

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - build-package
  - build-container-amd64
  - build-container-arm64
  - build-container-arm

---
kind: pipeline
name: notifications

platform:
  os: linux
  arch: amd64

steps:
  - name: manifest-dockerhub
    image: plugins/manifest
    settings:
      auto_tag: true
      ignore_missing: true
      password:
        from_secret: docker_password
      spec: docker/manifest.tmpl
      username:
        from_secret: docker_username
    when:
      status:
        - success

  - name: manifest-quay
    image: plugins/manifest
    settings:
      auto_tag: true
      ignore_missing: true
      password:
        from_secret: quay_password
      spec: docker/manifest-quay.tmpl
      username:
        from_secret: quay_username
    when:
      status:
        - success

  - name: pushrm-dockerhub
    pull: always
    image: chko/docker-pushrm:1
    environment:
      DOCKER_PASS:
        from_secret: docker_password
      DOCKER_USER:
        from_secret: docker_username
      PUSHRM_FILE: README.md
      PUSHRM_SHORT: Another best practice scanner for Ansible roles and playbooks
      PUSHRM_TARGET: thegeeklab/${DRONE_REPO_NAME}
    when:
      status:
        - success

  - name: pushrm-quay
    pull: always
    image: chko/docker-pushrm:1
    environment:
      APIKEY__QUAY_IO:
        from_secret: quay_token
      PUSHRM_FILE: README.md
      PUSHRM_TARGET: quay.io/thegeeklab/${DRONE_REPO_NAME}
    when:
      status:
        - success

  - name: matrix
    image: thegeeklab/drone-matrix
    settings:
      homeserver:
        from_secret: matrix_homeserver
      password:
        from_secret: matrix_password
      roomid:
        from_secret: matrix_roomid
      template: "Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}"
      username:
        from_secret: matrix_username
    when:
      status:
        - success
        - failure

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
  status:
    - success
    - failure

depends_on:
  - docs

---
kind: signature
hmac: 978f8129485afdd50c04d9cf48b9cb7835ea4152d4688004d887827260fc7426

...
10 .github/settings.yml
@ -1,7 +1,6 @@
 repository:
   name: ansible-later
   description: Another best practice scanner for Ansible roles and playbooks
-  homepage: https://ansible-later.geekdocs.de
   topics: ansible, ansible-later, ansible-review, best practice

   private: false
@ -52,6 +51,11 @@ branches:
       required_status_checks:
         strict: false
         contexts:
-          - continuous-integration/drone/pr
-      enforce_admins: null
+          - ci/woodpecker/pr/lint
+          - ci/woodpecker/pr/test
+          - ci/woodpecker/pr/build-package
+          - ci/woodpecker/pr/build-container
+          - ci/woodpecker/pr/docs
+      enforce_admins: false
+      required_linear_history: true
       restrictions: null
2 .gitignore
@ -106,6 +106,8 @@ pip-wheel-metadata
 docs/themes/
 docs/public/
 resources/_gen/
+.hugo_build.lock

 # Misc
 CHANGELOG.md
+.ruff_cache
47 .gitsv/config.yml
@ -0,0 +1,47 @@
---
version: "1.1"

versioning:
  update-major: []
  update-minor: [feat]
  update-patch: [fix, perf, refactor, chore, test, ci, docs]

tag:
  pattern: "v%d.%d.%d"

release-notes:
  sections:
    - name: Features
      commit-types: [feat]
      section-type: commits
    - name: Bug Fixes
      commit-types: [fix]
      section-type: commits
    - name: Performance Improvements
      commit-types: [perf]
      section-type: commits
    - name: Code Refactoring
      commit-types: [refactor]
      section-type: commits
    - name: Others
      commit-types: [chore]
      section-type: commits
    - name: Testing
      commit-types: [test]
      section-type: commits
    - name: CI Pipeline
      commit-types: [ci]
      section-type: commits
    - name: Documentation
      commit-types: [docs]
      section-type: commits
    - name: Breaking Changes
      section-type: breaking-changes

commit-message:
  footer:
    issue:
      key: issue
      add-value-prefix: "#"
  issue:
    regex: "#?[0-9]+"
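Read against the versioning block above, an illustrative commit such as `fix: skip empty task files` would trigger a patch bump under this configuration, while a `feat:` commit bumps the minor version; a footer line like `issue: 42` (a made-up number) would be picked up by the footer issue key and rendered with the configured `#` prefix in the release notes.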
1 .lycheeignore
@ -0,0 +1 @@
https://hub.docker.com/r/thegeeklab/*
10 .pre-commit-hooks.yaml
@ -0,0 +1,10 @@
---
- id: ansible-later
  name: ansible-later
  description: Run ansible-later, a best-practice scanner for Ansible.
  entry: ansible-later
  language: python
  pass_filenames: False
  always_run: True
  additional_dependencies:
    - .[ansible-core]
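The new hook definition above makes the repository usable as a pre-commit hook source. A minimal consumer-side .pre-commit-config.yaml sketch (the rev value below is a placeholder, not a released tag) could look like:

repos:
  - repo: https://github.com/thegeeklab/ansible-later
    rev: v4.0.0 # placeholder tag, pin to a real release
    hooks:
      - id: ansible-later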
@ -1,2 +1,2 @@
-.drone.yml
 *.tpl.md
+LICENSE
82 .woodpecker/build-container.yml
@ -0,0 +1,82 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: build
    image: docker.io/library/python:3.12
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: security-build
    image: quay.io/thegeeklab/wp-docker-buildx:5
    depends_on: [build]
    settings:
      containerfile: Containerfile.multiarch
      output: type=oci,dest=oci/${CI_REPO_NAME},tar=false
      repo: ${CI_REPO}

  - name: security-scan
    image: docker.io/aquasec/trivy
    depends_on: [security-build]
    commands:
      - trivy -v
      - trivy image --input oci/${CI_REPO_NAME}
    environment:
      TRIVY_EXIT_CODE: "1"
      TRIVY_IGNORE_UNFIXED: "true"
      TRIVY_NO_PROGRESS: "true"
      TRIVY_SEVERITY: HIGH,CRITICAL
      TRIVY_TIMEOUT: 1m
      TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2

  - name: publish-dockerhub
    image: quay.io/thegeeklab/wp-docker-buildx:5
    depends_on: [security-scan]
    settings:
      auto_tag: true
      containerfile: Containerfile.multiarch
      password:
        from_secret: docker_password
      platforms:
        - linux/amd64
        - linux/arm64
      provenance: false
      repo: ${CI_REPO}
      username:
        from_secret: docker_username
    when:
      - event: [tag]
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}

  - name: publish-quay
    image: quay.io/thegeeklab/wp-docker-buildx:5
    depends_on: security-scan
    settings:
      auto_tag: true
      containerfile: Containerfile.multiarch
      password:
        from_secret: quay_password
      platforms:
        - linux/amd64
        - linux/arm64
      provenance: false
      registry: quay.io
      repo: quay.io/${CI_REPO}
      username:
        from_secret: quay_username
    when:
      - event: [tag]
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}

depends_on:
  - lint
  - test
56 .woodpecker/build-package.yml
@ -0,0 +1,56 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: build
    image: docker.io/library/python:3.12
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: checksum
    image: quay.io/thegeeklab/alpine-tools
    commands:
      - cd dist/ && sha256sum * > ../sha256sum.txt

  - name: changelog
    image: quay.io/thegeeklab/git-sv
    commands:
      - git sv current-version
      - git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
      - cat CHANGELOG.md

  - name: publish-github
    image: docker.io/plugins/github-release
    settings:
      api_key:
        from_secret: github_token
      files:
        - dist/*
        - sha256sum.txt
      note: CHANGELOG.md
      overwrite: true
      title: ${CI_COMMIT_TAG}
    when:
      - event: [tag]

  - name: publish-pypi
    image: docker.io/library/python:3.12
    environment:
      POETRY_HTTP_BASIC_PYPI_PASSWORD:
        from_secret: pypi_password
      POETRY_HTTP_BASIC_PYPI_USERNAME:
        from_secret: pypi_username
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry publish -n
    when:
      - event: [tag]

depends_on:
  - lint
  - test
101 .woodpecker/docs.yml
@ -0,0 +1,101 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: assets
    image: quay.io/thegeeklab/alpine-tools
    commands:
      - make doc

  - name: markdownlint
    image: quay.io/thegeeklab/markdownlint-cli
    depends_on: [assets]
    commands:
      - markdownlint 'README.md' 'CONTRIBUTING.md'

  - name: spellcheck
    image: quay.io/thegeeklab/alpine-tools
    depends_on: [assets]
    commands:
      - spellchecker --files 'docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls
    environment:
      FORCE_COLOR: "true"

  - name: link-validation
    image: docker.io/lycheeverse/lychee
    depends_on: [assets]
    commands:
      - lychee --no-progress --format detailed docs/content README.md

  - name: build
    image: quay.io/thegeeklab/hugo:0.136.5
    depends_on: [link-validation]
    commands:
      - hugo --panicOnWarning -s docs/

  - name: beautify
    image: quay.io/thegeeklab/alpine-tools
    depends_on: [build]
    commands:
      - html-beautify -r -f 'docs/public/**/*.html'

  - name: publish
    image: quay.io/thegeeklab/wp-s3-action
    depends_on: [beautify]
    settings:
      access_key:
        from_secret: s3_access_key
      bucket: geekdocs
      delete: true
      endpoint:
        from_secret: s3_endpoint
      path_style: true
      secret_key:
        from_secret: s3_secret_access_key
      source: docs/public/
      strip_prefix: docs/public/
      target: /${CI_REPO_NAME}
    when:
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}
        status: [success, failure]

  - name: pushrm-dockerhub
    image: docker.io/chko/docker-pushrm:1
    depends_on: [publish]
    environment:
      DOCKER_PASS:
        from_secret: docker_password
      DOCKER_USER:
        from_secret: docker_username
      PUSHRM_FILE: README.md
      PUSHRM_SHORT: Another best practice scanner for Ansible roles and playbooks
      PUSHRM_TARGET: ${CI_REPO}
    when:
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}
        status: [success]

  - name: pushrm-quay
    image: docker.io/chko/docker-pushrm:1
    depends_on: [publish]
    environment:
      APIKEY__QUAY_IO:
        from_secret: quay_token
      PUSHRM_FILE: README.md
      PUSHRM_TARGET: quay.io/${CI_REPO}
    when:
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}
        status: [success]

depends_on:
  - build-package
  - build-container
.woodpecker/lint.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: check-format
    image: docker.io/library/python:3.12
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install
      - poetry run ruff format --check --diff ./${CI_REPO_NAME//-/}
    environment:
      PY_COLORS: "1"

  - name: check-coding
    image: docker.io/library/python:3.12
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run ruff check ./${CI_REPO_NAME//-/}
    environment:
      PY_COLORS: "1"
.woodpecker/notify.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
---
when:
  - event: [tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

runs_on: [success, failure]

steps:
  - name: matrix
    image: quay.io/thegeeklab/wp-matrix
    settings:
      homeserver:
        from_secret: matrix_homeserver
      room_id:
        from_secret: matrix_room_id
      user_id:
        from_secret: matrix_user_id
      access_token:
        from_secret: matrix_access_token
    when:
      - status: [success, failure]

depends_on:
  - docs
.woodpecker/test.yml (new file, 35 lines)
@@ -0,0 +1,35 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

variables:
  - &pytest_base
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run pytest --cov-append
      - poetry version
      - poetry run ${CI_REPO_NAME} --help
    environment:
      PY_COLORS: "1"

steps:
  - name: python-312
    image: docker.io/library/python:3.12
    <<: *pytest_base

  - name: python-311
    image: docker.io/library/python:3.11
    <<: *pytest_base

  - name: python-310
    image: docker.io/library/python:3.10
    <<: *pytest_base

  - name: python-39
    image: docker.io/library/python:3.9
    <<: *pytest_base
@@ -3,7 +3,7 @@
 ## Security
 
 If you think you have found a **security issue**, please do not mention it in this repository.
-Instead, send an email to security@thegeeklab.de with as many details as possible so it can be handled confidential.
+Instead, send an email to `security@thegeeklab.de` with as many details as possible so it can be handled confidential.
 
 ## Bug Reports and Feature Requests
 
@@ -1,4 +1,4 @@
-FROM python:3.10-alpine@sha256:c13a6cf74fb452f9eab9f1a521f3ff6d056d5bc51b1a29bfe4758fcb00135394
+FROM python:3.12-alpine@sha256:38e179a0f0436c97ecc76bcd378d7293ab3ee79e4b8c440fdc7113670cb6e204
 
 LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
 LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
@@ -12,7 +12,7 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
 
 ADD dist/ansible_later-*.whl /
 
-RUN apk --update add --virtual .build-deps build-base libffi-dev musl-dev openssl-dev python3-dev cargo && \
+RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev musl-dev python3-dev cargo && \
     apk --update add git && \
     pip install --upgrade --no-cache-dir pip && \
     pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
LICENSE (2 changes)
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2021 Robert Kaussow <mail@thegeeklab.de>
+Copyright (c) 2022 Robert Kaussow <mail@thegeeklab.de>
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
Makefile (4 changes)
@@ -1,5 +1,5 @@
 # renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc
-THEME_VERSION := v0.20.1
+THEME_VERSION := v1.2.1
 THEME := hugo-geekdoc
 BASEDIR := docs
 THEMEDIR := $(BASEDIR)/themes
@@ -17,4 +17,4 @@ doc-assets:
 
 .PHONY: clean
 clean:
-	rm -rf $(THEMEDIR) && \
+	rm -rf $(THEMEDIR)
README.md (19 changes)
@@ -2,36 +2,25 @@
 
 Another best practice scanner for Ansible roles and playbooks
 
-[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-later?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-later)
+[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-later/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-later)
 [![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later)
 [![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later)
 [![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/)
 [![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/)
 [![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/)
-[![Codecov](https://img.shields.io/codecov/c/github/thegeeklab/ansible-later)](https://codecov.io/gh/thegeeklab/ansible-later)
 [![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors)
 [![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later)
 [![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE)
 
+> **Discontinued:** This project is no longer maintained. Please use [ansible-lint](https://github.com/ansible-community/ansible-lint) instead.
+
 ansible-later is a best practice scanner and linting tool. In most cases, if you write Ansible roles in a team, it helps to have a coding or best practice guideline in place. This will make Ansible roles more readable for all maintainers and can reduce the troubleshooting time. While ansible-later aims to be a fast and easy to use linting tool for your Ansible resources, it might not be that feature completed as required in some situations. If you need a more in-depth analysis you can take a look at [ansible-lint](https://github.com/ansible-community/ansible-lint).
 
 ansible-later does **not** ensure that your role will work as expected. For deployment tests you can use other tools like [molecule](https://github.com/ansible/molecule).
 
-You can find the full documentation at [https://ansible-later.geekdocs.de](https://ansible-later.geekdocs.de/).
-
-## Community
-
-<!-- prettier-ignore-start -->
-<!-- spellchecker-disable -->
-
-- [GitHub Action](https://github.com/patrickjahns/ansible-later-action) by [@patrickjahns](https://github.com/patrickjahns)
-
-<!-- spellchecker-enable -->
-<!-- prettier-ignore-end -->
-
 ## Contributors
 
-Special thanks goes to all [contributors](https://github.com/thegeeklab/ansible-later/graphs/contributors). If you would like to contribute,
+Special thanks to all [contributors](https://github.com/thegeeklab/ansible-later/graphs/contributors). If you would like to contribute,
 please see the [instructions](https://github.com/thegeeklab/ansible-later/blob/main/CONTRIBUTING.md).
 
 ansible-later is a fork of Will Thames [ansible-review](https://github.com/willthames/ansible-review). Thanks for your work on ansible-review and ansible-lint.
@@ -5,12 +5,10 @@ import argparse
 import multiprocessing
 import sys
 
-from ansiblelater import LOG
-from ansiblelater import __version__
-from ansiblelater import logger
+from ansiblelater import LOG, __version__, logger
 from ansiblelater.candidate import Candidate
+from ansiblelater.rule import SingleRules
 from ansiblelater.settings import Settings
-from ansiblelater.standard import SingleStandards
 
 
 def main():
@@ -24,33 +22,33 @@ def main():
     parser.add_argument(
         "-r",
         "--rules-dir",
-        dest="rules.standards",
-        metavar="RULES",
+        dest="rules.dir",
+        metavar="DIR",
         action="append",
-        help="directory of standard rules"
+        help="directory of rules",
     )
     parser.add_argument(
         "-B",
-        "--no-buildin",
-        dest="rules.buildin",
+        "--no-builtin",
+        dest="rules.builtin",
         action="store_false",
-        help="disables build-in standard rules"
+        help="disables built-in rules",
     )
     parser.add_argument(
-        "-s",
-        "--standards",
-        dest="rules.filter",
-        metavar="FILTER",
+        "-i",
+        "--include-rules",
+        dest="rules.include_filter",
+        metavar="TAGS",
         action="append",
-        help="limit standards to given ID's"
+        help="limit rules to given id/tags",
     )
     parser.add_argument(
         "-x",
-        "--exclude-standards",
+        "--exclude-rules",
         dest="rules.exclude_filter",
-        metavar="EXCLUDE_FILTER",
+        metavar="TAGS",
         action="append",
-        help="exclude standards by given ID's"
+        help="exclude rules by given it/tags",
     )
     parser.add_argument(
         "-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
@@ -59,9 +57,7 @@ def main():
         "-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
     )
     parser.add_argument("rules.files", nargs="*")
-    parser.add_argument(
-        "-V", "--version", action="version", version="%(prog)s {}".format(__version__)
-    )
+    parser.add_argument("-V", "--version", action="version", version=f"%(prog)s {__version__}")
 
     args = parser.parse_args().__dict__
 
@@ -69,7 +65,7 @@ def main():
     config = settings.config
 
     logger.update_logger(LOG, config["logging"]["level"], config["logging"]["json"])
-    SingleStandards(config["rules"]["standards"]).rules
+    SingleRules(config["rules"]["dir"])
 
     workers = max(multiprocessing.cpu_count() - 2, 2)
     p = multiprocessing.Pool(workers)
@@ -78,25 +74,22 @@ def main():
         candidate = Candidate.classify(filename, settings)
         if candidate:
             if candidate.binary:
-                LOG.info("Not reviewing binary file {name}".format(name=filename))
+                LOG.info(f"Not reviewing binary file {filename}")
                 continue
             if candidate.vault:
-                LOG.info("Not reviewing vault file {name}".format(name=filename))
+                LOG.info(f"Not reviewing vault file {filename}")
                 continue
-            else:
-                LOG.info("Reviewing all of {candidate}".format(candidate=candidate))
-                tasks.append(candidate)
-        else:
-            LOG.info("Couldn't classify file {name}".format(name=filename))
 
-    errors = (sum(p.map(_review_wrapper, tasks)))
+            LOG.info(f"Reviewing all of {candidate}")
+            tasks.append(candidate)
+        else:
+            LOG.info(f"Couldn't classify file {filename}")
+
+    errors = sum(p.map(_review_wrapper, tasks))
     p.close()
     p.join()
 
-    if not errors == 0:
-        return_code = 1
-    else:
-        return_code = 0
+    return_code = 1 if errors != 0 else 0
 
     sys.exit(return_code)
@@ -3,19 +3,15 @@
 import codecs
 import copy
 import os
-import re
-
-from distutils.version import LooseVersion
 
 from ansible.plugins.loader import module_loader
 
 from ansiblelater import LOG
-from ansiblelater import utils
 from ansiblelater.logger import flag_extra
-from ansiblelater.standard import SingleStandards
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase, SingleRules
 
 
-class Candidate(object):
+class Candidate:
     """
     Meta object for all files which later has to process.
@@ -23,12 +19,12 @@ class Candidate(object):
     bundled with necessary meta informations for rule processing.
     """
 
-    def __init__(self, filename, settings={}, standards=[]):
+    def __init__(self, filename, settings={}, rules=[]):  # noqa
         self.path = filename
         self.binary = False
         self.vault = False
-        self.filetype = type(self).__name__.lower()
-        self.expected_version = True
+        self.filemeta = type(self).__name__.lower()
+        self.kind = type(self).__name__.lower()
         self.faulty = False
         self.config = settings.config
         self.settings = settings
@@ -40,204 +36,127 @@ class Candidate(object):
         except UnicodeDecodeError:
             self.binary = True
 
-    def _get_version(self):
-        path = self.path
-        version = None
-        config_version = self.config["rules"]["version"].strip()
-
-        if config_version:
-            version_config_re = re.compile(r"([\d.]+)")
-            match = version_config_re.match(config_version)
-            if match:
-                version = match.group(1)
-
-        if not self.binary:
-            if isinstance(self, RoleFile):
-                parentdir = os.path.dirname(os.path.abspath(self.path))
-                while parentdir != os.path.dirname(parentdir):
-                    meta_file = os.path.join(parentdir, "meta", "main.yml")
-                    if os.path.exists(meta_file):
-                        path = meta_file
-                        break
-                    parentdir = os.path.dirname(parentdir)
-
-            version_file_re = re.compile(r"^# Standards:\s*([\d.]+)")
-            with codecs.open(path, mode="rb", encoding="utf-8") as f:
-                for line in f:
-                    match = version_file_re.match(line)
-                    if match:
-                        version = match.group(1)
-
-        if not version:
-            version = utils.standards_latest(self.standards)
-            if self.expected_version:
-                if isinstance(self, RoleFile):
-                    LOG.warning(
-                        "{name} {path} is in a role that contains a meta/main.yml without a "
-                        "declared standards version. "
-                        "Using latest standards version {version}".format(
-                            name=type(self).__name__, path=self.path, version=version
-                        )
-                    )
-                else:
-                    LOG.warning(
-                        "{name} {path} does not present standards version. "
-                        "Using latest standards version {version}".format(
-                            name=type(self).__name__, path=self.path, version=version
-                        )
-                    )
-        else:
-            LOG.info(
-                "{name} {path} declares standards version {version}".format(
-                    name=type(self).__name__, path=self.path, version=version
-                )
-            )
-
-        return version
-
-    def _filter_standards(self):
-        target_standards = []
-        includes = self.config["rules"]["filter"]
+    def _filter_rules(self):
+        target_rules = []
+        includes = self.config["rules"]["include_filter"]
         excludes = self.config["rules"]["exclude_filter"]
 
         if len(includes) == 0:
-            includes = [s.sid for s in self.standards]
+            includes = [s.rid for s in self.rules]
 
-        for standard in self.standards:
-            if standard.sid in includes and standard.sid not in excludes:
-                target_standards.append(standard)
+        for rule in self.rules:
+            if rule.rid in includes and rule.rid not in excludes:
+                target_rules.append(rule)
 
-        return target_standards
+        return target_rules
 
-    def review(self, lines=None):
+    def review(self):
         errors = 0
-        self.standards = SingleStandards(self.config["rules"]["standards"]).rules
-        self.version = self._get_version()
+        self.rules = SingleRules(self.config["rules"]["dir"]).rules
 
-        for standard in self._filter_standards():
-            if type(self).__name__.lower() not in standard.types:
+        for rule in self._filter_rules():
+            if self.kind not in rule.types:
                 continue
 
-            result = standard.check(self, self.config)
+            result = rule.check(self, self.config)
 
             if not result:
-                LOG.error(
-                    "Standard '{id}' returns an empty result object. Check failed!".format(
-                        id=standard.sid
-                    )
-                )
+                LOG.error(f"rule '{rule.rid}' returns an empty result object. Check failed!")
                 continue
 
             labels = {
                 "tag": "review",
-                "standard": standard.description,
+                "rule": rule.description,
                 "file": self.path,
-                "passed": True
+                "passed": True,
             }
 
-            if standard.sid and standard.sid.strip():
-                labels["sid"] = standard.sid
+            if rule.rid and rule.rid.strip():
+                labels["rid"] = rule.rid
 
             for err in result.errors:
                 err_labels = copy.copy(labels)
                 err_labels["passed"] = False
-                if isinstance(err, StandardBase.Error):
+
+                rid = self._format_id(rule.rid)
+                path = self.path
+                description = rule.description
+
+                if isinstance(err, RuleBase.Error):
                     err_labels.update(err.to_dict())
 
-                if not standard.version:
-                    LOG.warning(
-                        "{sid}Best practice '{description}' not met:\n{path}:{error}".format(
-                            sid=self._format_id(standard.sid),
-                            description=standard.description,
-                            path=self.path,
-                            error=err
-                        ),
-                        extra=flag_extra(err_labels)
-                    )
-                elif LooseVersion(standard.version) > LooseVersion(self.version):
-                    LOG.warning(
-                        "{sid}Future standard '{description}' not met:\n{path}:{error}".format(
-                            sid=self._format_id(standard.sid),
-                            description=standard.description,
-                            path=self.path,
-                            error=err
-                        ),
-                        extra=flag_extra(err_labels)
-                    )
-                else:
-                    msg = "{sid}Standard '{description}' not met:\n{path}:{error}".format(
-                        sid=self._format_id(standard.sid),
-                        description=standard.description,
-                        path=self.path,
-                        error=err
-                    )
+                msg = f"{rid}rule '{description}' not met:\n{path}:{err}"
 
-                    if standard.sid not in self.config["rules"]["warning_filter"]:
-                        LOG.error(msg, extra=flag_extra(err_labels))
-                        errors = errors + 1
-                    else:
-                        LOG.warning(msg, extra=flag_extra(err_labels))
+                if rule.rid not in self.config["rules"]["warning_filter"]:
+                    LOG.error(msg, extra=flag_extra(err_labels))
+                    errors = errors + 1
+                else:
+                    LOG.warning(msg, extra=flag_extra(err_labels))
 
         return errors
 
     @staticmethod
-    def classify(filename, settings={}, standards=[]):
+    def classify(filename, settings={}, rules=[]):  # noqa
         parentdir = os.path.basename(os.path.dirname(filename))
         basename = os.path.basename(filename)
+        ext = os.path.splitext(filename)[1][1:]
 
         if parentdir in ["tasks"]:
-            return Task(filename, settings, standards)
+            return Task(filename, settings, rules)
         if parentdir in ["handlers"]:
-            return Handler(filename, settings, standards)
+            return Handler(filename, settings, rules)
         if parentdir in ["vars", "defaults"]:
-            return RoleVars(filename, settings, standards)
+            return RoleVars(filename, settings, rules)
         if "group_vars" in filename.split(os.sep):
-            return GroupVars(filename, settings, standards)
+            return GroupVars(filename, settings, rules)
         if "host_vars" in filename.split(os.sep):
-            return HostVars(filename, settings, standards)
+            return HostVars(filename, settings, rules)
         if parentdir in ["meta"] and "main" in basename:
-            return Meta(filename, settings, standards)
+            return Meta(filename, settings, rules)
         if parentdir in ["meta"] and "argument_specs" in basename:
-            return ArgumentSpecs(filename, settings, standards)
-        if (
-            parentdir in ["library", "lookup_plugins", "callback_plugins", "filter_plugins"]
-            or filename.endswith(".py")
-        ):
-            return Code(filename, settings, standards)
+            return ArgumentSpecs(filename, settings, rules)
+        if parentdir in [
+            "library",
+            "lookup_plugins",
+            "callback_plugins",
+            "filter_plugins",
+        ] or filename.endswith(".py"):
+            return Code(filename, settings, rules)
         if basename == "inventory" or basename == "hosts" or parentdir in ["inventories"]:
-            return Inventory(filename, settings, standards)
-        if "rolesfile" in basename or "requirements" in basename:
-            return Rolesfile(filename, settings, standards)
+            return Inventory(filename, settings, rules)
+        if "rolesfile" in basename or ("requirements" in basename and ext in ["yaml", "yml"]):
+            return Rolesfile(filename, settings, rules)
         if "Makefile" in basename:
-            return Makefile(filename, settings, standards)
+            return Makefile(filename, settings, rules)
         if "templates" in filename.split(os.sep) or basename.endswith(".j2"):
-            return Template(filename, settings, standards)
+            return Template(filename, settings, rules)
         if "files" in filename.split(os.sep):
-            return File(filename, settings, standards)
+            return File(filename, settings, rules)
         if basename.endswith(".yml") or basename.endswith(".yaml"):
-            return Playbook(filename, settings, standards)
+            return Playbook(filename, settings, rules)
         if "README" in basename:
-            return Doc(filename, settings, standards)
+            return Doc(filename, settings, rules)
         return None
 
-    def _format_id(self, standard_id):
-        if standard_id and standard_id.strip():
-            standard_id = "[{id}] ".format(id=standard_id.strip())
+    def _format_id(self, rule_id):
+        rid = rule_id.strip()
+        if rid:
+            rule_id = f"[{rid}] "
 
-        return standard_id
+        return rule_id
 
-    def __repr__(self):  # noqa
-        return "{name} ({path})".format(name=type(self).__name__, path=self.path)
+    def __repr__(self):
+        return f"{self.kind} ({self.path})"
 
-    def __getitem__(self, item):  # noqa
+    def __getitem__(self, item):
         return self.__dict__.get(item)
 
 
 class RoleFile(Candidate):
     """Object classified as Ansible role file."""
 
-    def __init__(self, filename, settings={}, standards=[]):
-        super(RoleFile, self).__init__(filename, settings, standards)
+    def __init__(self, filename, settings={}, rules=[]):  # noqa
+        super().__init__(filename, settings, rules)
 
         parentdir = os.path.dirname(os.path.abspath(filename))
         while parentdir != os.path.dirname(parentdir):
@@ -257,17 +176,17 @@ class Playbook(Candidate):
 class Task(RoleFile):
     """Object classified as Ansible task file."""
 
-    def __init__(self, filename, settings={}, standards=[]):
-        super(Task, self).__init__(filename, settings, standards)
-        self.filetype = "tasks"
+    def __init__(self, filename, settings={}, rules=[]):  # noqa
+        super().__init__(filename, settings, rules)
+        self.filemeta = "tasks"
 
 
 class Handler(RoleFile):
     """Object classified as Ansible handler file."""
 
-    def __init__(self, filename, settings={}, standards=[]):
-        super(Handler, self).__init__(filename, settings, standards)
-        self.filetype = "handlers"
+    def __init__(self, filename, settings={}, rules=[]):  # noqa
+        super().__init__(filename, settings, rules)
+        self.filemeta = "handlers"
 
 
 class Vars(Candidate):
@@ -276,15 +195,7 @@ class Vars(Candidate):
     pass
 
 
-class Unversioned(Candidate):
-    """Object classified as unversioned file."""
-
-    def __init__(self, filename, settings={}, standards=[]):
-        super(Unversioned, self).__init__(filename, settings, standards)
-        self.expected_version = False
-
-
-class InventoryVars(Unversioned):
+class InventoryVars(Candidate):
     """Object classified as Ansible inventory vars."""
 
     pass
@@ -320,13 +231,13 @@ class ArgumentSpecs(RoleFile):
     pass
 
 
-class Inventory(Unversioned):
+class Inventory(Candidate):
     """Object classified as Ansible inventory file."""
 
     pass
 
 
-class Code(Unversioned):
+class Code(Candidate):
     """Object classified as code file."""
 
     pass
@@ -338,13 +249,13 @@ class Template(RoleFile):
     pass
 
 
-class Doc(Unversioned):
+class Doc(Candidate):
     """Object classified as documentation file."""
 
     pass
 
 
-class Makefile(Unversioned):
+class Makefile(Candidate):
     """Object classified as makefile."""
 
     pass
@@ -356,7 +267,7 @@ class File(RoleFile):
     pass
 
 
-class Rolesfile(Unversioned):
+class Rolesfile(Candidate):
     """Object classified as Ansible roles file."""
 
     pass
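The new `_filter_rules` helper replaces the old standards filter: an empty include filter means every rule id is eligible, and excluded ids are then dropped. A self-contained sketch of that behaviour; the `Rule` dataclass and the tag values below are illustrative, not the project's classes:

from dataclasses import dataclass


@dataclass
class Rule:
    rid: str


def filter_rules(rules, include_filter, exclude_filter):
    # An empty include filter means "include everything".
    includes = include_filter or [r.rid for r in rules]
    return [r for r in rules if r.rid in includes and r.rid not in exclude_filter]


rules = [Rule("ANS104"), Rule("ANS115"), Rule("ANS128")]
print([r.rid for r in filter_rules(rules, [], ["ANS115"])])   # ['ANS104', 'ANS128']
print([r.rid for r in filter_rules(rules, ["ANS115"], [])])   # ['ANS115']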
@@ -8,14 +8,14 @@ class LaterError(Exception):
 
     def __init__(self, msg, original):
         """Initialize new exception."""
-        super(LaterError, self).__init__("{msg}: {org}".format(msg=msg, org=original))
+        super().__init__(f"{msg}: {original}")
         self.original = original
 
 
 class LaterAnsibleError(Exception):
     """Wrapper for ansible syntax errors."""
 
-    def __init__(self, msg, original):
+    def __init__(self, original):
         lines = original.message.splitlines()
 
         line_no = re.search("line(.*?),", lines[2])
@@ -3,7 +3,6 @@
 import logging
 import os
 import sys
-from distutils.util import strtobool
 
 import colorama
 from pythonjsonlogger import jsonlogger
@@ -12,12 +11,35 @@ CONSOLE_FORMAT = "{}%(levelname)s:{} %(message)s"
 JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"
 
 
+def strtobool(value):
+    """Convert a string representation of truth to true or false."""
+
+    _map = {
+        "y": True,
+        "yes": True,
+        "t": True,
+        "true": True,
+        "on": True,
+        "1": True,
+        "n": False,
+        "no": False,
+        "f": False,
+        "false": False,
+        "off": False,
+        "0": False,
+    }
+
+    try:
+        return _map[str(value).lower()]
+    except KeyError as err:
+        raise ValueError(f'"{value}" is not a valid bool value') from err
+
+
 def to_bool(string):
     return bool(strtobool(str(string)))
 
 
 def _should_do_markup():
     py_colors = os.environ.get("PY_COLORS", None)
     if py_colors is not None:
         return to_bool(py_colors)
@@ -30,7 +52,7 @@ colorama.init(autoreset=True, strip=(not _should_do_markup()))
 
 def flag_extra(extra):
     """Ensure extra args are prefixed."""
-    flagged = dict()
+    flagged = {}
 
     if isinstance(extra, dict):
         for key, value in extra.items():
@@ -39,7 +61,7 @@ def flag_extra(extra):
     return flagged
 
 
-class LogFilter(object):
+class LogFilter:
     """A custom log filter which excludes log messages above the logged level."""
 
     def __init__(self, level):
@@ -60,8 +82,8 @@ class LogFilter(object):
 class MultilineFormatter(logging.Formatter):
     """Logging Formatter to reset color after newline characters."""
 
-    def format(self, record):  # noqa
-        record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
+    def format(self, record):
+        record.msg = record.msg.replace("\n", f"\n{colorama.Style.RESET_ALL}... ")
         record.msg = record.msg + "\n"
         return logging.Formatter.format(self, record)
 
@@ -69,7 +91,7 @@ class MultilineFormatter(logging.Formatter):
 class MultilineJsonFormatter(jsonlogger.JsonFormatter):
     """Logging Formatter to remove newline characters."""
 
-    def format(self, record):  # noqa
+    def format(self, record):
         record.msg = record.msg.replace("\n", " ")
         return jsonlogger.JsonFormatter.format(self, record)
 
@@ -185,4 +207,4 @@ def color_text(color, msg):
 
     """
     msg = msg.format(colorama.Style.BRIGHT, colorama.Style.NORMAL)
-    return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
+    return f"{color}{msg}{colorama.Style.RESET_ALL}"
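Because `distutils.util` is no longer available on newer Python releases, the logger now vendors its own `strtobool`. Condensed from the hunk above, with a small usage sketch appended for illustration:

def strtobool(value):
    """Convert a string representation of truth to true or false."""
    _map = {
        "y": True, "yes": True, "t": True, "true": True, "on": True, "1": True,
        "n": False, "no": False, "f": False, "false": False, "off": False, "0": False,
    }
    try:
        return _map[str(value).lower()]
    except KeyError as err:
        raise ValueError(f'"{value}" is not a valid bool value') from err


print(strtobool("Yes"))  # True
print(strtobool(0))      # False
# strtobool("maybe") raises ValueError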
@ -1,98 +1,90 @@
|
|||||||
"""Standard definition."""
|
"""Rule definition."""
|
||||||
|
|
||||||
import codecs
|
|
||||||
import copy
|
import copy
|
||||||
import importlib
|
import importlib
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import re
|
import re
|
||||||
from abc import ABCMeta
|
from abc import ABCMeta, abstractmethod
|
||||||
from abc import abstractmethod
|
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import toolz
|
import toolz
|
||||||
import yaml
|
import yaml
|
||||||
from yamllint import linter
|
from yamllint import linter
|
||||||
from yamllint.config import YamlLintConfig
|
from yamllint.config import YamlLintConfig
|
||||||
|
|
||||||
from ansiblelater.exceptions import LaterAnsibleError
|
from ansiblelater.exceptions import LaterAnsibleError, LaterError
|
||||||
from ansiblelater.exceptions import LaterError
|
from ansiblelater.utils import Singleton, sysexit_with_message
|
||||||
from ansiblelater.utils import Singleton
|
from ansiblelater.utils.yamlhelper import (
|
||||||
from ansiblelater.utils import sysexit_with_message
|
UnsafeTag,
|
||||||
from ansiblelater.utils.yamlhelper import UnsafeTag
|
VaultTag,
|
||||||
from ansiblelater.utils.yamlhelper import VaultTag
|
action_tasks,
|
||||||
from ansiblelater.utils.yamlhelper import action_tasks
|
normalize_task,
|
||||||
from ansiblelater.utils.yamlhelper import normalize_task
|
normalized_yaml,
|
||||||
from ansiblelater.utils.yamlhelper import normalized_yaml
|
parse_yaml_linenumbers,
|
||||||
from ansiblelater.utils.yamlhelper import parse_yaml_linenumbers
|
)
|
||||||
|
|
||||||
|
|
||||||
class StandardMeta(type):
|
class RuleMeta(type):
|
||||||
|
def __call__(cls, *args):
|
||||||
def __call__(cls, *args, **kwargs):
|
|
||||||
mcls = type.__call__(cls, *args)
|
mcls = type.__call__(cls, *args)
|
||||||
setattr(mcls, "sid", cls.sid)
|
mcls.rid = cls.rid
|
||||||
setattr(mcls, "description", getattr(cls, "description", "__unknown__"))
|
mcls.description = getattr(cls, "description", "__unknown__")
|
||||||
setattr(mcls, "helptext", getattr(cls, "helptext", ""))
|
mcls.helptext = getattr(cls, "helptext", "")
|
||||||
setattr(mcls, "version", getattr(cls, "version", None))
|
mcls.types = getattr(cls, "types", [])
|
||||||
setattr(mcls, "types", getattr(cls, "types", []))
|
|
||||||
return mcls
|
return mcls
|
||||||
|
|
||||||
|
|
||||||
class StandardExtendedMeta(StandardMeta, ABCMeta):
|
class RuleExtendedMeta(RuleMeta, ABCMeta):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class StandardBase(object, metaclass=StandardExtendedMeta):
|
class RuleBase(metaclass=RuleExtendedMeta):
|
||||||
|
SHELL_PIPE_CHARS = "&|<>;$\n*[]{}?"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def sid(self):
|
def rid(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def check(self, candidate, settings):
|
def check(self, candidate, settings):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def __repr__(self): # noqa
|
def __repr__(self):
|
||||||
return "Standard: {description} (version: {version}, types: {types})".format(
|
return f"Rule: {self.description} (types: {self.types})"
|
||||||
description=self.description, version=self.version, types=self.types
|
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_tasks(candidate, settings):
|
def get_tasks(candidate, settings): # noqa
|
||||||
errors = []
|
errors = []
|
||||||
yamllines = []
|
yamllines = []
|
||||||
|
|
||||||
if not candidate.faulty:
|
if not candidate.faulty:
|
||||||
try:
|
try:
|
||||||
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
|
with open(candidate.path, encoding="utf-8") as f:
|
||||||
yamllines = parse_yaml_linenumbers(f, candidate.path)
|
yamllines = parse_yaml_linenumbers(f, candidate.path)
|
||||||
except LaterError as ex:
|
except LaterError as ex:
|
||||||
e = ex.original
|
e = ex.original
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
except LaterAnsibleError as e:
|
except LaterAnsibleError as e:
|
||||||
errors.append(
|
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
|
||||||
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
|
|
||||||
)
|
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
|
||||||
return yamllines, errors
|
return yamllines, errors
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_action_tasks(candidate, settings):
|
def get_action_tasks(candidate, settings): # noqa
|
||||||
tasks = []
|
tasks = []
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
if not candidate.faulty:
|
if not candidate.faulty:
|
||||||
try:
|
try:
|
||||||
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
|
with open(candidate.path, encoding="utf-8") as f:
|
||||||
yamllines = parse_yaml_linenumbers(f, candidate.path)
|
yamllines = parse_yaml_linenumbers(f, candidate.path)
|
||||||
|
|
||||||
if yamllines:
|
if yamllines:
|
||||||
@ -100,13 +92,11 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
except LaterError as ex:
|
except LaterError as ex:
|
||||||
e = ex.original
|
e = ex.original
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
except LaterAnsibleError as e:
|
except LaterAnsibleError as e:
|
||||||
errors.append(StandardBase.Error(e.line, "syntax error: {}".format(e.message)))
|
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
|
||||||
return tasks, errors
|
return tasks, errors
|
||||||
@ -124,15 +114,11 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
except LaterError as ex:
|
except LaterError as ex:
|
||||||
e = ex.original
|
e = ex.original
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
except LaterAnsibleError as e:
|
except LaterAnsibleError as e:
|
||||||
errors.append(
|
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
|
||||||
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
|
|
||||||
)
|
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
|
||||||
return normalized, errors
|
return normalized, errors
|
||||||
@ -144,7 +130,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
|
|
||||||
if not candidate.faulty:
|
if not candidate.faulty:
|
||||||
try:
|
try:
|
||||||
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
|
with open(candidate.path, encoding="utf-8") as f:
|
||||||
yamllines = parse_yaml_linenumbers(f, candidate.path)
|
yamllines = parse_yaml_linenumbers(f, candidate.path)
|
||||||
|
|
||||||
if yamllines:
|
if yamllines:
|
||||||
@ -163,30 +149,27 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
# No need to normalize_task if we are skipping it.
|
# No need to normalize_task if we are skipping it.
|
||||||
continue
|
continue
|
||||||
|
|
||||||
normalized.append(
|
normalized_task = normalize_task(
|
||||||
normalize_task(
|
task, candidate.path, settings["ansible"]["custom_modules"]
|
||||||
task, candidate.path, settings["ansible"]["custom_modules"]
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
normalized_task["__raw_task__"] = task
|
||||||
|
|
||||||
|
normalized.append(normalized_task)
|
||||||
|
|
||||||
except LaterError as ex:
|
except LaterError as ex:
|
||||||
e = ex.original
|
e = ex.original
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
except LaterAnsibleError as e:
|
except LaterAnsibleError as e:
|
||||||
errors.append(
|
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
|
||||||
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
|
|
||||||
)
|
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
|
||||||
return normalized, errors
|
return normalized, errors
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_normalized_yaml(candidate, settings, options=None):
|
def get_normalized_yaml(candidate, settings, options=None): # noqa
|
||||||
errors = []
|
errors = []
|
||||||
yamllines = []
|
yamllines = []
|
||||||
|
|
||||||
@ -201,27 +184,23 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
except LaterError as ex:
|
except LaterError as ex:
|
||||||
e = ex.original
|
e = ex.original
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
except LaterAnsibleError as e:
|
except LaterAnsibleError as e:
|
||||||
errors.append(
|
errors.append(RuleBase.Error(e.line, f"syntax error: {e.message}"))
|
||||||
StandardBase.Error(e.line, "syntax error: {msg}".format(msg=e.message))
|
|
||||||
)
|
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
|
||||||
return yamllines, errors
|
return yamllines, errors
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_raw_yaml(candidate, settings):
|
def get_raw_yaml(candidate, settings): # noqa
|
||||||
content = None
|
content = None
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
if not candidate.faulty:
|
if not candidate.faulty:
|
||||||
try:
|
try:
|
||||||
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
|
with open(candidate.path, encoding="utf-8") as f:
|
||||||
yaml.add_constructor(
|
yaml.add_constructor(
|
||||||
UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader
|
UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader
|
||||||
)
|
)
|
||||||
@ -231,9 +210,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
content = yaml.safe_load(f)
|
content = yaml.safe_load(f)
|
||||||
except yaml.YAMLError as e:
|
except yaml.YAMLError as e:
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
|
||||||
@ -245,16 +222,17 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
|
|
||||||
if not candidate.faulty:
|
if not candidate.faulty:
|
||||||
try:
|
try:
|
||||||
with codecs.open(candidate.path, mode="rb", encoding="utf-8") as f:
|
with open(candidate.path, encoding="utf-8") as f:
|
||||||
for problem in linter.run(f, YamlLintConfig(options)):
|
for problem in linter.run(f, YamlLintConfig(options)):
|
||||||
errors.append(StandardBase.Error(problem.line, problem.desc))
|
errors.append(RuleBase.Error(problem.line, problem.desc))
|
||||||
except yaml.YAMLError as e:
|
except yaml.YAMLError as e:
|
||||||
errors.append(
|
errors.append(
|
||||||
StandardBase.Error(
|
RuleBase.Error(e.problem_mark.line + 1, f"syntax error: {e.problem}")
|
||||||
e.problem_mark.line + 1, "syntax error: {msg}".format(msg=e.problem)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
candidate.faulty = True
|
candidate.faulty = True
|
||||||
|
except (TypeError, ValueError) as e:
|
||||||
|
errors.append(RuleBase.Error(None, f"yamllint error: {e}"))
|
||||||
|
candidate.faulty = True
|
||||||
|
|
||||||
return errors
|
return errors
|
||||||
|
|
||||||
@ -269,10 +247,26 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
|
|
||||||
return first_cmd_arg
|
return first_cmd_arg
|
||||||
|
|
||||||
class Error(object):
|
@staticmethod
|
||||||
|
def get_safe_cmd(task):
|
||||||
|
if "cmd" in task["action"]:
|
||||||
|
cmd = task["action"].get("cmd", "")
|
||||||
|
else:
|
||||||
|
cmd = " ".join(task["action"].get("__ansible_arguments__", []))
|
||||||
|
|
||||||
|
cmd = re.sub(r"{{.+?}}", "JINJA_EXPRESSION", cmd)
|
||||||
|
cmd = re.sub(r"{%.+?%}", "JINJA_STATEMENT", cmd)
|
||||||
|
cmd = re.sub(r"{#.+?#}", "JINJA_COMMENT", cmd)
|
||||||
|
|
||||||
|
parts = cmd.split()
|
||||||
|
parts = [p if not urlparse(p.strip('"').strip("'")).scheme else "URL" for p in parts]
|
||||||
|
|
||||||
|
return " ".join(parts)
|
||||||
|
|
||||||
|
class Error:
|
||||||
"""Default error object created if a rule failed."""
|
"""Default error object created if a rule failed."""
|
||||||
|
|
||||||
def __init__(self, lineno, message, error_type=None, **kwargs):
|
def __init__(self, lineno, message, **kwargs):
|
||||||
"""
|
"""
|
||||||
Initialize a new error object and returns None.
|
Initialize a new error object and returns None.
|
||||||
|
|
||||||
@ -283,22 +277,21 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
|
|||||||
            self.lineno = lineno
            self.message = message
            self.kwargs = kwargs
-            for (key, value) in kwargs.items():
+            for key, value in kwargs.items():
                setattr(self, key, value)

-        def __repr__(self): # noqa
+        def __repr__(self):
            if self.lineno:
-                return "{no}: {msg}".format(no=self.lineno, msg=self.message)
-            else:
-                return " {msg}".format(msg=self.message)
+                return f"{self.lineno}: {self.message}"
+            return f" {self.message}"

        def to_dict(self):
-            result = dict(lineno=self.lineno, message=self.message)
-            for (key, value) in self.kwargs.items():
+            result = {"lineno": self.lineno, "message": self.message}
+            for key, value in self.kwargs.items():
                result[key] = value
            return result

-    class Result(object):
+    class Result:
        """Generic result object."""

        def __init__(self, candidate, errors=None):
@@ -306,11 +299,10 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
            self.errors = errors or []

        def message(self):
-            return "\n".join(["{0}:{1}".format(self.candidate, error) for error in self.errors])
+            return "\n".join([f"{self.candidate}:{error}" for error in self.errors])


-class StandardLoader():
+class RulesLoader:

    def __init__(self, source):
        self.rules = []
@@ -326,37 +318,33 @@ class StandardLoader():
        try:
            spec.loader.exec_module(module)
        except (ImportError, NameError) as e:
-            sysexit_with_message(
-                "Failed to load roles file {module}: \n {msg}".format(
-                    msg=str(e), module=filename
-                )
-            )
+            sysexit_with_message(f"Failed to load roles file {filename}: \n {e!s}")

        try:
-            for name, obj in inspect.getmembers(module):
+            for _name, obj in inspect.getmembers(module):
                if self._is_plugin(obj):
                    self.rules.append(obj())
        except TypeError as e:
-            sysexit_with_message("Failed to load roles file: \n {msg}".format(msg=str(e)))
+            sysexit_with_message(f"Failed to load roles file: \n {e!s}")

        self.validate()

    def _is_plugin(self, obj):
-        return inspect.isclass(obj) and issubclass(
-            obj, StandardBase
-        ) and obj is not StandardBase and not None
+        return (
+            inspect.isclass(obj) and issubclass(obj, RuleBase) and obj is not RuleBase and not None
+        )

    def validate(self):
-        normalized_std = (list(toolz.remove(lambda x: x.sid == "", self.rules)))
-        unique_std = len(list(toolz.unique(normalized_std, key=lambda x: x.sid)))
-        all_std = len(normalized_std)
-        if not all_std == unique_std:
+        normalize_rule = list(toolz.remove(lambda x: x.rid == "", self.rules))
+        unique_rule = len(list(toolz.unique(normalize_rule, key=lambda x: x.rid)))
+        all_rules = len(normalize_rule)
+        if all_rules != unique_rule:
            sysexit_with_message(
-                "Detect duplicate ID's in standards definition. Please use unique ID's only."
+                "Found duplicate tags in rules definition. Please use unique tags only."
            )


-class SingleStandards(StandardLoader, metaclass=Singleton):
+class SingleRules(RulesLoader, metaclass=Singleton):
    """Singleton config class."""

    pass
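Note: the renamed `validate()` step above aborts when two loaded rules share the same `rid`. A minimal standalone sketch of that duplicate check, assuming a made-up `DummyRule` class and sample IDs purely for illustration:

import toolz


class DummyRule:
    def __init__(self, rid):
        self.rid = rid


rules = [DummyRule("ANS101"), DummyRule("ANS102"), DummyRule("ANS101"), DummyRule("")]

# Ignore rules without an ID, then compare the total count against the unique count.
named = list(toolz.remove(lambda x: x.rid == "", rules))
unique = len(list(toolz.unique(named, key=lambda x: x.rid)))

if len(named) != unique:
    print("Found duplicate tags in rules definition. Please use unique tags only.")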
@@ -1,12 +1,10 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckBecomeUser(StandardBase):
-    sid = "ANSIBLE0015"
+class CheckBecomeUser(RuleBase):
+    rid = "ANS115"
     description = "Become should be combined with become_user"
     helptext = "the task has `become` enabled but `become_user` is missing"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -16,7 +14,7 @@ class CheckBecomeUser(StandardBase):
         if not errors:
             gen = (task for task in tasks if "become" in task)
             for task in gen:
-                if task["become"] in true_value and "become_user" not in task.keys():
+                if task["become"] in true_value and "become_user" not in task:
                     errors.append(self.Error(task["__line__"], self.helptext))

         return self.Result(candidate.path, errors)
@@ -1,15 +1,13 @@
 import re

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase
 from ansiblelater.utils import count_spaces


-class CheckBracesSpaces(StandardBase):
-    sid = "ANSIBLE0004"
+class CheckBracesSpaces(RuleBase):
+    rid = "ANS104"
     description = "YAML should use consistent number of spaces around variables"
     helptext = "no suitable numbers of spaces (min: {min} max: {max})"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]

     def check(self, candidate, settings):
@@ -41,7 +39,7 @@ class CheckBracesSpaces(StandardBase):
                             i,
                             self.helptext.format(
                                 min=conf["min-spaces-inside"], max=conf["max-spaces-inside"]
-                            )
+                            ),
                         )
                     )
         return self.Result(candidate.path, errors)
@@ -17,15 +17,14 @@
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckChangedInWhen(StandardBase):
-    sid = "ANSIBLE0026"
+class CheckChangedInWhen(RuleBase):
+    rid = "ANS126"
     description = "Use handlers instead of `when: changed`"
     helptext = "tasks using `when: result.changed` setting are effectively acting as a handler"
-    version = "0.2"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -35,7 +34,7 @@ class CheckChangedInWhen(StandardBase):
         for task in tasks:
             when = None

-            if task["__ansible_action_type__"] == "task":
+            if task["__ansible_action_type__"] in ["task", "meta"]:
                 when = task.get("when")

             if isinstance(when, str):
@@ -53,6 +52,16 @@ class CheckChangedInWhen(StandardBase):
         if not isinstance(item, str):
             return False

+        if not {"and", "or", "not"}.isdisjoint(item.split()):
+            return False
+
         return any(
-            changed in item for changed in [".changed", "|changed", '["changed"]', "['changed']"]
+            changed in item
+            for changed in [
+                ".changed",
+                "|changed",
+                '["changed"]',
+                "['changed']",
+                "is changed",
+            ]
         )
@@ -1,15 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckCommandHasChanges(StandardBase):
-    sid = "ANSIBLE0011"
+class CheckCommandHasChanges(RuleBase):
+    rid = "ANS111"
     description = "Commands should be idempotent"
     helptext = (
         "commands should only read while using `changed_when` or try to be "
         "idempotent while using controls like `creates`, `removes` or `when`"
     )
-    version = "0.1"
     types = ["playbook", "task"]

     def check(self, candidate, settings):
@@ -18,12 +16,13 @@ class CheckCommandHasChanges(StandardBase):

         if not errors:
             for task in tasks:
-                if task["action"]["__ansible_module__"] in commands:
-                    if (
-                        "changed_when" not in task and "when" not in task
-                        and "when" not in task.get("__ansible_action_meta__", [])
-                        and "creates" not in task["action"] and "removes" not in task["action"]
-                    ):
-                        errors.append(self.Error(task["__line__"], self.helptext))
+                if task["action"]["__ansible_module__"] in commands and (
+                    "changed_when" not in task
+                    and "when" not in task
+                    and "when" not in task.get("__ansible_action_meta__", [])
+                    and "creates" not in task["action"]
+                    and "removes" not in task["action"]
+                ):
+                    errors.append(self.Error(task["__line__"], self.helptext))

         return self.Result(candidate.path, errors)
@@ -20,15 +20,13 @@

 import os

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckCommandInsteadOfArgument(StandardBase):
-    sid = "ANSIBLE0017"
+class CheckCommandInsteadOfArgument(RuleBase):
+    rid = "ANS117"
     description = "Commands should not be used in place of module arguments"
     helptext = "{exec} used in place of file modules argument {arg}"
-    version = "0.2"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -41,7 +39,7 @@ class CheckCommandInsteadOfArgument(StandardBase):
             "ln": "state=link",
             "mkdir": "state=directory",
             "rmdir": "state=absent",
-            "rm": "state=absent"
+            "rm": "state=absent",
         }

         if not errors:
@@ -51,13 +49,14 @@ class CheckCommandInsteadOfArgument(StandardBase):
                     executable = os.path.basename(first_cmd_arg)

                     if (
-                        first_cmd_arg and executable in arguments
+                        first_cmd_arg
+                        and executable in arguments
                         and task["action"].get("warn", True)
                     ):
                         errors.append(
                             self.Error(
                                 task["__line__"],
-                                self.helptext.format(exec=executable, arg=arguments[executable])
+                                self.helptext.format(exec=executable, arg=arguments[executable]),
                             )
                         )
@@ -1,14 +1,12 @@
 import os

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckCommandInsteadOfModule(StandardBase):
-    sid = "ANSIBLE0008"
+class CheckCommandInsteadOfModule(RuleBase):
+    rid = "ANS108"
     description = "Commands should not be used in place of modules"
     helptext = "{exec} command used in place of {module} module"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -31,7 +29,7 @@ class CheckCommandInsteadOfModule(StandardBase):
             "rsync": "synchronize",
             "supervisorctl": "supervisorctl",
             "systemctl": "systemd",
-            "sed": "template or lineinfile"
+            "sed": "template or lineinfile",
         }

         if not errors:
@@ -39,14 +37,19 @@ class CheckCommandInsteadOfModule(StandardBase):
                 if task["action"]["__ansible_module__"] in commands:
                     first_cmd_arg = self.get_first_cmd_arg(task)
                     executable = os.path.basename(first_cmd_arg)
+                    cmd = self.get_safe_cmd(task)

                     if (
-                        first_cmd_arg and executable in modules
-                        and task["action"].get("warn", True) and "register" not in task
+                        first_cmd_arg
+                        and executable in modules
+                        and task["action"].get("warn", True)
+                        and "register" not in task
+                        and not any(ch in cmd for ch in self.SHELL_PIPE_CHARS)
                     ):
                         errors.append(
                             self.Error(
                                 task["__line__"],
-                                self.helptext.format(exec=executable, module=modules[executable])
+                                self.helptext.format(exec=executable, module=modules[executable]),
                             )
                         )
@@ -1,15 +1,13 @@
 import re

 from ansiblelater.candidate import Template
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckCompareToEmptyString(StandardBase):
-    sid = "ANSIBLE0012"
-    description = "Don't compare to empty string \"\""
-    helptext = ("use `when: var` rather than `when: var !=` (or conversely `when: not var`)")
-    version = "0.1"
+class CheckCompareToEmptyString(RuleBase):
+    rid = "ANS112"
+    description = 'Don\'t compare to empty string ""'
+    helptext = "use `when: var` rather than `when: var !=` (or conversely `when: not var`)"
     types = ["playbook", "task", "handler", "template"]

     def check(self, candidate, settings):
@@ -1,15 +1,13 @@
 import re

 from ansiblelater.candidate import Template
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckCompareToLiteralBool(StandardBase):
-    sid = "ANSIBLE0013"
+class CheckCompareToLiteralBool(RuleBase):
+    rid = "ANS113"
     description = "Don't compare to True or False"
-    helptext = ("use `when: var` rather than `when: var == True` (or conversely `when: not var`)")
-    version = "0.1"
+    helptext = "use `when: var` rather than `when: var == True` (or conversely `when: not var`)"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,12 +1,10 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckDeprecated(StandardBase):
-    sid = "ANSIBLE9999"
+class CheckDeprecated(RuleBase):
+    rid = "ANS999"
     description = "Deprecated features should not be used"
-    helptext = "'{old}' is deprecated and should not be used anymore. Use '{new}' instead."
-    version = "0.1"
+    helptext = "`{old}` is deprecated and should not be used anymore. Use `{new}` instead."
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -20,7 +18,7 @@ class CheckDeprecated(StandardBase):
                         task["__line__"],
                         self.helptext.format(
                             old="skip_ansible_lint", new="skip_ansible_later"
-                        )
+                        ),
                     )
                 )
         return self.Result(candidate.path, errors)
ansiblelater/rules/CheckDeprecatedBareVars.py (new file, 87 lines)

# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import os

from ansiblelater.rule import RuleBase
from ansiblelater.utils import has_glob, has_jinja


class CheckDeprecatedBareVars(RuleBase):
    rid = "ANS127"
    description = "Deprecated bare variables in loops must not be used"
    helptext = (
        "bare var '{barevar}' in '{loop_type}' must use full var syntax '{{{{ {barevar} }}}}' "
        "or be converted to a list"
    )
    types = ["playbook", "task", "handler"]

    def check(self, candidate, settings):
        tasks, self.errors = self.get_normalized_tasks(candidate, settings)

        if not self.errors:
            for task in tasks:
                loop_type = next((key for key in task if key.startswith("with_")), None)

                if not loop_type:
                    continue

                if loop_type in [
                    "with_nested",
                    "with_together",
                    "with_flattened",
                    "with_filetree",
                    "with_community.general.filetree",
                ]:
                    # These loops can either take a list defined directly in the task
                    # or a variable that is a list itself. When a single variable is used
                    # we just need to check that one variable, and not iterate over it like
                    # it's a list. Otherwise, loop through and check all items.
                    items = task[loop_type]

                    if not isinstance(items, (list, tuple)):
                        items = [items]
                    for var in items:
                        self._matchvar(var, task, loop_type)
                elif loop_type == "with_subelements":
                    self._matchvar(task[loop_type][0], task, loop_type)
                elif loop_type in ["with_sequence", "with_ini", "with_inventory_hostnames"]:
                    pass
                else:
                    self._matchvar(task[loop_type], task, loop_type)

        return self.Result(candidate.path, self.errors)

    def _matchvar(self, varstring, task, loop_type):
        if isinstance(varstring, str) and not has_jinja(varstring):
            valid = loop_type == "with_fileglob" and bool(
                has_jinja(varstring) or has_glob(varstring),
            )

            valid |= loop_type == "with_filetree" and bool(
                has_jinja(varstring) or varstring.endswith(os.sep),
            )
            if not valid:
                self.errors.append(
                    self.Error(
                        task["__line__"],
                        self.helptext.format(barevar=varstring, loop_type=loop_type),
                    )
                )
ansiblelater/rules/CheckFQCNBuiltin.py (new file, 132 lines)

# Original code written by the authors of ansible-lint

from ansiblelater.rule import RuleBase
from ansiblelater.utils import load_plugin


class CheckFQCNBuiltin(RuleBase):
    rid = "ANS128"
    helptext = "use FQCN `{module_alias}` for module action `{module}`"
    description = "Module actions should use full qualified collection names"
    types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]
    module_aliases = {"block/always/rescue": "block/always/rescue"}

    def check(self, candidate, settings):
        tasks, errors = self.get_normalized_tasks(candidate, settings)

        _builtins = [
            "add_host", "apt", "apt_key", "apt_repository", "assemble", "assert",
            "async_status", "blockinfile", "command", "copy", "cron", "debconf",
            "debug", "dnf", "dpkg_selections", "expect", "fail", "fetch", "file",
            "find", "gather_facts", "get_url", "getent", "git", "group", "group_by",
            "hostname", "import_playbook", "import_role", "import_tasks", "include",
            "include_role", "include_tasks", "include_vars", "iptables", "known_hosts",
            "lineinfile", "meta", "package", "package_facts", "pause", "ping", "pip",
            "raw", "reboot", "replace", "rpm_key", "script", "service", "service_facts",
            "set_fact", "set_stats", "setup", "shell", "slurp", "stat", "subversion",
            "systemd", "sysvinit", "tempfile", "template", "unarchive", "uri", "user",
            "wait_for", "wait_for_connection", "yum", "yum_repository",
        ]

        if errors:
            return self.Result(candidate.path, errors)

        for task in tasks:
            module = task["action"]["__ansible_module_original__"]

            if module not in self.module_aliases:
                loaded_module = load_plugin(module)
                target = loaded_module.resolved_fqcn
                self.module_aliases[module] = target

                if target is None:
                    self.module_aliases[module] = module
                    continue

                if target not in self.module_aliases:
                    self.module_aliases[target] = target

            if module != self.module_aliases[module]:
                module_alias = self.module_aliases[module]
                if module_alias.startswith("ansible.builtin"):
                    legacy_module = module_alias.replace(
                        "ansible.builtin.",
                        "ansible.legacy.",
                        1,
                    )
                    if module != legacy_module:
                        helptext = self.helptext.format(module_alias=module_alias, module=module)
                        if module == "ansible.builtin.include":
                            helptext = (
                                "`ansible.builtin.include_task` or `ansible.builtin.import_tasks` "
                                f"should be used instead of deprecated `{module}`"
                            )

                        errors.append(self.Error(task["__line__"], helptext))
                else:
                    if module.count(".") < 2:
                        errors.append(
                            self.Error(
                                task["__line__"],
                                self.helptext.format(module_alias=module_alias, module=module),
                            )
                        )

        return self.Result(candidate.path, errors)
@@ -19,18 +19,16 @@
 # THE SOFTWARE.
 import re

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckFilePermissionMissing(StandardBase):
-    sid = "ANSIBLE0018"
+class CheckFilePermissionMissing(RuleBase):
+    rid = "ANS118"
     description = "File permissions unset or incorrect"
     helptext = (
         "`mode` parameter should set permissions explicitly (e.g. `mode: 0644`) "
         "to avoid unexpected file permissions"
     )
-    version = "0.2"
     types = ["playbook", "task", "handler"]

     _modules = {
@@ -67,8 +65,7 @@ class CheckFilePermissionMissing(StandardBase):
         mode = task["action"].get("mode", None)
         state = task["action"].get("state", "file")

-        if module not in self._modules and \
-                module not in self._create_modules:
+        if module not in self._modules and module not in self._create_modules:
             return False

         if mode == "preserve" and module not in self._preserve_modules:
@@ -17,22 +17,28 @@
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckFilePermissionOctal(StandardBase):
-    sid = "ANSIBLE0019"
-    description = "Octal file permissions must contain leading zero or be a string"
-    helptext = "numeric file permissions without leading zero can behave in unexpected ways"
-    version = "0.2"
+class CheckFilePermissionOctal(RuleBase):
+    rid = "ANS119"
+    description = "Numeric file permissions without a leading zero can behave unexpectedly"
+    helptext = '`mode: {mode}` should be strings with a leading zero `mode: "0{mode}"`'
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
         tasks, errors = self.get_normalized_tasks(candidate, settings)
         modules = [
-            "assemble", "copy", "file", "ini_file", "lineinfile", "replace", "synchronize",
-            "template", "unarchive"
+            "assemble",
+            "copy",
+            "file",
+            "ini_file",
+            "lineinfile",
+            "replace",
+            "synchronize",
+            "template",
+            "unarchive",
         ]

         if not errors:
@@ -40,28 +46,33 @@ class CheckFilePermissionOctal(StandardBase):
                 if task["action"]["__ansible_module__"] in modules:
                     mode = task["action"].get("mode", None)

-                    if isinstance(mode, int):
-                        if self._is_invalid_permission(mode):
-                            errors.append(self.Error(task["__line__"], self.helptext))
+                    if isinstance(mode, int) and self._is_invalid_permission(mode):
+                        errors.append(
+                            self.Error(task["__line__"], self.helptext.format(mode=mode))
+                        )

         return self.Result(candidate.path, errors)

     @staticmethod
     def _is_invalid_permission(mode):
         other_write_without_read = (
             mode % 8 and mode % 8 < 4 and not (mode % 8 == 1 and (mode >> 6) % 2 == 1)
         )
-        group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4
-                                    and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))
-        user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4
-                                   and not (mode >> 6) % 8 == 1)
+        group_write_without_read = (
+            (mode >> 3) % 8
+            and (mode >> 3) % 8 < 4
+            and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1)
+        )
+        user_write_without_read = (mode >> 6) % 8 and (mode >> 6) % 8 < 4 and (mode >> 6) % 8 != 1
         other_more_generous_than_group = mode % 8 > (mode >> 3) % 8
         other_more_generous_than_user = mode % 8 > (mode >> 6) % 8
         group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8

         return bool(
-            other_write_without_read or group_write_without_read or user_write_without_read
-            or other_more_generous_than_group or other_more_generous_than_user
+            other_write_without_read
+            or group_write_without_read
+            or user_write_without_read
+            or other_more_generous_than_group
+            or other_more_generous_than_user
             or group_more_generous_than_user
         )
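Note: the `_is_invalid_permission()` heuristic above flags a numeric mode when some class gets write without read, or when "other" ends up broader than group or owner. A quick standalone illustration of that arithmetic on a few literal modes (this duplicates the logic for demonstration only and is not part of the diff):

def is_invalid_permission(mode):
    # Mirrors the rule above: write-without-read per class, or "other"
    # broader than group/owner, or group broader than owner.
    other, group, user = mode % 8, (mode >> 3) % 8, (mode >> 6) % 8
    return bool(
        (other and other < 4 and not (other == 1 and user % 2 == 1))
        or (group and group < 4 and not (group == 1 and user % 2 == 1))
        or (user and user < 4 and user != 1)
        or other > group
        or other > user
        or group > user
    )


print(is_invalid_permission(0o644))  # False: owner rw, group/other read-only
print(is_invalid_permission(0o602))  # True: "other" can write but not read
print(is_invalid_permission(0o747))  # True: "other" broader than group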
@@ -1,14 +1,12 @@
 import re

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckFilterSeparation(StandardBase):
-    sid = "ANSIBLE0016"
+class CheckFilterSeparation(RuleBase):
+    rid = "ANS116"
     description = "Jinja2 filters should be separated with spaces"
     helptext = "no suitable numbers of spaces (required: 1)"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]

     def check(self, candidate, settings):
@@ -16,16 +14,18 @@ class CheckFilterSeparation(StandardBase):

         matches = []
         braces = re.compile("{{(.*?)}}")
-        filters = re.compile(r"(?<=\|)([\s]{2,}[^\s}]+|[^\s]+)|([^\s{]+[\s]{2,}|[^\s]+)(?=\|)")
+        filters = re.compile(r"(?<=\|)((\s{2,})*\S+)|(\S+(\s{2,})*)(?=\|)")

         if not errors:
             for i, line in yamllines:
                 match = braces.findall(line)
                 if match:
                     for item in match:
+                        # replace potential regex in filters
+                        item = re.sub(r"\(.+\)", "(dummy)", item)
                         matches.append((i, item))

-            for i, line in matches:
-                if filters.findall(line):
+            for i, item in matches:
+                if filters.findall(item):
                     errors.append(self.Error(i, self.helptext))
         return self.Result(candidate.path, errors)
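Note: the tightened `filters` regex above only produces a finding when a `|` has either no whitespace or a run of two or more spaces next to it, and the added `re.sub()` masks parenthesised filter arguments first so pipes inside them cannot trigger false positives. A quick standalone check with made-up example strings:

import re

filters = re.compile(r"(?<=\|)((\s{2,})*\S+)|(\S+(\s{2,})*)(?=\|)")

ok = " result | default(42) "   # exactly one space on each side of the pipe
bad = " result|default(42) "    # no space around the pipe

print(bool(filters.findall(ok)))   # False -> no error reported
print(bool(filters.findall(bad)))  # True  -> rule reports an error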
@@ -17,15 +17,14 @@
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckGitHasVersion(StandardBase):
-    sid = "ANSIBLE0020"
+class CheckGitHasVersion(RuleBase):
+    rid = "ANS120"
     description = "Git checkouts should use explicit version"
     helptext = "git checkouts should point to an explicit commit or tag, not `latest`"
-    version = "0.2"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,21 +1,41 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckInstallUseLatest(StandardBase):
-    sid = "ANSIBLE0009"
+class CheckInstallUseLatest(RuleBase):
+    rid = "ANS109"
     description = "Package installs should use present, not latest"
     helptext = "package installs should use `state=present` with or without a version"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
         tasks, errors = self.get_normalized_tasks(candidate, settings)
         package_managers = [
-            "yum", "apt", "dnf", "homebrew", "pacman", "openbsd_package", "pkg5", "portage",
-            "pkgutil", "slackpkg", "swdepot", "zypper", "bundler", "pip", "pear", "npm", "yarn",
-            "gem", "easy_install", "bower", "package", "apk", "openbsd_pkg", "pkgng", "sorcery",
-            "xbps"
+            "yum",
+            "apt",
+            "dnf",
+            "homebrew",
+            "pacman",
+            "openbsd_package",
+            "pkg5",
+            "portage",
+            "pkgutil",
+            "slackpkg",
+            "swdepot",
+            "zypper",
+            "bundler",
+            "pip",
+            "pear",
+            "npm",
+            "yarn",
+            "gem",
+            "easy_install",
+            "bower",
+            "package",
+            "apk",
+            "openbsd_pkg",
+            "pkgng",
+            "sorcery",
+            "xbps",
         ]

         if not errors:
ansiblelater/rules/CheckKeyOrder.py (new file, 89 lines)

# Original code written by the authors of ansible-lint

import functools

from ansiblelater.rule import RuleBase

SORTER_TASKS = (
    "name",
    # "__module__",
    # "action",
    # "args",
    None,  # <-- None include all modules that not using action and *
    # "when",
    # "notify",
    # "tags",
    "block",
    "rescue",
    "always",
)


class CheckKeyOrder(RuleBase):
    rid = "ANS129"
    description = "Check for recommended key order"
    helptext = "{type} key order can be improved to `{sorted_keys}`"
    types = ["playbook", "task", "handler"]

    def check(self, candidate, settings):
        errors = []
        tasks, err = self.get_normalized_tasks(candidate, settings)

        if err:
            return self.Result(candidate.path, err)

        for task in tasks:
            is_sorted, keys = self._sort_keys(task.get("__raw_task__"))
            if not is_sorted:
                errors.append(
                    self.Error(
                        task["__line__"],
                        self.helptext.format(type="task", sorted_keys=", ".join(keys)),
                    )
                )

        if candidate.kind == "playbook":
            tasks, err = self.get_tasks(candidate, settings)

            if err:
                return self.Result(candidate.path, err)

            for task in tasks:
                is_sorted, keys = self._sort_keys(task)
                if not is_sorted:
                    errors.append(
                        self.Error(
                            task["__line__"],
                            self.helptext.format(type="play", sorted_keys=", ".join(keys)),
                        )
                    )

        return self.Result(candidate.path, errors)

    @staticmethod
    def _sort_keys(task):
        if not task:
            return True, []

        keys = [str(key) for key in task if not key.startswith("_")]
        sorted_keys = sorted(keys, key=functools.cmp_to_key(_task_property_sorter))

        return (keys == sorted_keys), sorted_keys


def _task_property_sorter(property1, property2):
    """Sort task properties based on SORTER."""
    v_1 = _get_property_sort_index(property1)
    v_2 = _get_property_sort_index(property2)
    return (v_1 > v_2) - (v_1 < v_2)


def _get_property_sort_index(name):
    """Return the index of the property in the sorter."""
    a_index = -1
    for i, v in enumerate(SORTER_TASKS):
        if v == name:
            return i
        if v is None:
            a_index = i
    return a_index
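Note: in the new rule above, every key that is not explicitly listed in `SORTER_TASKS` sorts to the position of the `None` placeholder, so `name` always comes first and `block`/`rescue`/`always` last, while everything else keeps its relative order (Python's sort is stable). A small standalone sketch of the same comparator, duplicated here for illustration rather than imported:

import functools

SORTER_TASKS = ("name", None, "block", "rescue", "always")


def sort_index(name):
    # Position in SORTER_TASKS; unknown keys fall back to the None slot.
    fallback = -1
    for i, v in enumerate(SORTER_TASKS):
        if v == name:
            return i
        if v is None:
            fallback = i
    return fallback


def cmp(a, b):
    return (sort_index(a) > sort_index(b)) - (sort_index(a) < sort_index(b))


keys = ["when", "ansible.builtin.debug", "name", "tags"]
print(sorted(keys, key=functools.cmp_to_key(cmp)))
# -> ['name', 'when', 'ansible.builtin.debug', 'tags']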
@@ -1,14 +1,12 @@
 import re

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckLiteralBoolFormat(StandardBase):
-    sid = "ANSIBLE0014"
+class CheckLiteralBoolFormat(RuleBase):
+    rid = "ANS114"
     description = "Literal bools should be consistent"
     helptext = "literal bools should be written as `{bools}`"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]

     def check(self, candidate, settings):
@@ -1,14 +1,12 @@
 # Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp>
 # Copyright (c) 2018, Ansible Project
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckLocalAction(StandardBase):
-    sid = "ANSIBLE0024"
+class CheckLocalAction(RuleBase):
+    rid = "ANS124"
     description = "Don't use local_action"
-    helptext = ("`delegate_to: localhost` should be used instead of `local_action`")
-    version = "0.2"
+    helptext = "`delegate_to: localhost` should be used instead of `local_action`"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,15 +1,13 @@
 # Copyright (c) 2018, Ansible Project
 from nested_lookup import nested_lookup

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckMetaChangeFromDefault(StandardBase):
-    sid = "ANSIBLE0021"
+class CheckMetaChangeFromDefault(RuleBase):
+    rid = "ANS121"
     description = "Roles meta/main.yml default values should be changed"
     helptext = "meta/main.yml default values should be changed for: `{field}`"
-    version = "0.2"
     types = ["meta"]

     def check(self, candidate, settings):
@@ -24,7 +22,7 @@ class CheckMetaChangeFromDefault(StandardBase):

         if not errors:
             for field, default in field_defaults:
-                pair = "{field}: {default}".format(field=field, default=default)
+                pair = f"{field}: {default}"
                 lookup = nested_lookup(field, content)
                 if lookup and default in nested_lookup(field, content):
                     errors.append(self.Error(None, self.helptext.format(field=pair)))
@@ -1,14 +1,12 @@
 from nested_lookup import nested_lookup

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckMetaMain(StandardBase):
-    sid = "ANSIBLE0002"
+class CheckMetaMain(RuleBase):
+    rid = "ANS102"
     description = "Roles must contain suitable meta/main.yml"
     helptext = "file should contain `{key}` key"
-    version = "0.1"
     types = ["meta"]

     def check(self, candidate, settings):
@@ -16,8 +14,8 @@ class CheckMetaMain(StandardBase):
         keys = ["author", "description", "min_ansible_version", "platforms"]

         if not errors:
-            has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content.keys())
-            has_dependencies = (isinstance(content, dict) and "dependencies" in content.keys())
+            has_galaxy_info = isinstance(content, dict) and "galaxy_info" in content
+            has_dependencies = isinstance(content, dict) and "dependencies" in content

             if not has_galaxy_info:
                 errors.append(self.Error(None, self.helptext.format(key="galaxy_info")))
@@ -1,14 +1,12 @@
 from collections import defaultdict

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckNameFormat(StandardBase):
-    sid = "ANSIBLE0007"
+class CheckNameFormat(RuleBase):
+    rid = "ANS107"
     description = "Name of tasks and handlers must be formatted"
-    helptext = "name '{name}' should start with uppercase"
-    version = "0.1"
+    helptext = "name `{name}` should start with uppercase"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -19,7 +17,7 @@ class CheckNameFormat(StandardBase):
             for task in tasks:
                 if "name" in task:
                     namelines[task["name"]].append(task["__line__"])
-            for (name, lines) in namelines.items():
+            for name, lines in namelines.items():
                 if name and not name[0].isupper():
                     errors.append(self.Error(lines[-1], self.helptext.format(name=name)))
@@ -1,12 +1,10 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckNamedTask(StandardBase):
-    sid = "ANSIBLE0006"
+class CheckNamedTask(RuleBase):
+    rid = "ANS106"
     description = "Tasks and handlers must be named"
-    helptext = "module '{module}' used without or empty `name` attribute"
-    version = "0.1"
+    helptext = "module `{module}` used without or empty `name` attribute"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,12 +1,10 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckNativeYaml(StandardBase):
-    sid = "LINT0008"
+class CheckNativeYaml(RuleBase):
+    rid = "YML108"
     description = "Use YAML format for tasks and handlers rather than key=value"
     helptext = "task arguments appear to be in key value rather than YAML format"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Author: Adrián Tóth <adtoth@redhat.com>
 #
 # Copyright (c) 2020, Red Hat, Inc.
@@ -22,18 +21,16 @@
 # THE SOFTWARE.
 import re

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckNestedJinja(StandardBase):
-    sid = "ANSIBLE0023"
+class CheckNestedJinja(RuleBase):
+    rid = "ANS123"
     description = "Don't use nested Jinja2 pattern"
     helptext = (
         "there should not be any nested jinja pattern "
         "like `{{ list_one + {{ list_two | max }} }}`"
     )
-    version = "0.2"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars"]

     def check(self, candidate, settings):
@@ -51,7 +48,7 @@ class CheckNestedJinja(StandardBase):
                 for item in match:
                     matches.append((i, item))

-        for i, line in matches:
+        for i, _ in matches:
             errors.append(self.Error(i, self.helptext))

         return self.Result(candidate.path, errors)
@@ -1,14 +1,12 @@
 # Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp>
 # Copyright (c) 2018, Ansible Project
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckRelativeRolePaths(StandardBase):
-    sid = "ANSIBLE0025"
+class CheckRelativeRolePaths(RuleBase):
+    rid = "ANS125"
     description = "Don't use a relative path in a role"
     helptext = "`copy` and `template` modules don't need relative path for `src`"
-    version = "0.2"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -26,7 +24,7 @@ class CheckRelativeRolePaths(StandardBase):
                 path_to_check = None

                 if module in module_to_path_folder and "src" in task["action"]:
-                    path_to_check = "../{}".format(module_to_path_folder[module])
+                    path_to_check = f"../{module_to_path_folder[module]}"

                 if path_to_check and path_to_check in task["action"]["src"]:
                     errors.append(self.Error(task["__line__"], self.helptext))
@@ -1,14 +1,12 @@
 from ansible.parsing.yaml.objects import AnsibleMapping

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckScmInSrc(StandardBase):
-    sid = "ANSIBLE0005"
+class CheckScmInSrc(RuleBase):
+    rid = "ANS105"
     description = "Use `scm:` key rather than `src: scm+url`"
     helptext = "usage of `src: scm+url` not recommended"
-    version = "0.1"
     types = ["rolesfile"]

     def check(self, candidate, settings):
@@ -16,8 +14,11 @@ class CheckScmInSrc(StandardBase):

         if not errors:
             for role in roles:
-                if isinstance(role, AnsibleMapping):
-                    if "+" in role.get("src"):
-                        errors.append(self.Error(role["__line__"], self.helptext))
+                if (
+                    isinstance(role, AnsibleMapping)
+                    and bool(role.get("src"))
+                    and "+" in role.get("src")
+                ):
+                    errors.append(self.Error(role["__line__"], self.helptext))

         return self.Result(candidate.path, errors)
@@ -1,14 +1,10 @@
-import re
-
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckShellInsteadCommand(StandardBase):
-    sid = "ANSIBLE0010"
+class CheckShellInsteadCommand(RuleBase):
+    rid = "ANS110"
     description = "Shell should only be used when essential"
     helptext = "shell should only be used when piping, redirecting or chaining commands"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -22,13 +18,8 @@ class CheckShellInsteadCommand(StandardBase):
                 if "executable" in task["action"]:
                     continue

-                if "cmd" in task["action"]:
-                    cmd = task["action"].get("cmd", [])
-                else:
-                    cmd = " ".join(task["action"].get("__ansible_arguments__", []))
-
-                unjinja = re.sub(r"\{\{[^\}]*\}\}", "JINJA_VAR", cmd)
-                if not any(ch in unjinja for ch in "&|<>;$\n*[]{}?"):
+                cmd = self.get_safe_cmd(task)
+                if not any(ch in cmd for ch in self.SHELL_PIPE_CHARS):
                     errors.append(self.Error(task["__line__"], self.helptext))

         return self.Result(candidate.path, errors)
@@ -1,15 +1,13 @@
 import re
 from collections import defaultdict

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckTaskSeparation(StandardBase):
-    sid = "ANSIBLE0001"
+class CheckTaskSeparation(RuleBase):
+    rid = "ANS101"
     description = "Single tasks should be separated by empty line"
     helptext = "missing task separation (required: 1 empty line)"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,14 +1,12 @@
 from collections import defaultdict

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckUniqueNamedTask(StandardBase):
-    sid = "ANSIBLE0003"
+class CheckUniqueNamedTask(RuleBase):
+    rid = "ANS103"
     description = "Tasks and handlers must be uniquely named within a single file"
-    helptext = "name '{name}' appears multiple times"
-    version = "0.1"
+    helptext = "name `{name}` appears multiple times"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -20,7 +18,7 @@ class CheckUniqueNamedTask(StandardBase):
             for task in tasks:
                 if "name" in task:
                     namelines[task["name"]].append(task["__line__"])
-            for (name, lines) in namelines.items():
+            for name, lines in namelines.items():
                 if name and len(lines) > 1:
                     errors.append(self.Error(lines[-1], self.helptext.format(name=name)))
@@ -1,15 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckWhenFormat(StandardBase):
-    sid = "ANSIBLE0022"
+class CheckWhenFormat(RuleBase):
+    rid = "ANS122"
     description = "Don't use Jinja2 in when"
     helptext = (
-        "`when` is a raw Jinja2 expression, redundant {{ }} "
-        "should be removed from variable(s)"
+        "`when` is a raw Jinja2 expression, redundant `{{ }}` should be removed from variable(s)"
     )
-    version = "0.2"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,15 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlColons(StandardBase):
-    sid = "LINT0005"
+class CheckYamlColons(RuleBase):
+    rid = "YML105"
     description = "YAML should use consistent number of spaces around colons"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]

     def check(self, candidate, settings):
-        options = "rules: {{colons: {conf}}}".format(conf=settings["yamllint"]["colons"])
+        options = f"rules: {{colons: {settings['yamllint']['colons']}}}"
         errors = self.run_yamllint(candidate, options)

         return self.Result(candidate.path, errors)
@ -1,17 +1,13 @@
|
|||||||
from ansiblelater.standard import StandardBase
|
from ansiblelater.rule import RuleBase
|
||||||
|
|
||||||
|
|
||||||
class CheckYamlDocumentEnd(StandardBase):
|
class CheckYamlDocumentEnd(RuleBase):
|
||||||
|
rid = "YML109"
|
||||||
sid = "LINT0009"
|
description = "YAML document end marker should match configuration"
|
||||||
description = "YAML should contain document end marker"
|
|
||||||
version = "0.1"
|
|
||||||
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
|
types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
|
||||||
|
|
||||||
def check(self, candidate, settings):
|
def check(self, candidate, settings):
|
||||||
options = "rules: {{document-end: {conf}}}".format(
|
options = f"rules: {{document-end: {settings['yamllint']['document-end']}}}"
|
||||||
conf=settings["yamllint"]["document-end"]
|
|
||||||
)
|
|
||||||
errors = self.run_yamllint(candidate, options)
|
errors = self.run_yamllint(candidate, options)
|
||||||
|
|
||||||
return self.Result(candidate.path, errors)
|
return self.Result(candidate.path, errors)
|
||||||
@@ -1,17 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlDocumentStart(StandardBase):
+class CheckYamlDocumentStart(RuleBase):
+    rid = "YML104"
-    sid = "LINT0004"
+    description = "YAML document start marker should match configuration"
-    description = "YAML should contain document start marker"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]

     def check(self, candidate, settings):
-        options = "rules: {{document-start: {conf}}}".format(
+        options = f"rules: {{document-start: {settings['yamllint']['document-start']}}}"
-            conf=settings["yamllint"]["document-start"]
-        )
         errors = self.run_yamllint(candidate, options)

         return self.Result(candidate.path, errors)
@@ -1,15 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlEmptyLines(StandardBase):
+class CheckYamlEmptyLines(RuleBase):
+    rid = "YML101"
-    sid = "LINT0001"
     description = "YAML should not contain unnecessarily empty lines"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]

     def check(self, candidate, settings):
-        options = "rules: {{empty-lines: {conf}}}".format(conf=settings["yamllint"]["empty-lines"])
+        options = f"rules: {{empty-lines: {settings['yamllint']['empty-lines']}}}"
         errors = self.run_yamllint(candidate, options)

         return self.Result(candidate.path, errors)
@@ -1,14 +1,12 @@
 import os

-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlFile(StandardBase):
+class CheckYamlFile(RuleBase):
+    rid = "YML106"
-    sid = "LINT0006"
     description = "Roles file should be in yaml format"
     helptext = "file does not have a .yml extension"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -1,12 +1,10 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlHasContent(StandardBase):
+class CheckYamlHasContent(RuleBase):
+    rid = "YML107"
-    sid = "LINT0007"
     description = "Files should contain useful content"
     helptext = "the file appears to have no useful content"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "defaults", "meta"]

     def check(self, candidate, settings):
@@ -1,15 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlHyphens(StandardBase):
+class CheckYamlHyphens(RuleBase):
+    rid = "YML103"
-    sid = "LINT0003"
     description = "YAML should use consistent number of spaces after hyphens"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]

     def check(self, candidate, settings):
-        options = "rules: {{hyphens: {conf}}}".format(conf=settings["yamllint"]["hyphens"])
+        options = f"rules: {{hyphens: {settings['yamllint']['hyphens']}}}"
         errors = self.run_yamllint(candidate, options)

         return self.Result(candidate.path, errors)
@@ -1,17 +1,13 @@
-from ansiblelater.standard import StandardBase
+from ansiblelater.rule import RuleBase


-class CheckYamlIndent(StandardBase):
+class CheckYamlIndent(RuleBase):
+    rid = "YML102"
-    sid = "LINT0002"
     description = "YAML should not contain unnecessarily empty lines"
-    version = "0.1"
     types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]

     def check(self, candidate, settings):
-        options = "rules: {{document-start: {conf}}}".format(
+        options = f"rules: {{document-start: {settings['yamllint']['document-start']}}}"
-            conf=settings["yamllint"]["document-start"]
-        )
         errors = self.run_yamllint(candidate, options)

         return self.Result(candidate.path, errors)
ansiblelater/rules/CheckYamlOctalValues.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+from ansiblelater.rule import RuleBase
+
+
+class CheckYamlOctalValues(RuleBase):
+    rid = "YML110"
+    description = "YAML implicit/explicit octal value should match configuration"
+    types = ["playbook", "task", "handler", "rolevars", "hostvars", "groupvars", "meta"]
+
+    def check(self, candidate, settings):
+        options = f"rules: {{octal-values: {settings['yamllint']['octal-values']}}}"
+        errors = self.run_yamllint(candidate, options)
+
+        return self.Result(candidate.path, errors)
@@ -1,5 +1,6 @@
 """Global settings object definition."""

+import importlib.resources
 import os

 import anyconfig

@@ -7,7 +8,6 @@ import jsonschema.exceptions
 import pathspec
 from appdirs import AppDirs
 from jsonschema._utils import format_as_index
-from pkg_resources import resource_filename

 from ansiblelater import utils

@@ -15,7 +15,7 @@ config_dir = AppDirs("ansible-later").user_config_dir
 default_config_file = os.path.join(config_dir, "config.yml")


-class Settings(object):
+class Settings:
     """
     Create an object with all necessary settings.

@@ -25,14 +25,13 @@ class Settings(object):
     - provides cli parameters
     """

-    def __init__(self, args={}, config_file=default_config_file):
+    def __init__(self, args, config_file=default_config_file):
         """
         Initialize a new settings class.

         :param args: An optional dict of options, arguments and commands from the CLI.
         :param config_file: An optional path to a yaml config file.
         :returns: None

         """
         self.config_file = config_file
         self.schema = None

@@ -42,6 +41,9 @@ class Settings(object):
         self._update_filelist()

     def _set_args(self, args):
+        if args is None:
+            args = {}
+
         defaults = self._get_defaults()
         self.config_file = args.get("config_file") or default_config_file

@@ -102,13 +104,13 @@ class Settings(object):
         if f not in defaults["ansible"]["custom_modules"]:
             defaults["ansible"]["custom_modules"].append(f)

-        if defaults["rules"]["buildin"]:
+        if defaults["rules"]["builtin"]:
-            defaults["rules"]["standards"].append(
+            ref = importlib.resources.files("ansiblelater") / "rules"
-                os.path.join(resource_filename("ansiblelater", "rules"))
+            with importlib.resources.as_file(ref) as path:
-            )
+                defaults["rules"]["dir"].append(path)

-        defaults["rules"]["standards"] = [
+        defaults["rules"]["dir"] = [
-            os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["standards"]
+            os.path.relpath(os.path.normpath(p)) for p in defaults["rules"]["dir"]
         ]

         return defaults
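The hunk above drops the deprecated `pkg_resources.resource_filename` call in favor of `importlib.resources`. A minimal standalone sketch of that pattern, assuming only the package and subdirectory names shown in the hunk (everything else is illustrative):

import importlib.resources

# Resolve a directory shipped inside an installed package to a concrete
# filesystem path; this mirrors the pattern used in the hunk above.
ref = importlib.resources.files("ansiblelater") / "rules"
with importlib.resources.as_file(ref) as path:
    # `path` is a pathlib.Path valid inside this block,
    # e.g. .../site-packages/ansiblelater/rules
    print(path)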
@@ -116,18 +118,20 @@ class Settings(object):
     def _get_defaults(self):
         defaults = {
             "rules": {
-                "buildin": True,
+                "builtin": True,
-                "standards": [],
+                "dir": [],
-                "filter": [],
+                "include_filter": [],
                 "exclude_filter": [],
-                "warning_filter": ["ANSIBLE9999"],
+                "warning_filter": [
+                    "ANS128",
+                    "ANS999",
+                ],
                 "ignore_dotfiles": True,
                 "exclude_files": [],
-                "version": ""
             },
             "logging": {
                 "level": "WARNING",
-                "json": False
+                "json": False,
             },
             "ansible": {
                 "custom_modules": [],

@@ -140,7 +144,7 @@ class Settings(object):
                 "exclude": [
                     "meta",
                     "debug",
-                    "block",
+                    "block/always/rescue",
                     "include_role",
                     "import_role",
                     "include_tasks",

@@ -164,17 +168,21 @@ class Settings(object):
                     "indent-sequences": True,
                 },
                 "hyphens": {
-                    "max-spaces-after": 1
+                    "max-spaces-after": 1,
                 },
                 "document-start": {
-                    "present": True
+                    "present": True,
                 },
                 "document-end": {
-                    "present": True
+                    "present": False,
                 },
                 "colons": {
                     "max-spaces-before": 0,
-                    "max-spaces-after": 1
+                    "max-spaces-after": 1,
+                },
+                "octal-values": {
+                    "forbid-implicit-octal": True,
+                    "forbid-explicit-octal": True,
                 },
             },
         }

@@ -188,14 +196,16 @@ class Settings(object):
             anyconfig.validate(config, self.schema, ac_schema_safe=False)
             return True
         except jsonschema.exceptions.ValidationError as e:
-            schema_error = (
+            validator = e.validator
-                "Error while loading configuration:\n"
+            path = format_as_index(
-                "Failed validating '{validator}' in schema{schema}"
+                next(iter(e.absolute_path)),
-            ).format(
+                list(e.absolute_path)[1:],
-                validator=e.validator, schema=format_as_index(list(e.relative_schema_path)[:-1])
             )
+            msg = e.message

             utils.sysexit_with_message(
-                "{schema}: {msg}".format(schema=schema_error, msg=e.message)
+                "Error while loading configuration:\n"
+                f"Failed validating '{validator}' at {path}: {msg}"
             )

     def _update_filelist(self):

@@ -203,13 +213,14 @@ class Settings(object):
         excludes = self.config["rules"]["exclude_files"]
         ignore_dotfiles = self.config["rules"]["ignore_dotfiles"]

-        if ignore_dotfiles and not self.args_files:
+        if ignore_dotfiles:
             excludes.append(".*")
-        else:
+
+        if self.args_files:
             del excludes[:]

         filelist = []
-        for root, dirs, files in os.walk("."):
+        for root, _dirs, files in os.walk("."):
             for filename in files:
                 filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename))))
@@ -22,10 +22,8 @@ def test_critical(capsys, mocker):
     _, stderr = capsys.readouterr()

     print(
-        "{}{}CRITICAL:{} foo\n{}".format(
+        f"{colorama.Fore.RED}{colorama.Style.BRIGHT}CRITICAL:{colorama.Style.NORMAL} foo\n"
-            colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.NORMAL,
+        f"{colorama.Style.RESET_ALL}"
-            colorama.Style.RESET_ALL
-        )
     )
     x, _ = capsys.readouterr()

@@ -38,10 +36,8 @@ def test_error(capsys, mocker):
     _, stderr = capsys.readouterr()

     print(
-        "{}{}ERROR:{} foo\n{}".format(
+        f"{colorama.Fore.RED}{colorama.Style.BRIGHT}ERROR:{colorama.Style.NORMAL} foo\n"
-            colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.NORMAL,
+        f"{colorama.Style.RESET_ALL}"
-            colorama.Style.RESET_ALL
-        )
     )
     x, _ = capsys.readouterr()

@@ -54,10 +50,8 @@ def test_warn(capsys, mocker):
     stdout, _ = capsys.readouterr()

     print(
-        "{}{}WARNING:{} foo\n{}".format(
+        f"{colorama.Fore.YELLOW}{colorama.Style.BRIGHT}WARNING:{colorama.Style.NORMAL} foo\n"
-            colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.NORMAL,
+        f"{colorama.Style.RESET_ALL}"
-            colorama.Style.RESET_ALL
-        )
     )
     x, _ = capsys.readouterr()

@@ -70,10 +64,8 @@ def test_info(capsys, mocker):
     stdout, _ = capsys.readouterr()

     print(
-        "{}{}INFO:{} foo\n{}".format(
+        f"{colorama.Fore.BLUE}{colorama.Style.BRIGHT}INFO:{colorama.Style.NORMAL} foo\n"
-            colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.NORMAL,
+        f"{colorama.Style.RESET_ALL}"
-            colorama.Style.RESET_ALL
-        )
     )
     x, _ = capsys.readouterr()
@@ -1,14 +1,13 @@
 """Global utils collection."""

-from __future__ import print_function

 import contextlib
-import os
 import re
 import sys
-from distutils.version import LooseVersion
+from contextlib import suppress
+from functools import lru_cache

 import yaml
+from ansible.plugins.loader import module_loader

 from ansiblelater import logger

@@ -24,32 +23,17 @@ def count_spaces(c_string):
     leading_spaces = 0
     trailing_spaces = 0

-    for i, e in enumerate(c_string):
+    for _i, e in enumerate(c_string):
         if not e.isspace():
             break
         leading_spaces += 1

-    for i, e in reversed(list(enumerate(c_string))):
+    for _i, e in reversed(list(enumerate(c_string))):
         if not e.isspace():
             break
         trailing_spaces += 1

-    return ((leading_spaces, trailing_spaces))
+    return (leading_spaces, trailing_spaces)


-def get_property(prop):
-    currentdir = os.path.dirname(os.path.realpath(__file__))
-    parentdir = os.path.dirname(currentdir)
-    result = re.search(
-        r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
-        open(os.path.join(parentdir, "__init__.py")).read()
-    )
-    return result.group(1)


-def standards_latest(standards):
-    return max([standard.version for standard in standards if standard.version] or ["0.1"],
-               key=LooseVersion)


 def lines_ranges(lines_spec):

@@ -74,10 +58,8 @@ def safe_load(string):
     :returns: dict

     """
-    try:
+    with suppress(yaml.scanner.ScannerError):
         return yaml.safe_load(string) or {}
-    except yaml.scanner.ScannerError as e:
-        print(str(e))


 @contextlib.contextmanager

@@ -96,14 +78,24 @@ def open_file(filename, mode="r"):

 def add_dict_branch(tree, vector, value):
     key = vector[0]
-    tree[key] = value \
+    tree[key] = (
-        if len(vector) == 1 \
+        value if len(vector) == 1 else add_dict_branch(tree.get(key, {}), vector[1:], value)
-        else add_dict_branch(tree[key] if key in tree else {},
+    )
-                             vector[1:],
-                             value)
     return tree


+def has_jinja(value):
+    """Return true if a string seems to contain jinja templating."""
+    re_has_jinja = re.compile(r"{[{%#].*[%#}]}", re.DOTALL)
+    return bool(isinstance(value, str) and re_has_jinja.search(value))


+def has_glob(value):
+    """Return true if a string looks like having a glob pattern."""
+    re_has_glob = re.compile("[][*?]")
+    return bool(isinstance(value, str) and re_has_glob.search(value))


 def sysexit(code=1):
     sys.exit(code)
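For quick reference, a minimal usage sketch of the `has_jinja` and `has_glob` helpers added in the hunk above; the regular expressions are copied from the hunk, the sample strings are made up:

import re

# Patterns copied from the helpers added above.
re_has_jinja = re.compile(r"{[{%#].*[%#}]}", re.DOTALL)
re_has_glob = re.compile("[][*?]")

def has_jinja(value):
    """Return True if a string seems to contain Jinja templating."""
    return bool(isinstance(value, str) and re_has_jinja.search(value))

def has_glob(value):
    """Return True if a string looks like it contains a glob pattern."""
    return bool(isinstance(value, str) and re_has_glob.search(value))

print(has_jinja("{{ ansible_hostname }}"))  # True
print(has_glob("/etc/*.conf"))              # True
print(has_jinja("name=web state=started"))  # False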
@@ -120,5 +112,23 @@ class Singleton(type):

     def __call__(cls, *args, **kwargs):
         if cls not in cls._instances:
-            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+            cls._instances[cls] = super().__call__(*args, **kwargs)
         return cls._instances[cls]


+@lru_cache
+def load_plugin(name):
+    """Return loaded ansible plugin/module."""
+    loaded_module = module_loader.find_plugin_with_context(
+        name,
+        ignore_deprecated=True,
+        check_aliases=True,
+    )
+    if not loaded_module.resolved and name.startswith("ansible.builtin."):
+        # fallback to core behavior of using legacy
+        loaded_module = module_loader.find_plugin_with_context(
+            name.replace("ansible.builtin.", "ansible.legacy."),
+            ignore_deprecated=True,
+            check_aliases=True,
+        )
+    return loaded_module
@@ -21,15 +21,13 @@
 # THE SOFTWARE.

 import codecs
-import glob
-import imp
 import os
+from contextlib import suppress

 import ansible.parsing.mod_args
 import yaml
 from ansible import constants
-from ansible.errors import AnsibleError
+from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.errors import AnsibleParserError
 from ansible.parsing.dataloader import DataLoader
 from ansible.parsing.mod_args import ModuleArgsParser
 from ansible.parsing.yaml.constructor import AnsibleConstructor

@@ -37,8 +35,7 @@ from ansible.parsing.yaml.loader import AnsibleLoader
 from ansible.template import Templar
 from yaml.composer import Composer

-from ansiblelater.exceptions import LaterAnsibleError
+from ansiblelater.exceptions import LaterAnsibleError, LaterError
-from ansiblelater.exceptions import LaterError

 try:
     # Try to import the Ansible 2 module first, it's the future-proof one

@@ -68,7 +65,9 @@ def ansible_template(basedir, varname, templatevars, **kwargs):


 try:
-    from ansible.plugins import module_loader
+    from ansible.plugins.loader import init_plugin_loader, module_loader
+
+    init_plugin_loader()
 except ImportError:
     from ansible.plugins.loader import module_loader
@@ -129,24 +128,6 @@ BLOCK_NAME_TO_ACTION_TYPE_MAP = {
 }


-def load_plugins(directory):
-    result = []
-    fh = None

-    for pluginfile in glob.glob(os.path.join(directory, "[A-Za-z]*.py")):

-        pluginname = os.path.basename(pluginfile.replace(".py", ""))
-        try:
-            fh, filename, desc = imp.find_module(pluginname, [directory])
-            mod = imp.load_module(pluginname, fh, filename, desc)
-            obj = getattr(mod, pluginname)()
-            result.append(obj)
-        finally:
-            if fh:
-                fh.close()
-    return result


 def tokenize(line):
     tokens = line.lstrip().split(" ")
     if tokens[0] == "-":

@@ -155,8 +136,8 @@ def tokenize(line):
         tokens = tokens[1:]
     command = tokens[0].replace(":", "")

-    args = list()
+    args = []
-    kwargs = dict()
+    kwargs = {}
     nonkvfound = False
     for arg in tokens[1:]:
         if "=" in arg and not nonkvfound:
@@ -171,10 +152,11 @@ def tokenize(line):
 def _playbook_items(pb_data):
     if isinstance(pb_data, dict):
         return pb_data.items()
-    elif not pb_data:
+    if not pb_data:
         return []
-    else:
     return [item for play in pb_data for item in play.items()]


 def find_children(playbook, playbook_dir):

@@ -186,7 +168,7 @@ def find_children(playbook, playbook_dir):
     try:
         playbook_ds = parse_yaml_from_file(playbook[0])
     except AnsibleError as e:
-        raise SystemExit(str(e))
+        raise SystemExit(str(e)) from e
     results = []
     basedir = os.path.dirname(playbook[0])
     items = _playbook_items(playbook_ds)
@@ -194,7 +176,7 @@ def find_children(playbook, playbook_dir):
         for child in play_children(basedir, item, playbook[1], playbook_dir):
             if "$" in child["path"] or "{{" in child["path"]:
                 continue
-            valid_tokens = list()
+            valid_tokens = []
             for token in split_args(child["path"]):
                 if "=" in token:
                     break

@@ -205,20 +187,18 @@ def find_children(playbook, playbook_dir):


 def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
-    try:
+    # Hack to skip the following exception when using to_json filter on a variable.
-        value = ansible_template(
+    # I guess the filter doesn't like empty vars...
-            os.path.abspath(basedir), value, variables,
+    with suppress(AnsibleError, ValueError):
-            **dict(kwargs, fail_on_undefined=fail_on_undefined)
+        return ansible_template(
+            os.path.abspath(basedir),
+            value,
+            variables,
+            **dict(kwargs, fail_on_undefined=fail_on_undefined),
         )
-    # Hack to skip the following exception when using to_json filter on a variable.
-    # I guess the filter doesn't like empty vars...
-    except (AnsibleError, ValueError):
-        # templating failed, so just keep value as is.
-        pass
-    return value


-def play_children(basedir, item, parent_type, playbook_dir):
+def play_children(basedir, item, parent_type):
     delegate_map = {
         "tasks": _taskshandlers_children,
         "pre_tasks": _taskshandlers_children,
@@ -234,21 +214,20 @@ def play_children(basedir, item, parent_type, playbook_dir):
     play_library = os.path.join(os.path.abspath(basedir), "library")
     _load_library_if_exists(play_library)

-    if k in delegate_map:
+    if k in delegate_map and v:
-        if v:
+        v = template(
-            v = template(
+            os.path.abspath(basedir),
-                os.path.abspath(basedir),
+            v,
-                v,
+            {"playbook_dir": os.path.abspath(basedir)},
-                dict(playbook_dir=os.path.abspath(basedir)),
+            fail_on_undefined=False,
-                fail_on_undefined=False
+        )
-            )
+        return delegate_map[k](basedir, k, v, parent_type)
-            return delegate_map[k](basedir, k, v, parent_type)
     return []


 def _include_children(basedir, k, v, parent_type):
     # handle include: filename.yml tags=blah
-    (command, args, kwargs) = tokenize("{0}: {1}".format(k, v))
+    (command, args, kwargs) = tokenize(f"{k}: {v}")

     result = path_dwim(basedir, args[0])
     if not os.path.exists(result) and not basedir.endswith("tasks"):

@@ -271,18 +250,20 @@ def _taskshandlers_children(basedir, k, v, parent_type):
             results.extend(
                 _roles_children(
                     basedir,
-                    k, [th["import_role"].get("name")],
+                    k,
+                    [th["import_role"].get("name")],
                     parent_type,
-                    main=th["import_role"].get("tasks_from", "main")
+                    main=th["import_role"].get("tasks_from", "main"),
                 )
             )
         elif "include_role" in th:
             results.extend(
                 _roles_children(
                     basedir,
-                    k, [th["include_role"].get("name")],
+                    k,
+                    [th["include_role"].get("name")],
                     parent_type,
-                    main=th["include_role"].get("tasks_from", "main")
+                    main=th["include_role"].get("tasks_from", "main"),
                 )
             )
         elif "block" in th:
@@ -298,14 +279,11 @@ def append_children(taskhandler, basedir, k, parent_type, results):
     # when taskshandlers_children is called for playbooks, the
     # actual type of the included tasks is the section containing the
     # include, e.g. tasks, pre_tasks, or handlers.
-    if parent_type == "playbook":
+    playbook_section = k if parent_type == "playbook" else parent_type
-        playbook_section = k
-    else:
-        playbook_section = parent_type
     results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section})


-def _roles_children(basedir, k, v, parent_type, main="main"):
+def _roles_children(basedir, k, v, parent_type, main="main"):  # noqa
     results = []
     for role in v:
         if isinstance(role, dict):

@@ -317,10 +295,7 @@ def _roles_children(basedir, k, v, parent_type, main="main"):
                 )
             )
             else:
-                raise SystemExit(
+                raise SystemExit(f"role dict {role} does not contain a 'role' or 'name' key")
-                    "role dict {0} does not contain a 'role' "
-                    "or 'name' key".format(role)
-                )
         else:
             results.extend(_look_for_role_files(basedir, role, main=main))
     return results

@@ -340,7 +315,7 @@ def _rolepath(basedir, role):
         path_dwim(basedir, role),
         # if included from roles/[role]/meta/main.yml
         path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)),
-        path_dwim(basedir, os.path.join("..", "..", role))
+        path_dwim(basedir, os.path.join("..", "..", role)),
     ]

     if constants.DEFAULT_ROLES_PATH:
@@ -382,93 +357,114 @@ def rolename(filepath):
     idx = filepath.find("roles/")
     if idx < 0:
         return ""
-    role = filepath[idx + 6:]
+    role = filepath[idx + 6 :]
-    role = role[:role.find("/")]
+    return role[: role.find("/")]
-    return role


 def _kv_to_dict(v):
     (command, args, kwargs) = tokenize(v)
-    return (dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs))
+    return dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs)


-def normalize_task(task, filename, custom_modules=[]):
+def normalize_task(task, filename, custom_modules=None):
     """Ensure tasks have an action key and strings are converted to python objects."""
-    ansible_action_type = task.get("__ansible_action_type__", "task")
-    if "__ansible_action_type__" in task:
+    def _normalize(task, custom_modules):
-        del (task["__ansible_action_type__"])
+        if custom_modules is None:
+            custom_modules = []
+
+        normalized = {}
+        ansible_parsed_keys = ("action", "local_action", "args", "delegate_to")

+        if is_nested_task(task):
+            _extract_ansible_parsed_keys_from_task(normalized, task, ansible_parsed_keys)
+            # Add dummy action for block/always/rescue statements
+            normalized["action"] = {
+                "__ansible_module__": "block/always/rescue",
+                "__ansible_module_original__": "block/always/rescue",
+                "__ansible_arguments__": "block/always/rescue",
+            }
+            return normalized

+        builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
+        builtin = list(set(builtin + custom_modules))
+        ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
+        mod_arg_parser = ModuleArgsParser(task)

+        try:
+            action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
+        except AnsibleParserError as e:
+            raise LaterAnsibleError(e) from e

+        # denormalize shell -> command conversion
+        if "_uses_shell" in arguments:
+            action = "shell"
+            del arguments["_uses_shell"]

+        for k, v in list(task.items()):
+            if k in ansible_parsed_keys or k == action:
+                # we don"t want to re-assign these values, which were
+                # determined by the ModuleArgsParser() above
+                continue

+            normalized[k] = v

+        # convert builtin fqn calls to short forms because most rules know only
+        # about short calls
+        normalized["action"] = {
+            "__ansible_module__": action.removeprefix("ansible.builtin."),
+            "__ansible_module_original__": action,
+        }

+        if "_raw_params" in arguments:
+            normalized["action"]["__ansible_arguments__"] = (
+                arguments["_raw_params"].strip().split()
+            )
+            del arguments["_raw_params"]
+        else:
+            normalized["action"]["__ansible_arguments__"] = []
+        normalized["action"].update(arguments)

+        return normalized

     # temp. extract metadata
-    ansible_meta = dict()
+    ansible_meta = {}
     for key in ["__line__", "__file__", "__ansible_action_meta__"]:
         default = None

         if key == "__ansible_action_meta__":
-            default = dict()
+            default = {}

         ansible_meta[key] = task.pop(key, default)

-    normalized = dict()
+    ansible_action_type = task.get("__ansible_action_type__", "task")
+    if "__ansible_action_type__" in task:
+        del task["__ansible_action_type__"]

-    builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
+    normalized = _normalize(task, custom_modules)
-    builtin = list(set(builtin + custom_modules))
-    ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
-    mod_arg_parser = ModuleArgsParser(task)

-    try:
-        action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
-    except AnsibleParserError as e:
-        raise LaterAnsibleError("syntax error", e)

-    # denormalize shell -> command conversion
-    if "_uses_shell" in arguments:
-        action = "shell"
-        del (arguments["_uses_shell"])

-    for (k, v) in list(task.items()):
-        if k in ("action", "local_action", "args", "delegate_to") or k == action:
-            # we don"t want to re-assign these values, which were
-            # determined by the ModuleArgsParser() above
-            continue
-        else:
-            normalized[k] = v

-    normalized["action"] = dict(__ansible_module__=action)

-    if "_raw_params" in arguments:
-        normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].strip().split()
-        del (arguments["_raw_params"])
-    else:
-        normalized["action"]["__ansible_arguments__"] = list()
-    normalized["action"].update(arguments)

     normalized[FILENAME_KEY] = filename
     normalized["__ansible_action_type__"] = ansible_action_type

     # add back extracted metadata
-    for (k, v) in ansible_meta.items():
+    for k, v in ansible_meta.items():
         if v:
             normalized[k] = v

     return normalized


-def action_tasks(yaml, file):
+def action_tasks(yaml, candidate):
-    tasks = list()
+    tasks = []
-    if file["filetype"] in ["tasks", "handlers"]:
+    if candidate.filemeta in ["tasks", "handlers"]:
-        tasks = add_action_type(yaml, file["filetype"])
+        tasks = add_action_type(yaml, candidate.filemeta)
     else:
         tasks.extend(extract_from_list(yaml, ["tasks", "handlers", "pre_tasks", "post_tasks"]))

     # Add sub-elements of block/rescue/always to tasks list
     tasks.extend(extract_from_list(tasks, ["block", "rescue", "always"]))
-    # Remove block/rescue/always elements from tasks list
-    block_rescue_always = ("block", "rescue", "always")
-    tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)]

-    allowed = ["include", "include_tasks", "import_playbook", "import_tasks"]
+    return tasks

-    return [task for task in tasks if set(allowed).isdisjoint(task.keys())]


 def task_to_str(task):
@@ -476,16 +472,19 @@ def task_to_str(task):
     if name:
         return name
     action = task.get("action")
-    args = " ".join([
+    args = " ".join(
-        u"{0}={1}".format(k, v)
+        [
-        for (k, v) in action.items()
+            f"{k}={v}"
-        if k not in ["__ansible_module__", "__ansible_arguments__"]
+            for (k, v) in action.items()
-    ] + action.get("__ansible_arguments__"))
+            if k not in ["__ansible_module__", "__ansible_arguments__"]
-    return u"{0} {1}".format(action["__ansible_module__"], args)
+        ]
+        + action.get("__ansible_arguments__")
+    )
+    return "{} {}".format(action["__ansible_module__"], args)


 def extract_from_list(blocks, candidates):
-    results = list()
+    results = []
     for block in blocks:
         for candidate in candidates:
             delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]

@@ -494,18 +493,19 @@ def extract_from_list(blocks, candidates):
                 meta_data = dict(block)
                 for key in delete_meta_keys:
                     meta_data.pop(key, None)
-                results.extend(add_action_type(block[candidate], candidate, meta_data))
+                actions = add_action_type(block[candidate], candidate, meta_data)
+
+                results.extend(actions)
             elif block[candidate] is not None:
                 raise RuntimeError(
-                    "Key '{candidate}' defined, but bad value: '{block}'".format(
+                    f"Key '{candidate}' defined, but bad value: '{block[candidate]!s}'"
-                        candidate=candidate, block=str(block[candidate])
-                    )
                 )
     return results


 def add_action_type(actions, action_type, action_meta=None):
-    results = list()
+    results = []
     for action in actions:
         action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
         if action_meta:
@@ -533,7 +533,7 @@ def parse_yaml_linenumbers(data, filename):
         try:
             mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
         except yaml.constructor.ConstructorError as e:
-            raise LaterError("syntax error", e)
+            raise LaterError("syntax error", e) from e

         if hasattr(node, "__line__"):
             mapping[LINE_NUMBER_KEY] = node.__line__

@@ -548,11 +548,15 @@ def parse_yaml_linenumbers(data, filename):
         loader.compose_node = compose_node
         loader.construct_mapping = construct_mapping
         data = loader.get_single_data() or []
-    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
+    except (
-        raise LaterError("syntax error", e)
+        yaml.parser.ParserError,
-    except (yaml.composer.ComposerError) as e:
+        yaml.scanner.ScannerError,
-        e.problem = "{} {}".format(e.context, e.problem)
+        yaml.constructor.ConstructorError,
-        raise LaterError("syntax error", e)
+    ) as e:
+        raise LaterError("syntax error", e) from e
+    except yaml.composer.ComposerError as e:
+        e.problem = f"{e.context} {e.problem}"
+        raise LaterError("syntax error", e) from e
     return data


@@ -577,14 +581,34 @@ def normalized_yaml(file, options):
         for line in removes:
             lines.remove(line)
     except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
-        raise LaterError("syntax error", e)
+        raise LaterError("syntax error", e) from e
     return lines


+def is_nested_task(task):
+    """Check if task includes block/always/rescue."""
+    # Cannot really trust the input
+    if isinstance(task, str):
+        return False

+    return any(task.get(key) for key in ["block", "rescue", "always"])


+def _extract_ansible_parsed_keys_from_task(result, task, keys):
+    """Return a dict with existing key in task."""
+    for k, v in list(task.items()):
+        if k in keys:
+            # we don't want to re-assign these values, which were
+            # determined by the ModuleArgsParser() above
+            continue
+        result[k] = v
+    return result


 class UnsafeTag:
     """Handle custom yaml unsafe tag."""

-    yaml_tag = u"!unsafe"
+    yaml_tag = "!unsafe"

     def __init__(self, value):
         self.unsafe = value

@@ -597,7 +621,7 @@ class UnsafeTag:
 class VaultTag:
     """Handle custom yaml vault tag."""

-    yaml_tag = u"!vault"
+    yaml_tag = "!vault"

     def __init__(self, value):
         self.unsafe = value
codecov.yml (deleted file, 21 lines)
@@ -1,21 +0,0 @@
-codecov:
-  require_ci_to_pass: true
-coverage:
-  status:
-    project:
-      default:
-        target: auto
-        threshold: 5%
-        branches:
-          - main
-        if_ci_failed: error
-        informational: false
-        only_pulls: false
-    patch:
-      default:
-        target: auto
-        threshold: 5%
-        branches:
-          - main
-        if_ci_failed: error
-        only_pulls: false
(deleted file)
@@ -1,26 +0,0 @@
-FROM arm32v7/python:3.10-alpine@sha256:265f910efda896cd5b77fa115971ce29fc57bc0cb5ebc626358fcdd6d92ffac2
-
-LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
-LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
-LABEL org.opencontainers.image.title="ansible-later"
-LABEL org.opencontainers.image.url="https://ansible-later.geekdocs.de/"
-LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-later"
-LABEL org.opencontainers.image.documentation="https://ansible-later.geekdocs.de/"
-
-ENV PY_COLORS=1
-ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
-
-ADD dist/ansible_later-*.whl /
-
-RUN apk --update add --virtual .build-deps build-base libffi-dev musl-dev openssl-dev python3-dev cargo && \
-    apk --update add git && \
-    pip install --upgrade --no-cache-dir pip && \
-    pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
-    apk del .build-deps && \
-    rm -f ansible_later-*.whl && \
-    rm -rf /var/cache/apk/* && \
-    rm -rf /root/.cache/
-
-USER root
-CMD []
-ENTRYPOINT ["/usr/local/bin/ansible-later"]

(deleted file)
@@ -1,26 +0,0 @@
-FROM arm64v8/python:3.10-alpine@sha256:31a946d193794be6fb888864b77118f4a0b70070ffb2fa5166a9f8f016884446
-
-LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
-LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
-LABEL org.opencontainers.image.title="ansible-later"
-LABEL org.opencontainers.image.url="https://ansible-later.geekdocs.de/"
-LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-later"
-LABEL org.opencontainers.image.documentation="https://ansible-later.geekdocs.de/"
-
-ENV PY_COLORS=1
-ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
-
-ADD dist/ansible_later-*.whl /
-
-RUN apk --update add --virtual .build-deps build-base libffi-dev musl-dev openssl-dev python3-dev cargo && \
-    apk --update add git && \
-    pip install --upgrade --no-cache-dir pip && \
-    pip install --no-cache-dir $(find / -name "ansible_later-*.whl")[ansible] && \
-    apk del .build-deps && \
-    rm -f ansible_later-*.whl && \
-    rm -rf /var/cache/apk/* && \
-    rm -rf /root/.cache/
-
-USER root
-CMD []
-ENTRYPOINT ["/usr/local/bin/ansible-later"]

(deleted file)
@@ -1,24 +0,0 @@
-image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
-{{#if build.tags}}
-tags:
-{{#each build.tags}}
-  - {{this}}
-{{/each}}
-{{/if}}
-manifests:
-  - image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
-    platform:
-      architecture: amd64
-      os: linux
-
-  - image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
-    platform:
-      architecture: arm64
-      os: linux
-      variant: v8
-
-  - image: quay.io/thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
-    platform:
-      architecture: arm
-      os: linux
-      variant: v7

(deleted file)
@@ -1,24 +0,0 @@
-image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
-{{#if build.tags}}
-tags:
-{{#each build.tags}}
-  - {{this}}
-{{/each}}
-{{/if}}
-manifests:
-  - image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
-    platform:
-      architecture: amd64
-      os: linux
-
-  - image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
-    platform:
-      architecture: arm64
-      os: linux
-      variant: v8
-
-  - image: thegeeklab/ansible-later:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
-    platform:
-      architecture: arm
-      os: linux
-      variant: v7
@@ -18,11 +18,17 @@ markup:
     startLevel: 1

 params:
+  description: >
+    ansible-later is a fast and extensible best practice scanner and linting tool for Ansible resources
+    to enforce a coding or best practice guideline.
+  images:
+    - "socialmedia2.png"
+
   geekdocMenuBundle: true
   geekdocToC: 3

   geekdocRepo: https://github.com/thegeeklab/ansible-later
-  geekdocEditPath: edit/main/docs/content
+  geekdocEditPath: edit/main/docs

   geekdocDateFormat: "Jan 2, 2006"
   geekdocSearch: true

@@ -2,13 +2,12 @@
 title: Documentation
 ---

-[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-later?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-later)
+[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-later/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-later)
 [![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-later)
 [![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-later)
 [![Python Version](https://img.shields.io/pypi/pyversions/ansible-later.svg)](https://pypi.org/project/ansible-later/)
 [![PyPI Status](https://img.shields.io/pypi/status/ansible-later.svg)](https://pypi.org/project/ansible-later/)
 [![PyPI Release](https://img.shields.io/pypi/v/ansible-later.svg)](https://pypi.org/project/ansible-later/)
-[![Codecov](https://img.shields.io/codecov/c/github/thegeeklab/ansible-later)](https://codecov.io/gh/thegeeklab/ansible-later)
 [![GitHub contributors](https://img.shields.io/github/contributors/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/graphs/contributors)
 [![Source: GitHub](https://img.shields.io/badge/source-github-blue.svg?logo=github&logoColor=white)](https://github.com/thegeeklab/ansible-later)
 [![License: MIT](https://img.shields.io/github/license/thegeeklab/ansible-later)](https://github.com/thegeeklab/ansible-later/blob/main/LICENSE)

@@ -1,18 +1,17 @@
 ---
-title: Minimal standard checks
+title: Write a rule
 ---

-A typical standards check will look like:
+A typical rule check will look like:

 <!-- prettier-ignore-start -->
 <!-- spellchecker-disable -->
 {{< highlight Python "linenos=table" >}}
-class CheckBecomeUser(StandardBase):
+class CheckBecomeUser(RuleBase):

-    sid = "ANSIBLE0015"
+    rid = "ANS115"
     description = "Become should be combined with become_user"
     helptext = "the task has `become` enabled but `become_user` is missing"
-    version = "0.1"
     types = ["playbook", "task", "handler"]

     def check(self, candidate, settings):
@@ -13,4 +13,4 @@ Changes can be made in a YAML configuration file or via CLI options, which are p

 Please note that YAML attributes are overwritten while YAML lists are merged in any configuration files.

-To simplify single file linting, e.g. for debugging purposes, ansible-later ignores the `exclude_files` and `ignore_dotfiles` options when only one file is passed to the CLI.
+To simplify the linting of individual files, e.g. for debugging purposes, ansible-later ignores the `exclude_files` and `ignore_dotfiles` options when files are passed to the CLI.
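The single-file behaviour described above is easy to try from the CLI; a minimal sketch, assuming a hypothetical role path:

{{< highlight Shell >}}
# Lint one explicitly passed file for debugging; in this mode ansible-later
# skips the exclude_files and ignore_dotfiles settings.
ansible-later roles/myrole/tasks/main.yml
{{< /highlight >}}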
@@ -8,28 +8,27 @@ You can get all available CLI options by running `ansible-later --help`:
 <!-- spellchecker-disable -->
 {{< highlight Shell "linenos=table" >}}
 $ ansible-later --help
-usage: ansible-later [-h] [-c CONFIG_FILE] [-r RULES.STANDARDS]
-                     [-s RULES.FILTER] [-v] [-q] [--version]
-                     [rules.files [rules.files ...]]
+usage: ansible-later [-h] [-c CONFIG] [-r DIR] [-B] [-i TAGS] [-x TAGS] [-v] [-q] [-V] [rules.files ...]

 Validate Ansible files against best practice guideline

 positional arguments:
   rules.files

-optional arguments:
+options:
   -h, --help            show this help message and exit
-  -c CONFIG_FILE, --config CONFIG_FILE
-                        location of configuration file
-  -r RULES.STANDARDS, --rules RULES.STANDARDS
-                        location of standards rules
-  -s RULES.FILTER, --standards RULES.FILTER
-                        limit standards to given ID's
-  -x RULES.EXCLUDE_FILTER, --exclude-standards RULES.EXCLUDE_FILTER
-                        exclude standards by given ID's
+  -c CONFIG, --config CONFIG
+                        path to configuration file
+  -r DIR, --rules-dir DIR
+                        directory of rules
+  -B, --no-builtin      disables built-in rules
+  -i TAGS, --include-rules TAGS
+                        limit rules to given id/tags
+  -x TAGS, --exclude-rules TAGS
+                        exclude rules by given it/tags
   -v                    increase log level
   -q                    decrease log level
-  --version             show program's version number and exit
+  -V, --version         show program's version number and exit
 {{< /highlight >}}
 <!-- spellchecker-enable -->
 <!-- prettier-ignore-end -->
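For reference, the renamed options can be combined in a single run; a sketch with placeholder config, rule ID and role path:

{{< highlight Shell >}}
# Use a custom config file, skip the deprecation rule (ANS999) and raise the
# log level while linting a role.
ansible-later -c later.yml -x ANS999 -v roles/demo/tasks/main.yml
{{< /highlight >}}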
@@ -11,37 +11,37 @@ The default configuration is used if no other value is specified. Each option ca
 ---
 ansible:
   # Add the name of used custom Ansible modules. Otherwise ansible-later
-  # can't detect unknown modules and will through an error.
+  # can't detect unknown modules and will throw an error.
   # Modules which are bundled with the role and placed in a './library'
   # directory will be auto-detected and don't need to be added to this list.
   custom_modules: []

-  # Settings for variable formatting rule (ANSIBLE0004)
+  # Settings for variable formatting rule (ANS104)
   double-braces:
     max-spaces-inside: 1
     min-spaces-inside: 1

-  # List of allowed literal bools (ANSIBLE0014)
+  # List of allowed literal bools (ANS114)
   literal-bools:
     - "True"
     - "False"
     - "yes"
     - "no"

-  # List of modules that don't need to be named (ANSIBLE0006).
+  # List of modules that don't need to be named (ANS106).
   # You must specify each individual module name, globs or wildcards do not work!
   named-task:
     exclude:
       - "meta"
       - "debug"
-      - "block"
+      - "block/always/rescue"
       - "include_role"
       - "include_tasks"
       - "include_vars"
       - "import_role"
       - "import_tasks"

-  # List of modules that are allowed to use the key=value format instead of the native YAML format (LINT0008).
+  # List of modules that are allowed to use the key=value format instead of the native YAML format (YML108).
   # You must specify each individual module name, globs or wildcards do not work!
   native-yaml:
     exclude: []
@@ -58,8 +58,8 @@ logging:

 # Global settings for all defined rules
 rules:
-  # Disable build-in rules if required
-  buildin: True
+  # Disable built-in rules if required
+  builtin: True

   # List of files to exclude
   exclude_files: []
@@ -75,21 +75,17 @@ rules:
   exclude_filter: []

   # List of rule ID's that should be displayed as a warning instead of an error. By default,
-  # only rules whose version is higher than the current default version are marked as warnings.
-  # This list allows to degrade errors to warnings for each rule.
+  # no rules are marked as warnings. This list allows to degrade errors to warnings for each rule.
   warning_filter:
-    - "ANSIBLE9999"
+    - "ANS128"
+    - "ANS999"

   # All dotfiles (including hidden folders) are excluded by default.
   # You can disable this setting and handle dotfiles by yourself with `exclude_files`.
   ignore_dotfiles: True

-  # List of directories to load standard rules from (defaults to build-in)
-  standards: []
-
-  # Standard version to use. Standard version set in a roles meta file
-  # or playbook will takes precedence.
-  version:
+  # List of directories to load rules from (defaults to built-in)
+  dir: []

 # Block to control included yamllint rules.
 # See https://yamllint.readthedocs.io/en/stable/rules.html
@@ -99,6 +95,8 @@ yamllint:
     max-spaces-before: 0
   document-start:
     present: True
+  document-end:
+    present: True
   empty-lines:
     max: 1
     max-end: 1
docs/content/configuration/pre-commit.md (new file, 21 lines)
@@ -0,0 +1,21 @@
+---
+title: Pre-Commit setup
+---
+
+To use `ansible-later` with the [pre-commit](https://pre-commit.com/) framework, add the following to the `.pre-commit-config.yaml` file in your local repository.
+
+<!-- prettier-ignore-start -->
+<!-- markdownlint-disable -->
+<!-- spellchecker-disable -->
+
+{{< highlight yaml "linenos=table" >}}
+- repo: https://github.com/thegeeklab/ansible-later
+  # change ref to the latest release from https://github.com/thegeeklab/ansible-later/releases
+  rev: v3.0.2
+  hooks:
+    - id: ansible-later
+{{< /highlight >}}
+
+<!-- spellchecker-enable -->
+<!-- markdownlint-restore -->
+<!-- prettier-ignore-end -->
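Once the hook is configured, the standard pre-commit workflow applies; for example:

{{< highlight Shell >}}
# Install the git hook, then run the ansible-later hook against the whole repository.
pre-commit install
pre-commit run ansible-later --all-files
{{< /highlight >}}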
@@ -2,43 +2,47 @@
 title: Included rules
 ---

-Reviews are useless without some rules or standards to check against. ansible-later comes with a set of built-in checks, which are explained in the following table.
+Reviews are useless without some rules to check against. `ansible-later` comes with a set of built-in checks, which are explained in the following table.

 | Rule | ID | Description | Parameter |
-| ----------------------------- | ----------- | ----------------------------------------------------------------- | ---------------------------------------------------------------------- |
-| CheckYamlEmptyLines | LINT0001 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 1} |
-| CheckYamlIndent | LINT0002 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} |
-| CheckYamlHyphens | LINT0003 | YAML should use consistent number of spaces after hyphens (-). | {max-spaces-after: 1} |
-| CheckYamlDocumentStart | LINT0004 | YAML should contain document start marker. | {document-start: {present: true}} |
-| CheckYamlColons | LINT0005 | YAML should use consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} |
-| CheckYamlFile | LINT0006 | Roles file should be in YAML format. | |
-| CheckYamlHasContent | LINT0007 | Files should contain useful content. | |
-| CheckNativeYaml | LINT0008 | Use YAML format for tasks and handlers rather than key=value. | {native-yaml: {exclude: []}} |
-| CheckYamlDocumentEnd | LINT0009 | YAML should contain document end marker. | {document-end: {present: true}} |
-| CheckTaskSeparation | ANSIBLE0001 | Single tasks should be separated by an empty line. | |
-| CheckMetaMain | ANSIBLE0002 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies |
-| CheckUniqueNamedTask | ANSIBLE0003 | Tasks and handlers must be uniquely named within a file. | |
-| CheckBraces | ANSIBLE0004 | YAML should use consistent number of spaces around variables. | {double-braces: max-spaces-inside: 1, min-spaces-inside: 1} |
-| CheckScmInSrc | ANSIBLE0005 | Use SCM key rather than `src: scm+url` in requirements file. | |
-| CheckNamedTask | ANSIBLE0006 | Tasks and handlers must be named. | {named-task: {exclude: [meta, debug, block, include\_\*, import\_\*]}} |
-| CheckNameFormat | ANSIBLE0007 | Name of tasks and handlers must be formatted. | formats: first letter capital |
-| CheckCommandInsteadofModule | ANSIBLE0008 | Commands should not be used in place of modules. | |
-| CheckInstallUseLatest | ANSIBLE0009 | Package managers should not install with state=latest. | |
-| CheckShellInsteadCommand | ANSIBLE0010 | Use Shell only when piping, redirecting or chaining commands. | |
-| CheckCommandHasChanges | ANSIBLE0011 | Commands should be idempotent and only used with some checks. | |
-| CheckCompareToEmptyString | ANSIBLE0012 | Don't compare to "" - use `when: var` or `when: not var`. | |
-| CheckCompareToLiteralBool | ANSIBLE0013 | Don't compare to True/False - use `when: var` or `when: not var`. | |
-| CheckLiteralBoolFormat | ANSIBLE0014 | Literal bools should be consistent. | {literal-bools: [True, False, yes, no]} |
-| CheckBecomeUser | ANSIBLE0015 | Become should be combined with become_user. | |
-| CheckFilterSeparation | ANSIBLE0016 | Jinja2 filters should be separated with spaces. | |
-| CheckCommandInsteadOfArgument | ANSIBLE0017 | Commands should not be used in place of module arguments. | |
-| CheckFilePermissionMissing | ANSIBLE0018 | File permissions unset or incorrect. | |
-| CheckFilePermissionOctal | ANSIBLE0019 | Octal file permissions must contain leading zero or be a string. | |
-| CheckGitHasVersion | ANSIBLE0020 | Git checkouts should use explicit version. | |
-| CheckMetaChangeFromDefault | ANSIBLE0021 | Roles meta/main.yml default values should be changed. | |
-| CheckWhenFormat | ANSIBLE0022 | Don't use Jinja2 in `when`. | |
-| CheckNestedJinja | ANSIBLE0023 | Don't use nested Jinja2 pattern. | |
-| CheckLocalAction | ANSIBLE0024 | Don't use local_action. | |
-| CheckRelativeRolePaths | ANSIBLE0025 | Don't use a relative path in a role. | |
-| CheckChangedInWhen | ANSIBLE0026 | Use handlers instead of `when: changed`. | |
-| CheckDeprecated | ANSIBLE9999 | Deprecated features of `ansible-later` should not be used. | |
+| ----------------------------- | ------ | ----------------------------------------------------------------- | -------------------------------------------------------------------------- |
+| CheckYamlEmptyLines | YML101 | YAML should not contain unnecessarily empty lines. | {max: 1, max-start: 0, max-end: 1} |
+| CheckYamlIndent | YML102 | YAML should be correctly indented. | {spaces: 2, check-multi-line-strings: false, indent-sequences: true} |
+| CheckYamlHyphens | YML103 | YAML should use consistent number of spaces after hyphens (-). | {max-spaces-after: 1} |
+| CheckYamlDocumentStart | YML104 | YAML should contain document start marker. | {document-start: {present: true}} |
+| CheckYamlColons | YML105 | YAML should use consistent number of spaces around colons. | {colons: {max-spaces-before: 0, max-spaces-after: 1}} |
+| CheckYamlFile | YML106 | Roles file should be in YAML format. | |
+| CheckYamlHasContent | YML107 | Files should contain useful content. | |
+| CheckNativeYaml | YML108 | Use YAML format for tasks and handlers rather than key=value. | {native-yaml: {exclude: []}} |
+| CheckYamlDocumentEnd | YML109 | YAML should contain document end marker. | {document-end: {present: true}} |
+| CheckYamlOctalValues | YML110 | YAML should not use forbidden implicit or explicit octal value. | {octal-values: {forbid-implicit-octal: true, forbid-explicit-octal: true}} |
+| CheckTaskSeparation | ANS101 | Single tasks should be separated by an empty line. | |
+| CheckMetaMain | ANS102 | Meta file should contain a basic subset of parameters. | author, description, min_ansible_version, platforms, dependencies |
+| CheckUniqueNamedTask | ANS103 | Tasks and handlers must be uniquely named within a file. | |
+| CheckBraces | ANS104 | YAML should use consistent number of spaces around variables. | {double-braces: max-spaces-inside: 1, min-spaces-inside: 1} |
+| CheckScmInSrc | ANS105 | Use SCM key rather than `src: scm+url` in requirements file. | |
+| CheckNamedTask | ANS106 | Tasks and handlers must be named. | {named-task: {exclude: [meta, debug, block, include\_\*, import\_\*]}} |
+| CheckNameFormat | ANS107 | Name of tasks and handlers must be formatted. | formats: first letter capital |
+| CheckCommandInsteadofModule | ANS108 | Commands should not be used in place of modules. | |
+| CheckInstallUseLatest | ANS109 | Package managers should not install with state=latest. | |
+| CheckShellInsteadCommand | ANS110 | Use Shell only when piping, redirecting or chaining commands. | |
+| CheckCommandHasChanges | ANS111 | Commands should be idempotent and only used with some checks. | |
+| CheckCompareToEmptyString | ANS112 | Don't compare to "" - use `when: var` or `when: not var`. | |
+| CheckCompareToLiteralBool | ANS113 | Don't compare to True/False - use `when: var` or `when: not var`. | |
+| CheckLiteralBoolFormat | ANS114 | Literal bools should be consistent. | {literal-bools: [True, False, yes, no]} |
+| CheckBecomeUser | ANS115 | Become should be combined with become_user. | |
+| CheckFilterSeparation | ANS116 | Jinja2 filters should be separated with spaces. | |
+| CheckCommandInsteadOfArgument | ANS117 | Commands should not be used in place of module arguments. | |
+| CheckFilePermissionMissing | ANS118 | File permissions unset or incorrect. | |
+| CheckFilePermissionOctal | ANS119 | Octal file permissions must contain leading zero or be a string. | |
+| CheckGitHasVersion | ANS120 | Git checkouts should use explicit version. | |
+| CheckMetaChangeFromDefault | ANS121 | Roles meta/main.yml default values should be changed. | |
+| CheckWhenFormat | ANS122 | Don't use Jinja2 in `when`. | |
+| CheckNestedJinja | ANS123 | Don't use nested Jinja2 pattern. | |
+| CheckLocalAction | ANS124 | Don't use local_action. | |
+| CheckRelativeRolePaths | ANS125 | Don't use a relative path in a role. | |
+| CheckChangedInWhen | ANS126 | Use handlers instead of `when: changed`. | |
+| CheckChangedInWhen | ANS127 | Deprecated bare variables in loops must not be used. | |
+| CheckFQCNBuiltin | ANS128 | Module actions should use full qualified collection names. | |
+| CheckFQCNBuiltin | ANS129 | Check optimized playbook/tasks key order. | |
+| CheckDeprecated | ANS999 | Deprecated features of `ansible-later` should not be used. | |
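The rule set above can be filtered or replaced entirely with the CLI flags shown earlier; a sketch with placeholder paths (the multi-value syntax for `-i`/`-x` may differ):

{{< highlight Shell >}}
# Disable the built-in rules and load rules from a local directory instead.
ansible-later -B -r ./rules playbook.yml

# Keep the built-ins but restrict the run to the FQCN rule only.
ansible-later -i ANS128 playbook.yml
{{< /highlight >}}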
@@ -23,5 +23,5 @@ main:
   sub:
     - name: Candidates
       ref: "/build_rules/candidates"
-    - name: Standards checks
-      ref: "/build_rules/standards_check"
+    - name: Rules
+      ref: "/build_rules/rule"
docs/static/socialmedia.svg (vendored, new file, 162 lines — diff suppressed because one or more lines are too long; 20 KiB)
docs/static/socialmedia2.png (vendored, new file, binary file not shown; 28 KiB)
poetry.lock (generated, 1621 lines changed — diff suppressed because it is too large)
pyproject.toml (147 lines changed)
@@ -10,64 +10,40 @@ classifiers = [
   "Natural Language :: English",
   "Operating System :: POSIX",
   "Programming Language :: Python :: 3",
-  "Programming Language :: Python :: 3.7",
-  "Programming Language :: Python :: 3.8",
   "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python :: 3.11",
+  "Programming Language :: Python :: 3.12",
   "Topic :: Utilities",
   "Topic :: Software Development",
 ]
 description = "Reviews ansible playbooks, roles and inventories and suggests improvements."
 documentation = "https://ansible-later.geekdocs.de/"
 homepage = "https://ansible-later.geekdocs.de/"
-include = [
-  "LICENSE",
-]
+include = ["LICENSE"]
 keywords = ["ansible", "code", "review"]
 license = "MIT"
 name = "ansible-later"
-packages = [
-  {include = "ansiblelater"},
-]
+packages = [{ include = "ansiblelater" }]
 readme = "README.md"
 repository = "https://github.com/thegeeklab/ansible-later/"
 version = "0.0.0"

 [tool.poetry.dependencies]
-PyYAML = "6.0"
-ansible = {version = "4.8.0", optional = true}
-ansible-core = {version = "2.11.6", optional = true}
-anyconfig = "0.12.0"
+PyYAML = "6.0.2"
+ansible-core = { version = "2.14.17", optional = true }
+ansible = { version = "7.7.0", optional = true }
+anyconfig = "0.14.0"
 appdirs = "1.4.4"
-colorama = "0.4.4"
-flake8 = "4.0.1"
-jsonschema = "4.2.1"
-nested-lookup = "0.2.23"
-pathspec = "0.9.0"
-python = "^3.7.0"
-python-json-logger = "2.0.2"
-toolz = "0.11.2"
-unidiff = "0.7.0"
-yamllint = "1.26.3"
+colorama = "0.4.6"
+jsonschema = "4.23.0"
+nested-lookup = "0.2.25"
+pathspec = "0.12.1"
+python = "^3.9.0"
+python-json-logger = "2.0.7"
+toolz = "1.0.0"
+unidiff = "0.7.5"
+yamllint = "1.35.1"

-[tool.poetry.dev-dependencies]
-bandit = "1.7.1"
-flake8-blind-except = "0.2.0"
-flake8-builtins = "1.5.3"
-flake8-docstrings = "1.6.0"
-flake8-eradicate = "1.2.0"
-flake8-isort = "4.1.1"
-flake8-logging-format = "0.6.0"
-flake8-pep3101 = "1.3.0"
-flake8-polyfill = "1.0.2"
-flake8-quotes = "3.3.1"
-pep8-naming = "0.12.1"
-pydocstyle = "6.1.1"
-pytest = "6.2.5"
-pytest-cov = "3.0.0"
-pytest-mock = "3.6.1"
-tomli = "1.2.2"
-yapf = "0.31.0"
-
 [tool.poetry.extras]
 ansible = ["ansible"]
@@ -76,23 +52,23 @@ ansible-core = ["ansible-core"]
 [tool.poetry.scripts]
 ansible-later = "ansiblelater.__main__:main"

+[tool.poetry.group.dev.dependencies]
+ruff = "0.7.2"
+pytest = "8.3.3"
+pytest-mock = "3.14.0"
+pytest-cov = "6.0.0"
+toml = "0.10.2"
+
 [tool.poetry-dynamic-versioning]
 enable = true
 style = "semver"
 vcs = "git"

-[tool.isort]
-default_section = "THIRDPARTY"
-force_single_line = true
-line_length = 99
-sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
-skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]
-
 [tool.pytest.ini_options]
-addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
+addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --no-cov-on-fail"
 filterwarnings = [
   "ignore::FutureWarning",
-  "ignore:.*collections.*:DeprecationWarning",
+  "ignore::DeprecationWarning",
   "ignore:.*pep8.*:FutureWarning",
 ]
@@ -100,5 +76,74 @@ filterwarnings = [
 omit = ["**/test/*"]

 [build-system]
-build-backend = "poetry.core.masonry.api"
+build-backend = "poetry_dynamic_versioning.backend"
 requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
+
+[tool.ruff]
+exclude = [
+  ".git",
+  "__pycache__",
+  "build",
+  "dist",
+  "test",
+  "*.pyc",
+  "*.egg-info",
+  ".cache",
+  ".eggs",
+  "env*",
+]
+
+line-length = 99
+indent-width = 4
+
+[tool.ruff.lint]
+# Explanation of errors
+#
+# D100: Missing docstring in public module
+# D101: Missing docstring in public class
+# D102: Missing docstring in public method
+# D103: Missing docstring in public function
+# D105: Missing docstring in magic method
+# D107: Missing docstring in __init__
+# D202: No blank lines allowed after function docstring
+# D203: One blank line required before class docstring
+# D212: Multi-line docstring summary should start at the first line
+ignore = [
+  "D100",
+  "D101",
+  "D102",
+  "D103",
+  "D105",
+  "D107",
+  "D202",
+  "D203",
+  "D212",
+  "UP038",
+  "RUF012",
+]
+select = [
+  "D",
+  "E",
+  "F",
+  "Q",
+  "W",
+  "I",
+  "S",
+  "BLE",
+  "N",
+  "UP",
+  "B",
+  "A",
+  "C4",
+  "T20",
+  "SIM",
+  "RET",
+  "ARG",
+  "ERA",
+  "RUF",
+]
+
+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+line-ending = "lf"
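The `ansible` and `ansible-core` extras declared in `[tool.poetry.extras]` can be pulled in at install time; a sketch using pip and the PyPI package name referenced by the badges above:

{{< highlight Shell >}}
# Install ansible-later together with the pinned ansible-core build.
pip install "ansible-later[ansible-core]"
{{< /highlight >}}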
@@ -4,8 +4,14 @@
   "packageRules": [
     {
       "description": "Ansible base dependencies",
-      "groupName": "ansible packages",
-      "matchPackageNames": ["ansible", "ansible-core"]
+      "matchPackageNames": ["ansible", "ansible-core"],
+      "separateMinorPatch": true
+    },
+    {
+      "matchManagers": ["woodpecker"],
+      "matchFileNames": [".woodpecker/test.yml"],
+      "matchPackageNames": ["docker.io/library/python"],
+      "enabled": false
     }
   ]
 }
setup.cfg (deleted, 22 lines)
@@ -1,22 +0,0 @@
-[flake8]
-# Explanation of errors
-#
-# D100: Missing docstring in public module
-# D101: Missing docstring in public class
-# D102: Missing docstring in public method
-# D103: Missing docstring in public function
-# D105: Missing docstring in magic method
-# D107: Missing docstring in __init__
-# D202: No blank lines allowed after function docstring
-# W503:Line break occurred before a binary operator
-ignore = D100, D101, D102, D103, D107, D202, W503
-max-line-length = 99
-inline-quotes = double
-exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*
-
-[yapf]
-based_on_style = google
-column_limit = 99
-dedent_closing_brackets = true
-coalesce_brackets = true
-split_before_logical_operator = true