mirror of https://github.com/thegeeklab/ansible-later.git
synced 2024-11-22 04:40:42 +00:00

refactor: migrate flake8 to ruff python linter (#540)

This commit is contained in:
parent 2115efec89
commit b31d510156
@@ -26,7 +26,7 @@ local PipelineLint = {
   },
   steps: [
     {
-      name: 'yapf',
+      name: 'check-format',
       image: 'python:3.11',
       environment: {
         PY_COLORS: 1,
@@ -40,7 +40,7 @@ local PipelineLint = {
       ],
     },
     {
-      name: 'flake8',
+      name: 'check-coding',
       image: 'python:3.11',
       environment: {
         PY_COLORS: 1,
@@ -49,7 +49,7 @@ local PipelineLint = {
         'git fetch -tq',
         'pip install poetry poetry-dynamic-versioning -qq',
         'poetry install -E ansible-core',
-        'poetry run flake8 ./ansiblelater',
+        'poetry run ruff ./ansiblelater',
       ],
     },
   ],
@@ -102,36 +102,6 @@ local PipelineTest = {
   },
 };
 
-local PipelineSecurity = {
-  kind: 'pipeline',
-  name: 'security',
-  platform: {
-    os: 'linux',
-    arch: 'amd64',
-  },
-  steps: [
-    {
-      name: 'bandit',
-      image: 'python:3.11',
-      environment: {
-        PY_COLORS: 1,
-      },
-      commands: [
-        'git fetch -tq',
-        'pip install poetry poetry-dynamic-versioning -qq',
-        'poetry install -E ansible-core',
-        'poetry run bandit -r ./ansiblelater -x ./ansiblelater/test',
-      ],
-    },
-  ],
-  depends_on: [
-    'test',
-  ],
-  trigger: {
-    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
-  },
-};
-
 local PipelineBuildPackage = {
   kind: 'pipeline',
   name: 'build-package',
@@ -204,7 +174,7 @@ local PipelineBuildPackage = {
     },
   ],
   depends_on: [
-    'security',
+    'test',
   ],
   trigger: {
     ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
@@ -289,7 +259,7 @@ local PipelineBuildContainer = {
     },
   ],
   depends_on: [
-    'security',
+    'test',
   ],
   trigger: {
     ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
@@ -463,7 +433,6 @@ local PipelineNotifications = {
 [
   PipelineLint,
   PipelineTest,
-  PipelineSecurity,
   PipelineBuildPackage,
   PipelineBuildContainer,
   PipelineDocs,
.drone.yml  (40 changed lines)
@@ -7,7 +7,7 @@ platform:
   arch: amd64
 
 steps:
-  - name: yapf
+  - name: check-format
     image: python:3.11
     commands:
       - git fetch -tq
@@ -18,13 +18,13 @@ steps:
     environment:
       PY_COLORS: 1
 
-  - name: flake8
+  - name: check-coding
     image: python:3.11
     commands:
       - git fetch -tq
       - pip install poetry poetry-dynamic-versioning -qq
       - poetry install -E ansible-core
-      - poetry run flake8 ./ansiblelater
+      - poetry run ruff ./ansiblelater
     environment:
       PY_COLORS: 1
 
@@ -113,34 +113,6 @@ trigger:
 depends_on:
   - lint
 
----
-kind: pipeline
-name: security
-
-platform:
-  os: linux
-  arch: amd64
-
-steps:
-  - name: bandit
-    image: python:3.11
-    commands:
-      - git fetch -tq
-      - pip install poetry poetry-dynamic-versioning -qq
-      - poetry install -E ansible-core
-      - poetry run bandit -r ./ansiblelater -x ./ansiblelater/test
-    environment:
-      PY_COLORS: 1
-
-trigger:
-  ref:
-    - refs/heads/main
-    - refs/tags/**
-    - refs/pull/**
-
-depends_on:
-  - test
-
 ---
 kind: pipeline
 name: build-package
@@ -211,7 +183,7 @@ trigger:
   - refs/pull/**
 
 depends_on:
-  - security
+  - test
 
 ---
 kind: pipeline
@@ -295,7 +267,7 @@ trigger:
   - refs/pull/**
 
 depends_on:
-  - security
+  - test
 
 ---
 kind: pipeline
@@ -446,6 +418,6 @@ depends_on:
 
 ---
 kind: signature
-hmac: 58eb57b7a150a51796fafff42c5f4aa7773c2ebaf57e180479f4278adf452574
+hmac: 1bc7f62d74ce0afa031770f617ffda20b9719ed4489061c470476ca707d1275f
 
 ...
@@ -5,9 +5,7 @@ import argparse
 import multiprocessing
 import sys
 
-from ansiblelater import LOG
-from ansiblelater import __version__
-from ansiblelater import logger
+from ansiblelater import LOG, __version__, logger
 from ansiblelater.candidate import Candidate
 from ansiblelater.settings import Settings
 from ansiblelater.standard import SingleStandards
@@ -81,9 +79,9 @@ def main():
             if candidate.vault:
                 LOG.info(f"Not reviewing vault file {filename}")
                 continue
-            else:
-                LOG.info(f"Reviewing all of {candidate}")
-                tasks.append(candidate)
+
+            LOG.info(f"Reviewing all of {candidate}")
+            tasks.append(candidate)
         else:
             LOG.info(f"Couldn't classify file {filename}")
 
@@ -91,10 +89,7 @@ def main():
     p.close()
     p.join()
 
-    if not errors == 0:
-        return_code = 1
-    else:
-        return_code = 0
+    return_code = 1 if errors != 0 else 0
 
     sys.exit(return_code)
@@ -8,14 +8,12 @@ from distutils.version import LooseVersion
 
 from ansible.plugins.loader import module_loader
 
-from ansiblelater import LOG
-from ansiblelater import utils
+from ansiblelater import LOG, utils
 from ansiblelater.logger import flag_extra
-from ansiblelater.standard import SingleStandards
-from ansiblelater.standard import StandardBase
+from ansiblelater.standard import SingleStandards, StandardBase
 
 
-class Candidate(object):
+class Candidate:
     """
     Meta object for all files which later has to process.
 
@@ -23,7 +21,7 @@ class Candidate(object):
     bundled with necessary meta informations for rule processing.
     """
 
-    def __init__(self, filename, settings={}, standards=[]):
+    def __init__(self, filename, settings={}, standards=[]):  # noqa
         self.path = filename
         self.binary = False
         self.vault = False
@@ -87,7 +85,7 @@ class Candidate(object):
 
         return target_standards
 
-    def review(self, lines=None):
+    def review(self):
         errors = 0
         self.standards = SingleStandards(self.config["rules"]["standards"]).rules
         self.version_config = self._get_version()
@@ -148,7 +146,7 @@ class Candidate(object):
        return errors
 
     @staticmethod
-    def classify(filename, settings={}, standards=[]):
+    def classify(filename, settings={}, standards=[]):  # noqa
         parentdir = os.path.basename(os.path.dirname(filename))
         basename = os.path.basename(filename)
         ext = os.path.splitext(filename)[1][1:]
@@ -193,20 +191,20 @@ class Candidate(object):
         if sid:
             standard_id = f"[{sid}] "
 
-        return standard_id
+        return standard_id  # noqa
 
-    def __repr__(self):  # noqa
+    def __repr__(self):
         return f"{type(self).__name__} ({self.path})"
 
-    def __getitem__(self, item):  # noqa
+    def __getitem__(self, item):
         return self.__dict__.get(item)
 
 
 class RoleFile(Candidate):
     """Object classified as Ansible role file."""
 
-    def __init__(self, filename, settings={}, standards=[]):
-        super(RoleFile, self).__init__(filename, settings, standards)
+    def __init__(self, filename, settings={}, standards=[]):  # noqa
+        super().__init__(filename, settings, standards)
 
         parentdir = os.path.dirname(os.path.abspath(filename))
         while parentdir != os.path.dirname(parentdir):
@@ -226,16 +224,16 @@ class Playbook(Candidate):
 class Task(RoleFile):
     """Object classified as Ansible task file."""
 
-    def __init__(self, filename, settings={}, standards=[]):
-        super(Task, self).__init__(filename, settings, standards)
+    def __init__(self, filename, settings={}, standards=[]):  # noqa
+        super().__init__(filename, settings, standards)
         self.filetype = "tasks"
 
 
 class Handler(RoleFile):
     """Object classified as Ansible handler file."""
 
-    def __init__(self, filename, settings={}, standards=[]):
-        super(Handler, self).__init__(filename, settings, standards)
+    def __init__(self, filename, settings={}, standards=[]):  # noqa
+        super().__init__(filename, settings, standards)
         self.filetype = "handlers"
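The constructors above drop the legacy super(Class, self) spelling in favour of Python 3's zero-argument super(). A minimal sketch of the two forms, using illustrative class names rather than the repository's:

    class Base:
        def __init__(self, filename):
            self.filename = filename


    class Task(Base):
        def __init__(self, filename):
            # super() infers the class and instance in Python 3; the old
            # super(Task, self).__init__(...) call is equivalent but noisier.
            super().__init__(filename)
            self.filetype = "tasks"


    print(Task("site.yml").filetype)  # -> tasks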
@@ -8,14 +8,14 @@ class LaterError(Exception):
 
     def __init__(self, msg, original):
         """Initialize new exception."""
-        super(LaterError, self).__init__(f"{msg}: {original}")
+        super().__init__(f"{msg}: {original}")
         self.original = original
 
 
 class LaterAnsibleError(Exception):
     """Wrapper for ansible syntax errors."""
 
-    def __init__(self, msg, original):
+    def __init__(self, original):
         lines = original.message.splitlines()
 
         line_no = re.search("line(.*?),", lines[2])

@@ -30,7 +30,7 @@ colorama.init(autoreset=True, strip=(not _should_do_markup()))
 
 def flag_extra(extra):
     """Ensure extra args are prefixed."""
-    flagged = dict()
+    flagged = {}
 
     if isinstance(extra, dict):
         for key, value in extra.items():
@@ -39,7 +39,7 @@ def flag_extra(extra):
     return flagged
 
 
-class LogFilter(object):
+class LogFilter:
     """A custom log filter which excludes log messages above the logged level."""
 
     def __init__(self, level):

@@ -18,12 +18,11 @@ class CheckCommandHasChanges(StandardBase):
 
         if not errors:
             for task in tasks:
-                if task["action"]["__ansible_module__"] in commands:
-                    if (
-                        "changed_when" not in task and "when" not in task
-                        and "when" not in task.get("__ansible_action_meta__", [])
-                        and "creates" not in task["action"] and "removes" not in task["action"]
-                    ):
-                        errors.append(self.Error(task["__line__"], self.helptext))
+                if task["action"]["__ansible_module__"] in commands and (
+                    "changed_when" not in task and "when" not in task
+                    and "when" not in task.get("__ansible_action_meta__", [])
+                    and "creates" not in task["action"] and "removes" not in task["action"]
+                ):
+                    errors.append(self.Error(task["__line__"], self.helptext))
 
         return self.Result(candidate.path, errors)
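Several rules in this commit collapse a nested if into one combined condition (the pattern ruff reports as SIM102, assuming the simplify rules are enabled). A tiny sketch of the rewrite with made-up data:

    task = {"action": {"__ansible_module__": "command"}, "__line__": 3}
    commands = ["command", "shell"]
    errors = []

    # Before: `if module in commands:` wrapping a second `if (...)`.
    # After: one condition, which reads the same and drops a level of nesting.
    if task["action"]["__ansible_module__"] in commands and (
        "changed_when" not in task and "creates" not in task["action"]
    ):
        errors.append((task["__line__"], "command should define changed_when"))

    print(errors)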
@@ -40,9 +40,8 @@ class CheckFilePermissionOctal(StandardBase):
                 if task["action"]["__ansible_module__"] in modules:
                     mode = task["action"].get("mode", None)
 
-                    if isinstance(mode, int):
-                        if self._is_invalid_permission(mode):
-                            errors.append(self.Error(task["__line__"], self.helptext))
+                    if isinstance(mode, int) and self._is_invalid_permission(mode):
+                        errors.append(self.Error(task["__line__"], self.helptext))
 
         return self.Result(candidate.path, errors)
 
@@ -55,7 +54,7 @@ class CheckFilePermissionOctal(StandardBase):
         group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4
                                     and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))
         user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4
-                                   and not (mode >> 6) % 8 == 1)
+                                   and (mode >> 6) % 8 != 1)
         other_more_generous_than_group = mode % 8 > (mode >> 3) % 8
         other_more_generous_than_user = mode % 8 > (mode >> 6) % 8
         group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8

@@ -16,8 +16,8 @@ class CheckMetaMain(StandardBase):
         keys = ["author", "description", "min_ansible_version", "platforms"]
 
         if not errors:
-            has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content.keys())
-            has_dependencies = (isinstance(content, dict) and "dependencies" in content.keys())
+            has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content)
+            has_dependencies = (isinstance(content, dict) and "dependencies" in content)
 
             if not has_galaxy_info:
                 errors.append(self.Error(None, self.helptext.format(key="galaxy_info")))

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Author: Adrián Tóth <adtoth@redhat.com>
 #
 # Copyright (c) 2020, Red Hat, Inc.
@@ -51,7 +50,7 @@ class CheckNestedJinja(StandardBase):
             for item in match:
                 matches.append((i, item))
 
-        for i, line in matches:
+        for i, _ in matches:
             errors.append(self.Error(i, self.helptext))
 
         return self.Result(candidate.path, errors)

@@ -16,8 +16,7 @@ class CheckScmInSrc(StandardBase):
 
         if not errors:
             for role in roles:
-                if isinstance(role, AnsibleMapping):
-                    if "+" in role.get("src"):
-                        errors.append(self.Error(role["__line__"], self.helptext))
+                if isinstance(role, AnsibleMapping) and "+" in role.get("src"):
+                    errors.append(self.Error(role["__line__"], self.helptext))
 
         return self.Result(candidate.path, errors)

@@ -8,7 +8,7 @@ class CheckVersion(StandardBase):
     helptext = "Standards version not set. Using latest standards version {version}"
     types = ["task", "handler", "rolevars", "meta", "template", "file", "playbook"]
 
-    def check(self, candidate, settings):
+    def check(self, candidate, settings):  # noqa
         errors = []
 
         if not candidate.version_config:
@@ -15,7 +15,7 @@ config_dir = AppDirs("ansible-later").user_config_dir
 default_config_file = os.path.join(config_dir, "config.yml")
 
 
-class Settings(object):
+class Settings:
     """
     Create an object with all necessary settings.
 
@@ -25,14 +25,13 @@ class Settings(object):
     - provides cli parameters
     """
 
-    def __init__(self, args={}, config_file=default_config_file):
+    def __init__(self, args, config_file=default_config_file):
         """
         Initialize a new settings class.
 
         :param args: An optional dict of options, arguments and commands from the CLI.
         :param config_file: An optional path to a yaml config file.
         :returns: None
-
         """
         self.config_file = config_file
         self.schema = None
@@ -42,6 +41,9 @@ class Settings(object):
         self._update_filelist()
 
     def _set_args(self, args):
+        if args is None:
+            args = {}
+
         defaults = self._get_defaults()
         self.config_file = args.get("config_file") or default_config_file
 
@@ -214,7 +216,7 @@ class Settings(object):
         del excludes[:]
 
         filelist = []
-        for root, dirs, files in os.walk("."):
+        for root, _dirs, files in os.walk("."):
             for filename in files:
                 filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename))))
@@ -3,12 +3,10 @@
 import copy
 import importlib
 import inspect
-import io
 import os
 import pathlib
 import re
-from abc import ABCMeta
-from abc import abstractmethod
+from abc import ABCMeta, abstractmethod
 from collections import defaultdict
 
 import toolz
@@ -16,27 +14,27 @@ import yaml
 from yamllint import linter
 from yamllint.config import YamlLintConfig
 
-from ansiblelater.exceptions import LaterAnsibleError
-from ansiblelater.exceptions import LaterError
-from ansiblelater.utils import Singleton
-from ansiblelater.utils import sysexit_with_message
-from ansiblelater.utils.yamlhelper import UnsafeTag
-from ansiblelater.utils.yamlhelper import VaultTag
-from ansiblelater.utils.yamlhelper import action_tasks
-from ansiblelater.utils.yamlhelper import normalize_task
-from ansiblelater.utils.yamlhelper import normalized_yaml
-from ansiblelater.utils.yamlhelper import parse_yaml_linenumbers
+from ansiblelater.exceptions import LaterAnsibleError, LaterError
+from ansiblelater.utils import Singleton, sysexit_with_message
+from ansiblelater.utils.yamlhelper import (
+    UnsafeTag,
+    VaultTag,
+    action_tasks,
+    normalize_task,
+    normalized_yaml,
+    parse_yaml_linenumbers,
+)
 
 
 class StandardMeta(type):
 
-    def __call__(cls, *args, **kwargs):
+    def __call__(cls, *args):
         mcls = type.__call__(cls, *args)
-        setattr(mcls, "sid", cls.sid)
-        setattr(mcls, "description", getattr(cls, "description", "__unknown__"))
-        setattr(mcls, "helptext", getattr(cls, "helptext", ""))
-        setattr(mcls, "version", getattr(cls, "version", None))
-        setattr(mcls, "types", getattr(cls, "types", []))
+        mcls.sid = cls.sid
+        mcls.description = getattr(cls, "description", "__unknown__")
+        mcls.helptext = getattr(cls, "helptext", "")
+        mcls.version = getattr(cls, "version", None)
+        mcls.types = getattr(cls, "types", [])
         return mcls
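StandardMeta above now assigns its attributes directly instead of calling setattr with constant names (the pattern flake8-bugbear flags as B010, a check ruff also ships). A small illustrative sketch, not the repository's metaclass:

    class Rule:
        description = "example rule"


    rule = Rule()

    # Old style: setattr with a literal name is just obscured attribute access.
    setattr(rule, "sid", "ANSIBLE0001")

    # New style: plain assignment, same effect, easier to read and grep.
    rule.sid = "ANSIBLE0001"
    rule.helptext = getattr(Rule, "helptext", "")

    print(rule.sid, rule.description, repr(rule.helptext))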
@@ -44,7 +42,7 @@ class StandardExtendedMeta(StandardMeta, ABCMeta):
     pass
 
 
-class StandardBase(object, metaclass=StandardExtendedMeta):
+class StandardBase(metaclass=StandardExtendedMeta):
 
     @property
     @abstractmethod
@@ -55,17 +53,17 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
     def check(self, candidate, settings):
         pass
 
-    def __repr__(self):  # noqa
+    def __repr__(self):
         return f"Standard: {self.description} (version: {self.version}, types: {self.types})"
 
     @staticmethod
-    def get_tasks(candidate, settings):
+    def get_tasks(candidate, settings):  # noqa
         errors = []
         yamllines = []
 
         if not candidate.faulty:
             try:
-                with io.open(candidate.path, mode="r", encoding="utf-8") as f:
+                with open(candidate.path, encoding="utf-8") as f:
                     yamllines = parse_yaml_linenumbers(f, candidate.path)
             except LaterError as ex:
                 e = ex.original
@@ -80,13 +78,13 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
         return yamllines, errors
 
     @staticmethod
-    def get_action_tasks(candidate, settings):
+    def get_action_tasks(candidate, settings):  # noqa
         tasks = []
         errors = []
 
         if not candidate.faulty:
             try:
-                with io.open(candidate.path, mode="r", encoding="utf-8") as f:
+                with open(candidate.path, encoding="utf-8") as f:
                     yamllines = parse_yaml_linenumbers(f, candidate.path)
 
                 if yamllines:
@@ -132,7 +130,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
 
         if not candidate.faulty:
             try:
-                with io.open(candidate.path, mode="r", encoding="utf-8") as f:
+                with open(candidate.path, encoding="utf-8") as f:
                     yamllines = parse_yaml_linenumbers(f, candidate.path)
 
                 if yamllines:
@@ -170,7 +168,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
         return normalized, errors
 
     @staticmethod
-    def get_normalized_yaml(candidate, settings, options=None):
+    def get_normalized_yaml(candidate, settings, options=None):  # noqa
         errors = []
         yamllines = []
 
@@ -195,13 +193,13 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
         return yamllines, errors
 
     @staticmethod
-    def get_raw_yaml(candidate, settings):
+    def get_raw_yaml(candidate, settings):  # noqa
         content = None
         errors = []
 
         if not candidate.faulty:
             try:
-                with io.open(candidate.path, mode="r", encoding="utf-8") as f:
+                with open(candidate.path, encoding="utf-8") as f:
                     yaml.add_constructor(
                         UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader
                     )
@@ -223,7 +221,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
 
         if not candidate.faulty:
             try:
-                with io.open(candidate.path, mode="r", encoding="utf-8") as f:
+                with open(candidate.path, encoding="utf-8") as f:
                     for problem in linter.run(f, YamlLintConfig(options)):
                         errors.append(StandardBase.Error(problem.line, problem.desc))
             except yaml.YAMLError as e:
@@ -246,12 +244,12 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
         else:
             first_cmd_arg = task["action"]["__ansible_arguments__"][0]
 
-        return first_cmd_arg
+        return first_cmd_arg  # noqa
 
-    class Error(object):
+    class Error:
         """Default error object created if a rule failed."""
 
-        def __init__(self, lineno, message, error_type=None, **kwargs):
+        def __init__(self, lineno, message, **kwargs):
             """
             Initialize a new error object and returns None.
 
@@ -265,19 +263,18 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
             for (key, value) in kwargs.items():
                 setattr(self, key, value)
 
-        def __repr__(self):  # noqa
+        def __repr__(self):
             if self.lineno:
                 return f"{self.lineno}: {self.message}"
-            else:
-                return f" {self.message}"
+            return f" {self.message}"
 
         def to_dict(self):
-            result = dict(lineno=self.lineno, message=self.message)
+            result = {"lineno": self.lineno, "message": self.message}
             for (key, value) in self.kwargs.items():
                 result[key] = value
             return result
 
-    class Result(object):
+    class Result:
         """Generic result object."""
 
         def __init__(self, candidate, errors=None):
@@ -308,7 +305,7 @@ class StandardLoader():
             sysexit_with_message(f"Failed to load roles file {filename}: \n {str(e)}")
 
         try:
-            for name, obj in inspect.getmembers(module):
+            for _name, obj in inspect.getmembers(module):
                 if self._is_plugin(obj):
                     self.rules.append(obj())
         except TypeError as e:
@@ -325,7 +322,7 @@ class StandardLoader():
         normalized_std = (list(toolz.remove(lambda x: x.sid == "", self.rules)))
         unique_std = len(list(toolz.unique(normalized_std, key=lambda x: x.sid)))
         all_std = len(normalized_std)
-        if not all_std == unique_std:
+        if all_std != unique_std:
             sysexit_with_message(
                 "Detect duplicate ID's in standards definition. Please use unique ID's only."
             )
@@ -1,11 +1,8 @@
 """Global utils collection."""
 
-from __future__ import print_function
-
 import contextlib
-import os
-import re
 import sys
+from contextlib import suppress
 from distutils.version import LooseVersion
 
 import yaml
@@ -24,12 +21,12 @@ def count_spaces(c_string):
     leading_spaces = 0
     trailing_spaces = 0
 
-    for i, e in enumerate(c_string):
+    for _i, e in enumerate(c_string):
         if not e.isspace():
             break
         leading_spaces += 1
 
-    for i, e in reversed(list(enumerate(c_string))):
+    for _i, e in reversed(list(enumerate(c_string))):
         if not e.isspace():
             break
         trailing_spaces += 1
@@ -37,16 +34,6 @@ def count_spaces(c_string):
     return ((leading_spaces, trailing_spaces))
 
 
-def get_property(prop):
-    currentdir = os.path.dirname(os.path.realpath(__file__))
-    parentdir = os.path.dirname(currentdir)
-    result = re.search(
-        rf'{prop}\s*=\s*[\'"]([^\'"]*)[\'"]',
-        open(os.path.join(parentdir, "__init__.py")).read()
-    )
-    return result.group(1)
-
-
 def standards_latest(standards):
     return max([standard.version for standard in standards if standard.version] or ["0.1"],
                key=LooseVersion)
@@ -74,10 +61,8 @@ def safe_load(string):
     :returns: dict
 
     """
-    try:
+    with suppress(yaml.scanner.ScannerError):
         return yaml.safe_load(string) or {}
-    except yaml.scanner.ScannerError as e:
-        print(str(e))
 
 
 @contextlib.contextmanager
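safe_load above replaces a try/except that only printed the scanner error with contextlib.suppress. A minimal sketch of the equivalence; the malformed YAML string is made up for illustration:

    from contextlib import suppress

    import yaml

    broken = "key:\n\tvalue: 1"  # tab indentation makes the scanner fail

    # Before: catch, print, and fall through to an implicit None.
    try:
        data = yaml.safe_load(broken) or {}
    except yaml.scanner.ScannerError as e:
        print(str(e))
        data = None

    # After: the intent ("ignore unscannable input") is stated directly.
    with suppress(yaml.scanner.ScannerError):
        data = yaml.safe_load(broken) or {}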
@@ -120,5 +105,5 @@ class Singleton(type):
 
     def __call__(cls, *args, **kwargs):
         if cls not in cls._instances:
-            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+            cls._instances[cls] = super().__call__(*args, **kwargs)
         return cls._instances[cls]
@@ -24,12 +24,12 @@ import codecs
 import glob
 import imp
 import os
+from contextlib import suppress
 
 import ansible.parsing.mod_args
 import yaml
 from ansible import constants
-from ansible.errors import AnsibleError
-from ansible.errors import AnsibleParserError
+from ansible.errors import AnsibleError, AnsibleParserError
 from ansible.parsing.dataloader import DataLoader
 from ansible.parsing.mod_args import ModuleArgsParser
 from ansible.parsing.yaml.constructor import AnsibleConstructor
@@ -37,8 +37,7 @@ from ansible.parsing.yaml.loader import AnsibleLoader
 from ansible.template import Templar
 from yaml.composer import Composer
 
-from ansiblelater.exceptions import LaterAnsibleError
-from ansiblelater.exceptions import LaterError
+from ansiblelater.exceptions import LaterAnsibleError, LaterError
 
 try:
     # Try to import the Ansible 2 module first, it's the future-proof one
@@ -155,8 +154,8 @@ def tokenize(line):
         tokens = tokens[1:]
         command = tokens[0].replace(":", "")
 
-    args = list()
-    kwargs = dict()
+    args = []
+    kwargs = {}
     nonkvfound = False
     for arg in tokens[1:]:
         if "=" in arg and not nonkvfound:
@@ -171,10 +170,11 @@ def tokenize(line):
 def _playbook_items(pb_data):
     if isinstance(pb_data, dict):
         return pb_data.items()
-    elif not pb_data:
+
+    if not pb_data:
         return []
-    else:
-        return [item for play in pb_data for item in play.items()]
+
+    return [item for play in pb_data for item in play.items()]
 
 
 def find_children(playbook, playbook_dir):
@@ -186,7 +186,7 @@ def find_children(playbook, playbook_dir):
     try:
         playbook_ds = parse_yaml_from_file(playbook[0])
     except AnsibleError as e:
-        raise SystemExit(str(e))
+        raise SystemExit(str(e)) from e
     results = []
     basedir = os.path.dirname(playbook[0])
     items = _playbook_items(playbook_ds)
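The `raise ... from e` added here (and in the YAML helpers below) chains the original Ansible or YAML error to the re-raised one, the behaviour flake8-bugbear's B904 rule asks for and ruff also checks. A short self-contained sketch:

    class LintError(Exception):
        """Illustrative stand-in for the project's wrapper exceptions."""


    def parse(text):
        try:
            return int(text)
        except ValueError as e:
            # "from e" records the ValueError as __cause__, so tracebacks show
            # the real failure instead of "another exception occurred".
            raise LintError(f"could not parse {text!r}") from e


    try:
        parse("not-a-number")
    except LintError as err:
        print(err, "| caused by:", err.__cause__)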
@@ -194,7 +194,7 @@ def find_children(playbook, playbook_dir):
         for child in play_children(basedir, item, playbook[1], playbook_dir):
             if "$" in child["path"] or "{{" in child["path"]:
                 continue
-            valid_tokens = list()
+            valid_tokens = []
             for token in split_args(child["path"]):
                 if "=" in token:
                     break
@@ -205,20 +205,17 @@ def find_children(playbook, playbook_dir):
 
 
 def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
-    try:
+    # Hack to skip the following exception when using to_json filter on a variable.
+    # I guess the filter doesn't like empty vars...
+    with suppress(AnsibleError, ValueError):
         value = ansible_template(
             os.path.abspath(basedir), value, variables,
             **dict(kwargs, fail_on_undefined=fail_on_undefined)
         )
-    # Hack to skip the following exception when using to_json filter on a variable.
-    # I guess the filter doesn't like empty vars...
-    except (AnsibleError, ValueError):
-        # templating failed, so just keep value as is.
-        pass
     return value
 
 
-def play_children(basedir, item, parent_type, playbook_dir):
+def play_children(basedir, item, parent_type):
     delegate_map = {
         "tasks": _taskshandlers_children,
         "pre_tasks": _taskshandlers_children,
@@ -234,15 +231,13 @@ def play_children(basedir, item, parent_type, playbook_dir):
     play_library = os.path.join(os.path.abspath(basedir), "library")
     _load_library_if_exists(play_library)
 
-    if k in delegate_map:
-        if v:
-            v = template(
-                os.path.abspath(basedir),
-                v,
-                dict(playbook_dir=os.path.abspath(basedir)),
-                fail_on_undefined=False
-            )
-            return delegate_map[k](basedir, k, v, parent_type)
+    if k in delegate_map and v:
+        v = template(
+            os.path.abspath(basedir),
+            v, {"playbook_dir": os.path.abspath(basedir)},
+            fail_on_undefined=False
+        )
+        return delegate_map[k](basedir, k, v, parent_type)
     return []
 
 
@@ -298,14 +293,11 @@ def append_children(taskhandler, basedir, k, parent_type, results):
     # when taskshandlers_children is called for playbooks, the
     # actual type of the included tasks is the section containing the
     # include, e.g. tasks, pre_tasks, or handlers.
-    if parent_type == "playbook":
-        playbook_section = k
-    else:
-        playbook_section = parent_type
+    playbook_section = k if parent_type == "playbook" else parent_type
     results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section})
 
 
-def _roles_children(basedir, k, v, parent_type, main="main"):
+def _roles_children(basedir, k, v, parent_type, main="main"):  # noqa
     results = []
     for role in v:
         if isinstance(role, dict):
@@ -381,7 +373,7 @@ def rolename(filepath):
         return ""
     role = filepath[idx + 6:]
     role = role[:role.find("/")]
-    return role
+    return role  # noqa
 
 
 def _kv_to_dict(v):
@@ -389,23 +381,27 @@ def _kv_to_dict(v):
     return (dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs))
 
 
-def normalize_task(task, filename, custom_modules=[]):
+def normalize_task(task, filename, custom_modules=None):
     """Ensure tasks have an action key and strings are converted to python objects."""
+    if custom_modules is None:
+        custom_modules = []
+
     ansible_action_type = task.get("__ansible_action_type__", "task")
     if "__ansible_action_type__" in task:
         del (task["__ansible_action_type__"])
 
     # temp. extract metadata
-    ansible_meta = dict()
+    ansible_meta = {}
     for key in ["__line__", "__file__", "__ansible_action_meta__"]:
         default = None
 
         if key == "__ansible_action_meta__":
-            default = dict()
+            default = {}
 
         ansible_meta[key] = task.pop(key, default)
 
-    normalized = dict()
+    normalized = {}
 
     builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
     builtin = list(set(builtin + custom_modules))
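normalize_task now defaults custom_modules to None and creates the list inside the function, avoiding the shared mutable default that flake8-bugbear (B006) and ruff warn about. A minimal sketch of the pitfall, with illustrative function names:

    def collect_bad(item, seen=[]):
        # One list object is created at definition time and reused on every call.
        seen.append(item)
        return seen


    def collect_good(item, seen=None):
        # A fresh list is created per call unless the caller passes one in.
        if seen is None:
            seen = []
        seen.append(item)
        return seen


    print(collect_bad("a"))   # ['a']
    print(collect_bad("b"))   # ['a', 'b'] - state leaked from the first call
    print(collect_good("a"))  # ['a']
    print(collect_good("b"))  # ['b']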
@@ -415,7 +411,7 @@ def normalize_task(task, filename, custom_modules=[]):
     try:
         action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
     except AnsibleParserError as e:
-        raise LaterAnsibleError("syntax error", e)
+        raise LaterAnsibleError("syntax error", e) from e
 
     # denormalize shell -> command conversion
     if "_uses_shell" in arguments:
@@ -427,16 +423,16 @@ def normalize_task(task, filename, custom_modules=[]):
             # we don"t want to re-assign these values, which were
             # determined by the ModuleArgsParser() above
             continue
-        else:
-            normalized[k] = v
 
-    normalized["action"] = dict(__ansible_module__=action)
+        normalized[k] = v
+
+    normalized["action"] = {"__ansible_module__": action}
 
     if "_raw_params" in arguments:
         normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].strip().split()
         del (arguments["_raw_params"])
     else:
-        normalized["action"]["__ansible_arguments__"] = list()
+        normalized["action"]["__ansible_arguments__"] = []
     normalized["action"].update(arguments)
 
     normalized[FILENAME_KEY] = filename
@@ -451,7 +447,7 @@ def normalize_task(task, filename, custom_modules=[]):
 
 
 def action_tasks(yaml, file):
-    tasks = list()
+    tasks = []
     if file["filetype"] in ["tasks", "handlers"]:
         tasks = add_action_type(yaml, file["filetype"])
     else:
@@ -474,15 +470,14 @@ def task_to_str(task):
         return name
     action = task.get("action")
     args = " ".join([
-        u"{0}={1}".format(k, v)
-        for (k, v) in action.items()
+        f"{k}={v}" for (k, v) in action.items()
         if k not in ["__ansible_module__", "__ansible_arguments__"]
     ] + action.get("__ansible_arguments__"))
-    return u"{0} {1}".format(action["__ansible_module__"], args)
+    return "{} {}".format(action["__ansible_module__"], args)
 
 
 def extract_from_list(blocks, candidates):
-    results = list()
+    results = []
     for block in blocks:
         for candidate in candidates:
             delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]
@@ -500,7 +495,7 @@ def extract_from_list(blocks, candidates):
 
 
 def add_action_type(actions, action_type, action_meta=None):
-    results = list()
+    results = []
     for action in actions:
         action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
         if action_meta:
@@ -528,7 +523,7 @@ def parse_yaml_linenumbers(data, filename):
         try:
             mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
         except yaml.constructor.ConstructorError as e:
-            raise LaterError("syntax error", e)
+            raise LaterError("syntax error", e) from e
 
         if hasattr(node, "__line__"):
             mapping[LINE_NUMBER_KEY] = node.__line__
@@ -544,10 +539,10 @@ def parse_yaml_linenumbers(data, filename):
         loader.construct_mapping = construct_mapping
         data = loader.get_single_data() or []
     except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
-        raise LaterError("syntax error", e)
+        raise LaterError("syntax error", e) from e
     except (yaml.composer.ComposerError) as e:
         e.problem = f"{e.context} {e.problem}"
-        raise LaterError("syntax error", e)
+        raise LaterError("syntax error", e) from e
     return data
 
 
@@ -572,14 +567,14 @@ def normalized_yaml(file, options):
             for line in removes:
                 lines.remove(line)
     except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
-        raise LaterError("syntax error", e)
+        raise LaterError("syntax error", e) from e
     return lines
 
 
 class UnsafeTag:
     """Handle custom yaml unsafe tag."""
 
-    yaml_tag = u"!unsafe"
+    yaml_tag = "!unsafe"
 
     def __init__(self, value):
         self.unsafe = value
@@ -592,7 +587,7 @@ class UnsafeTag:
 
 class VaultTag:
     """Handle custom yaml vault tag."""
 
-    yaml_tag = u"!vault"
+    yaml_tag = "!vault"
 
     def __init__(self, value):
         self.unsafe = value
382
poetry.lock
generated
382
poetry.lock
generated
@ -68,17 +68,6 @@ files = [
|
|||||||
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "atomicwrites"
|
|
||||||
version = "1.4.1"
|
|
||||||
description = "Atomic file writes."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
|
||||||
files = [
|
|
||||||
{file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "attrs"
|
name = "attrs"
|
||||||
version = "22.2.0"
|
version = "22.2.0"
|
||||||
@ -98,29 +87,6 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-
|
|||||||
tests = ["attrs[tests-no-zope]", "zope.interface"]
|
tests = ["attrs[tests-no-zope]", "zope.interface"]
|
||||||
tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
|
tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bandit"
|
|
||||||
version = "1.7.4"
|
|
||||||
description = "Security oriented static analyser for python code."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
|
|
||||||
{file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
|
|
||||||
GitPython = ">=1.0.1"
|
|
||||||
PyYAML = ">=5.3.1"
|
|
||||||
stevedore = ">=1.20.0"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"]
|
|
||||||
toml = ["toml"]
|
|
||||||
yaml = ["PyYAML"]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cffi"
|
name = "cffi"
|
||||||
version = "1.15.1"
|
version = "1.15.1"
|
||||||
@ -324,17 +290,20 @@ test-randomorder = ["pytest-randomly"]
|
|||||||
tox = ["tox"]
|
tox = ["tox"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "eradicate"
|
name = "exceptiongroup"
|
||||||
version = "2.1.0"
|
version = "1.1.0"
|
||||||
description = "Removes commented-out code."
|
description = "Backport of PEP 654 (exception groups)"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"},
|
{file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"},
|
||||||
{file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"},
|
{file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
test = ["pytest (>=6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "flake8"
|
name = "flake8"
|
||||||
version = "5.0.4"
|
version = "5.0.4"
|
||||||
@@ -352,180 +321,6 @@ mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.9.0,<2.10.0"
pyflakes = ">=2.5.0,<2.6.0"

-[[package]]
-name = "flake8-blind-except"
-version = "0.2.1"
-description = "A flake8 extension that checks for blind except: statements"
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-blind-except-0.2.1.tar.gz", hash = "sha256:f25a575a9dcb3eeb3c760bf9c22db60b8b5a23120224ed1faa9a43f75dd7dd16"},
-]
-
-[[package]]
-name = "flake8-builtins"
-version = "2.0.0"
-description = "Check for python builtins being used as variables or parameters."
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-builtins-2.0.0.tar.gz", hash = "sha256:98833fa16139a75cd4913003492a9bd9a61c6f8ac146c3db12a2ebaf420dade3"},
-    {file = "flake8_builtins-2.0.0-py3-none-any.whl", hash = "sha256:39bfa3badb5e8d22f92baf4e0ea1b816707245233846932d6b13e81fc6f673e8"},
-]
-
-[package.dependencies]
-flake8 = "*"
-
-[package.extras]
-test = ["pytest"]
-
-[[package]]
-name = "flake8-docstrings"
-version = "1.6.0"
-description = "Extension for flake8 which uses pydocstyle to check docstrings"
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
-    {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
-]
-
-[package.dependencies]
-flake8 = ">=3"
-pydocstyle = ">=2.1"
-
-[[package]]
-name = "flake8-eradicate"
-version = "1.3.0"
-description = "Flake8 plugin to find commented out code"
-category = "dev"
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
-    {file = "flake8-eradicate-1.3.0.tar.gz", hash = "sha256:e4c98f00d17dc8653e3388cac2624cd81e9735de2fd4a8dcf99029633ebd7a63"},
-    {file = "flake8_eradicate-1.3.0-py3-none-any.whl", hash = "sha256:85a71e0c5f4e07f7c6c5fec520483561fd6bd295417d622855bdeade99242e3d"},
-]
-
-[package.dependencies]
-attrs = "*"
-eradicate = ">=2.0,<3.0"
-flake8 = ">=3.5,<6"
-setuptools = "*"
-
-[[package]]
-name = "flake8-isort"
-version = "6.0.0"
-description = "flake8 plugin that integrates isort ."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "flake8-isort-6.0.0.tar.gz", hash = "sha256:537f453a660d7e903f602ecfa36136b140de279df58d02eb1b6a0c84e83c528c"},
-    {file = "flake8_isort-6.0.0-py3-none-any.whl", hash = "sha256:aa0cac02a62c7739e370ce6b9c31743edac904bae4b157274511fc8a19c75bbc"},
-]
-
-[package.dependencies]
-flake8 = "*"
-isort = ">=5.0.0,<6"
-
-[package.extras]
-test = ["pytest"]
-
-[[package]]
-name = "flake8-logging-format"
-version = "0.7.5"
-description = ""
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-logging-format-0.7.5.tar.gz", hash = "sha256:54f7e349c934ce5c594f251885bc2240e99f6b48752a672a8fc7e3d1388352bb"},
-]
-
-[package.extras]
-lint = ["flake8"]
-test = ["PyHamcrest", "pytest", "pytest-cov"]
-
-[[package]]
-name = "flake8-pep3101"
-version = "2.0.0"
-description = "Checks for old string formatting."
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-pep3101-2.0.0.tar.gz", hash = "sha256:ae2ee1758734a473ca971b4bf9ff09c961b6099916db91fdb6b9718328dfcacb"},
-    {file = "flake8_pep3101-2.0.0-py3-none-any.whl", hash = "sha256:1d818e1f53c6d26e875714f2f041ec15fbb23c17e2268dbbb024e9c3383541cd"},
-]
-
-[package.dependencies]
-flake8 = "*"
-
-[package.extras]
-test = ["pytest"]
-
-[[package]]
-name = "flake8-polyfill"
-version = "1.0.2"
-description = "Polyfill package for Flake8 plugins"
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
-    {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
-]
-
-[package.dependencies]
-flake8 = "*"
-
-[[package]]
-name = "flake8-quotes"
-version = "3.3.1"
-description = "Flake8 lint for quotes."
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "flake8-quotes-3.3.1.tar.gz", hash = "sha256:633adca6fb8a08131536af0d750b44d6985b9aba46f498871e21588c3e6f525a"},
-]
-
-[package.dependencies]
-flake8 = "*"
-
-[[package]]
-name = "gitdb"
-version = "4.0.10"
-description = "Git Object Database"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"},
-    {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"},
-]
-
-[package.dependencies]
-smmap = ">=3.0.1,<6"
-
-[[package]]
-name = "gitpython"
-version = "3.1.30"
-description = "GitPython is a python library used to interact with Git repositories"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"},
-    {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"},
-]
-
-[package.dependencies]
-gitdb = ">=4.0.1,<5"
-
[[package]]
name = "iniconfig"
version = "2.0.0"
@@ -538,24 +333,6 @@ files = [
    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]

-[[package]]
-name = "isort"
-version = "5.12.0"
-description = "A Python utility / library to sort Python imports."
-category = "dev"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
-    {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
-]
-
-[package.extras]
-colors = ["colorama (>=0.4.3)"]
-pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
-plugins = ["setuptools"]
-requirements-deprecated-finder = ["pip-api", "pipreqs"]
-
[[package]]
name = "jinja2"
version = "3.1.2"
@@ -704,33 +481,6 @@ files = [
    {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"},
]

-[[package]]
-name = "pbr"
-version = "5.11.1"
-description = "Python Build Reasonableness"
-category = "dev"
-optional = false
-python-versions = ">=2.6"
-files = [
-    {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"},
-    {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"},
-]
-
-[[package]]
-name = "pep8-naming"
-version = "0.13.2"
-description = "Check PEP-8 naming conventions, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"},
-    {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"},
-]
-
-[package.dependencies]
-flake8 = ">=3.9.1"
-
[[package]]
name = "pluggy"
version = "1.0.0"
@@ -747,18 +497,6 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]

-[[package]]
-name = "py"
-version = "1.11.0"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
-    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
-    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
-
[[package]]
name = "pycodestyle"
version = "2.9.1"
@@ -783,24 +521,6 @@ files = [
    {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]

-[[package]]
-name = "pydocstyle"
-version = "6.1.1"
-description = "Python docstring style checker"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
-    {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
-]
-
-[package.dependencies]
-snowballstemmer = "*"
-
-[package.extras]
-toml = ["toml"]
-
[[package]]
name = "pyflakes"
version = "2.5.0"
@@ -852,25 +572,24 @@ files = [

[[package]]
name = "pytest"
-version = "7.1.2"
+version = "7.2.1"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
+    {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"},
-    {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
+    {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"},
]

[package.dependencies]
-atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
-py = ">=1.8.2"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
-tomli = ">=1.0.0"

[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
@@ -896,14 +615,14 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale

[[package]]
name = "pytest-mock"
-version = "3.8.2"
+version = "3.10.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"},
+    {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"},
-    {file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"},
+    {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"},
]

[package.dependencies]
@@ -992,6 +711,32 @@ lint = ["black", "flake8", "isort", "mypy", "types-requests"]
release = ["build", "towncrier", "twine"]
test = ["commentjson", "packaging", "pytest"]

+[[package]]
+name = "ruff"
+version = "0.0.244"
+description = "An extremely fast Python linter, written in Rust."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "ruff-0.0.244-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5d65a0adffa51314cf9f1036c51dbcde0462b23b49a3d8e3a696a221f9f46f54"},
+    {file = "ruff-0.0.244-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:3d6bf5094f2c447f5ff8d10c670dc1bc8b7f70cb5f4e43afe1d0624b934c1284"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f54790b297d5df8120a348c91426a0375c40f62880d30438e46922399b29bf"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88e263e3d7267b4b10f5c1fc1446c5d6b47824c6d78e5c3a97ef79c83d9cb837"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8044c79098e3f2deaf970ab468bf5661b193163369bfe5bbda636e6363aa7932"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:258e5e3386a8efdff9f253395cc03a3a88204442ac8db50aeb0a529e2862d57b"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd09c523aeed4d81f358093dc4df384a4db42ff5f9627c9506c26c2becbe19a7"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c98f0d4a4e052e8b0e27b47e83563026d749b07a21a097780cd283c2f885ad3c"},
+    {file = "ruff-0.0.244-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2359f840c95364d779b86a822fe025fa416eb14adc661c1263bc39e90065f0bd"},
+    {file = "ruff-0.0.244-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8b85ced1e75b7cf1dd90d0708f8e46e2d58fc124334492cc5103f24d832a3922"},
+    {file = "ruff-0.0.244-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9c2d49c2021bf80f3e66968c1a41f89061911ffb7ed1f0d39a3204a45fc97ba7"},
+    {file = "ruff-0.0.244-py3-none-musllinux_1_2_i686.whl", hash = "sha256:da77d573c7a5b27bad43468fb7e47e78e22715426beb4e673106d24a9a584838"},
+    {file = "ruff-0.0.244-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9f16fc3380753310af2a10e2867dfc133849e51c545561ec0a389aa93b3058b0"},
+    {file = "ruff-0.0.244-py3-none-win32.whl", hash = "sha256:b3fc70a4c5d5a0ab8e5b3c3e818ca224913eee84f65bf63ee212af2bbd5f1792"},
+    {file = "ruff-0.0.244-py3-none-win_amd64.whl", hash = "sha256:78bbc5d1cca0a8752f6e4b3f4485f4c4f2428543a0777d1bde865aa43bdab190"},
+    {file = "ruff-0.0.244.tar.gz", hash = "sha256:7c05773e990348a6d7628b9b7294fe76303bc870dd94d9c34154bc1560053050"},
+]

[[package]]
name = "setuptools"
version = "67.2.0"
@@ -1021,45 +766,6 @@ files = [
    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]

-[[package]]
-name = "smmap"
-version = "5.0.0"
-description = "A pure Python implementation of a sliding window memory map manager"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
-    {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
-]
-
-[[package]]
-name = "snowballstemmer"
-version = "2.2.0"
-description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
-    {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
-]
-
-[[package]]
-name = "stevedore"
-version = "4.1.1"
-description = "Manage dynamic plugins for Python applications"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"},
-    {file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"},
-]
-
-[package.dependencies]
-pbr = ">=2.0.0,<2.1.0 || >2.1.0"
-
[[package]]
name = "toml"
version = "0.10.2"
@@ -1144,4 +850,4 @@ ansible-core = ["ansible-core"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9.0"
-content-hash = "2f0c4e4f4dbe856eaa1f1769d392330fd91ec743ddef8dfc68f752cbc12a0f49"
+content-hash = "cdc6322cee0cea0daae4910c3fea067f1088332ffcf1799e5fc5dacd803764d6"
107 pyproject.toml
@@ -49,26 +49,6 @@ toolz = "0.12.0"
unidiff = "0.7.4"
yamllint = "1.29.0"

-[tool.poetry.dev-dependencies]
-bandit = "1.7.4"
-flake8-blind-except = "0.2.1"
-flake8-builtins = "2.0.0"
-flake8-docstrings = "1.6.0"
-flake8-eradicate = "1.3.0"
-flake8-isort = "6.0.0"
-flake8-logging-format = "0.7.5"
-flake8-pep3101 = "2.0.0"
-flake8-polyfill = "1.0.2"
-flake8-quotes = "3.3.1"
-pep8-naming = "0.13.2"
-pydocstyle = "6.1.1"
-pytest = "7.1.2"
-pytest-cov = "4.0.0"
-pytest-mock = "3.8.2"
-tomli = "2.0.1"
-yapf = "0.32.0"
-toml = "0.10.2"
-
[tool.poetry.extras]
ansible = ["ansible"]
ansible-core = ["ansible-core"]
@@ -76,23 +56,24 @@ ansible-core = ["ansible-core"]
[tool.poetry.scripts]
ansible-later = "ansiblelater.__main__:main"

+[tool.poetry.group.dev.dependencies]
+ruff = "0.0.244"
+pytest = "7.2.1"
+pytest-mock = "3.10.0"
+pytest-cov = "4.0.0"
+toml = "0.10.2"
+yapf = "0.32.0"
+
[tool.poetry-dynamic-versioning]
enable = true
style = "semver"
vcs = "git"

-[tool.isort]
-default_section = "THIRDPARTY"
-force_single_line = true
-line_length = 99
-sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
-skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]
-
[tool.pytest.ini_options]
addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
filterwarnings = [
    "ignore::FutureWarning",
-    "ignore:.*collections.*:DeprecationWarning",
+    "ignore::DeprecationWarning",
    "ignore:.*pep8.*:FutureWarning",
]
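The filterwarnings entries above use Python's standard warning-filter syntax ("action:message_regex:category"), so the new "ignore::DeprecationWarning" entry silences every DeprecationWarning instead of only those whose message mentions collections. A minimal sketch of what the two ignore filters match; the test module below is hypothetical and only illustrative, not part of this commit:

# hypothetical test module, for illustration only (not part of this commit)
import warnings

def test_warning_filters():
    # matched by "ignore::DeprecationWarning" (empty message field = any message)
    warnings.warn("collections.abc import moved", DeprecationWarning)
    # matched by "ignore:.*pep8.*:FutureWarning" (message regex plus category)
    warnings.warn("the pep8 option is deprecated", FutureWarning)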
@@ -102,3 +83,73 @@ omit = ["**/test/*"]
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
+
+[tool.ruff]
+exclude = [
+    ".git",
+    "__pycache__",
+    "build",
+    "dist",
+    "test",
+    "*.pyc",
+    "*.egg-info",
+    ".cache",
+    ".eggs",
+    "env*",
+]
+# Explanation of errors
+#
+# D100: Missing docstring in public module
+# D101: Missing docstring in public class
+# D102: Missing docstring in public method
+# D103: Missing docstring in public function
+# D105: Missing docstring in magic method
+# D107: Missing docstring in __init__
+# D202: No blank lines allowed after function docstring
+# D203: One blank line required before class docstring
+# D212: Multi-line docstring summary should start at the first line
+ignore = [
+    "D100",
+    "D101",
+    "D102",
+    "D103",
+    "D105",
+    "D107",
+    "D202",
+    "D203",
+    "D212",
+]
+line-length = 99
+select = [
+    "D",
+    "E",
+    "F",
+    "Q",
+    "W",
+    "I",
+    "S",
+    "BLE",
+    "N",
+    "UP",
+    "B",
+    "A",
+    "C4",
+    "T20",
+    "SIM",
+    "RET",
+    "ARG",
+    "ERA",
+    "RUF",
+]
+
+[tool.ruff.flake8-quotes]
+inline-quotes = "double"
+
+[tool.yapf]
+based_on_style = "google"
+column_limit = 99
+dedent_closing_brackets = true
+coalesce_brackets = true
+split_before_logical_operator = true
+indent_dictionary_value = true
+allow_split_before_dict_value = false
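Each entry in the select list enables a whole rule family rather than a single code (for example D = pydocstyle docstring checks, S = bandit-style security checks, BLE = blind except, A = builtin shadowing, T20 = print statements, ERA = commented-out code), which is how the flake8 plugins dropped from the dev dependencies are covered. A rough illustration of the kind of findings these families report; the module is hypothetical and not part of this commit, with rule codes taken from ruff's documentation:

# hypothetical module, for illustration only (not part of this commit)
import subprocess   # F401: imported but unused (pyflakes "F" family)


def load(input):    # A002: argument "input" shadows a Python builtin
    # return json.loads(input)    # ERA001: found commented-out code
    try:
        print("loading", input)   # T201: print statement found
        return open(input).read()
    except Exception:             # BLE001: blind except on Exception
        return None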
26 setup.cfg
@@ -1,26 +0,0 @@
-[flake8]
-# Explanation of errors
-#
-# D100: Missing docstring in public module
-# D101: Missing docstring in public class
-# D102: Missing docstring in public method
-# D103: Missing docstring in public function
-# D105: Missing docstring in magic method
-# D107: Missing docstring in __init__
-# D202: No blank lines allowed after function docstring
-# G001: Logging statements should not use string.format() for their first argument
-# G004: Logging statements should not use f"..." for their first argument
-# W503: Line break occurred before a binary operator
-ignore = D100, D101, D102, D103, D107, D202, G001, G004, W503
-max-line-length = 99
-inline-quotes = double
-exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*
-
-[yapf]
-based_on_style = google
-column_limit = 99
-dedent_closing_brackets = true
-coalesce_brackets = true
-split_before_logical_operator = true
-indent_dictionary_value = true
-allow_split_before_dict_value = false