Mirror of https://github.com/thegeeklab/ansible-later.git (synced 2024-11-24 13:50:41 +00:00)

refactor: migrate flake8 to ruff python linter (#540)

This commit is contained in:
parent 2115efec89
commit b31d510156
@@ -26,7 +26,7 @@ local PipelineLint = {
},
steps: [
{
name: 'yapf',
name: 'check-format',
image: 'python:3.11',
environment: {
PY_COLORS: 1,
@@ -40,7 +40,7 @@ local PipelineLint = {
],
},
{
name: 'flake8',
name: 'check-coding',
image: 'python:3.11',
environment: {
PY_COLORS: 1,
@@ -49,7 +49,7 @@ local PipelineLint = {
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry install -E ansible-core',
'poetry run flake8 ./ansiblelater',
'poetry run ruff ./ansiblelater',
],
},
],
@@ -102,36 +102,6 @@ local PipelineTest = {
},
};

local PipelineSecurity = {
kind: 'pipeline',
name: 'security',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'bandit',
image: 'python:3.11',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry install -E ansible-core',
'poetry run bandit -r ./ansiblelater -x ./ansiblelater/test',
],
},
],
depends_on: [
'test',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};

local PipelineBuildPackage = {
kind: 'pipeline',
name: 'build-package',
@@ -204,7 +174,7 @@ local PipelineBuildPackage = {
},
],
depends_on: [
'security',
'test',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
@@ -289,7 +259,7 @@ local PipelineBuildContainer = {
},
],
depends_on: [
'security',
'test',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
@@ -463,7 +433,6 @@ local PipelineNotifications = {
[
PipelineLint,
PipelineTest,
PipelineSecurity,
PipelineBuildPackage,
PipelineBuildContainer,
PipelineDocs,
.drone.yml (40 lines changed)
@@ -7,7 +7,7 @@ platform:
arch: amd64

steps:
- name: yapf
- name: check-format
image: python:3.11
commands:
- git fetch -tq
@@ -18,13 +18,13 @@ steps:
environment:
PY_COLORS: 1

- name: flake8
- name: check-coding
image: python:3.11
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run flake8 ./ansiblelater
- poetry run ruff ./ansiblelater
environment:
PY_COLORS: 1

@@ -113,34 +113,6 @@ trigger:
depends_on:
- lint

---
kind: pipeline
name: security

platform:
os: linux
arch: amd64

steps:
- name: bandit
image: python:3.11
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run bandit -r ./ansiblelater -x ./ansiblelater/test
environment:
PY_COLORS: 1

trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**

depends_on:
- test

---
kind: pipeline
name: build-package
@@ -211,7 +183,7 @@ trigger:
- refs/pull/**

depends_on:
- security
- test

---
kind: pipeline
@@ -295,7 +267,7 @@ trigger:
- refs/pull/**

depends_on:
- security
- test

---
kind: pipeline
@@ -446,6 +418,6 @@ depends_on:

---
kind: signature
hmac: 58eb57b7a150a51796fafff42c5f4aa7773c2ebaf57e180479f4278adf452574
hmac: 1bc7f62d74ce0afa031770f617ffda20b9719ed4489061c470476ca707d1275f

...
@@ -5,9 +5,7 @@ import argparse
import multiprocessing
import sys

from ansiblelater import LOG
from ansiblelater import __version__
from ansiblelater import logger
from ansiblelater import LOG, __version__, logger
from ansiblelater.candidate import Candidate
from ansiblelater.settings import Settings
from ansiblelater.standard import SingleStandards
@@ -81,9 +79,9 @@ def main():
if candidate.vault:
LOG.info(f"Not reviewing vault file {filename}")
continue
else:
LOG.info(f"Reviewing all of {candidate}")
tasks.append(candidate)

LOG.info(f"Reviewing all of {candidate}")
tasks.append(candidate)
else:
LOG.info(f"Couldn't classify file {filename}")

@@ -91,10 +89,7 @@ def main():
p.close()
p.join()

if not errors == 0:
return_code = 1
else:
return_code = 0
return_code = 1 if errors != 0 else 0

sys.exit(return_code)
@@ -8,14 +8,12 @@ from distutils.version import LooseVersion

from ansible.plugins.loader import module_loader

from ansiblelater import LOG
from ansiblelater import utils
from ansiblelater import LOG, utils
from ansiblelater.logger import flag_extra
from ansiblelater.standard import SingleStandards
from ansiblelater.standard import StandardBase
from ansiblelater.standard import SingleStandards, StandardBase


class Candidate(object):
class Candidate:
"""
Meta object for all files which later has to process.

@@ -23,7 +21,7 @@ class Candidate(object):
bundled with necessary meta informations for rule processing.
"""

def __init__(self, filename, settings={}, standards=[]):
def __init__(self, filename, settings={}, standards=[]): # noqa
self.path = filename
self.binary = False
self.vault = False
@@ -87,7 +85,7 @@ class Candidate(object):

return target_standards

def review(self, lines=None):
def review(self):
errors = 0
self.standards = SingleStandards(self.config["rules"]["standards"]).rules
self.version_config = self._get_version()
@@ -148,7 +146,7 @@ class Candidate(object):
return errors

@staticmethod
def classify(filename, settings={}, standards=[]):
def classify(filename, settings={}, standards=[]): # noqa
parentdir = os.path.basename(os.path.dirname(filename))
basename = os.path.basename(filename)
ext = os.path.splitext(filename)[1][1:]
@@ -193,20 +191,20 @@ class Candidate(object):
if sid:
standard_id = f"[{sid}] "

return standard_id
return standard_id # noqa

def __repr__(self): # noqa
def __repr__(self):
return f"{type(self).__name__} ({self.path})"

def __getitem__(self, item): # noqa
def __getitem__(self, item):
return self.__dict__.get(item)


class RoleFile(Candidate):
"""Object classified as Ansible role file."""

def __init__(self, filename, settings={}, standards=[]):
super(RoleFile, self).__init__(filename, settings, standards)
def __init__(self, filename, settings={}, standards=[]): # noqa
super().__init__(filename, settings, standards)

parentdir = os.path.dirname(os.path.abspath(filename))
while parentdir != os.path.dirname(parentdir):
@@ -226,16 +224,16 @@ class Playbook(Candidate):
class Task(RoleFile):
"""Object classified as Ansible task file."""

def __init__(self, filename, settings={}, standards=[]):
super(Task, self).__init__(filename, settings, standards)
def __init__(self, filename, settings={}, standards=[]): # noqa
super().__init__(filename, settings, standards)
self.filetype = "tasks"


class Handler(RoleFile):
"""Object classified as Ansible handler file."""

def __init__(self, filename, settings={}, standards=[]):
super(Handler, self).__init__(filename, settings, standards)
def __init__(self, filename, settings={}, standards=[]): # noqa
super().__init__(filename, settings, standards)
self.filetype = "handlers"
@@ -8,14 +8,14 @@ class LaterError(Exception):

def __init__(self, msg, original):
"""Initialize new exception."""
super(LaterError, self).__init__(f"{msg}: {original}")
super().__init__(f"{msg}: {original}")
self.original = original


class LaterAnsibleError(Exception):
"""Wrapper for ansible syntax errors."""

def __init__(self, msg, original):
def __init__(self, original):
lines = original.message.splitlines()

line_no = re.search("line(.*?),", lines[2])
@@ -30,7 +30,7 @@ colorama.init(autoreset=True, strip=(not _should_do_markup()))

def flag_extra(extra):
"""Ensure extra args are prefixed."""
flagged = dict()
flagged = {}

if isinstance(extra, dict):
for key, value in extra.items():
@@ -39,7 +39,7 @@ def flag_extra(extra):
return flagged


class LogFilter(object):
class LogFilter:
"""A custom log filter which excludes log messages above the logged level."""

def __init__(self, level):
@@ -18,12 +18,11 @@ class CheckCommandHasChanges(StandardBase):

if not errors:
for task in tasks:
if task["action"]["__ansible_module__"] in commands:
if (
"changed_when" not in task and "when" not in task
and "when" not in task.get("__ansible_action_meta__", [])
and "creates" not in task["action"] and "removes" not in task["action"]
):
errors.append(self.Error(task["__line__"], self.helptext))
if task["action"]["__ansible_module__"] in commands and (
"changed_when" not in task and "when" not in task
and "when" not in task.get("__ansible_action_meta__", [])
and "creates" not in task["action"] and "removes" not in task["action"]
):
errors.append(self.Error(task["__line__"], self.helptext))

return self.Result(candidate.path, errors)
@@ -40,9 +40,8 @@ class CheckFilePermissionOctal(StandardBase):
if task["action"]["__ansible_module__"] in modules:
mode = task["action"].get("mode", None)

if isinstance(mode, int):
if self._is_invalid_permission(mode):
errors.append(self.Error(task["__line__"], self.helptext))
if isinstance(mode, int) and self._is_invalid_permission(mode):
errors.append(self.Error(task["__line__"], self.helptext))

return self.Result(candidate.path, errors)

@@ -55,7 +54,7 @@ class CheckFilePermissionOctal(StandardBase):
group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4
and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))
user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4
and not (mode >> 6) % 8 == 1)
and (mode >> 6) % 8 != 1)
other_more_generous_than_group = mode % 8 > (mode >> 3) % 8
other_more_generous_than_user = mode % 8 > (mode >> 6) % 8
group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8
@@ -16,8 +16,8 @@ class CheckMetaMain(StandardBase):
keys = ["author", "description", "min_ansible_version", "platforms"]

if not errors:
has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content.keys())
has_dependencies = (isinstance(content, dict) and "dependencies" in content.keys())
has_galaxy_info = (isinstance(content, dict) and "galaxy_info" in content)
has_dependencies = (isinstance(content, dict) and "dependencies" in content)

if not has_galaxy_info:
errors.append(self.Error(None, self.helptext.format(key="galaxy_info")))
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Author: Adrián Tóth <adtoth@redhat.com>
#
# Copyright (c) 2020, Red Hat, Inc.
@@ -51,7 +50,7 @@ class CheckNestedJinja(StandardBase):
for item in match:
matches.append((i, item))

for i, line in matches:
for i, _ in matches:
errors.append(self.Error(i, self.helptext))

return self.Result(candidate.path, errors)
@@ -16,8 +16,7 @@ class CheckScmInSrc(StandardBase):

if not errors:
for role in roles:
if isinstance(role, AnsibleMapping):
if "+" in role.get("src"):
errors.append(self.Error(role["__line__"], self.helptext))
if isinstance(role, AnsibleMapping) and "+" in role.get("src"):
errors.append(self.Error(role["__line__"], self.helptext))

return self.Result(candidate.path, errors)
@@ -8,7 +8,7 @@ class CheckVersion(StandardBase):
helptext = "Standards version not set. Using latest standards version {version}"
types = ["task", "handler", "rolevars", "meta", "template", "file", "playbook"]

def check(self, candidate, settings):
def check(self, candidate, settings): # noqa
errors = []

if not candidate.version_config:
@@ -15,7 +15,7 @@ config_dir = AppDirs("ansible-later").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")


class Settings(object):
class Settings:
"""
Create an object with all necessary settings.

@@ -25,14 +25,13 @@ class Settings(object):
- provides cli parameters
"""

def __init__(self, args={}, config_file=default_config_file):
def __init__(self, args, config_file=default_config_file):
"""
Initialize a new settings class.

:param args: An optional dict of options, arguments and commands from the CLI.
:param config_file: An optional path to a yaml config file.
:returns: None

"""
self.config_file = config_file
self.schema = None
@@ -42,6 +41,9 @@ class Settings(object):
self._update_filelist()

def _set_args(self, args):
if args is None:
args = {}

defaults = self._get_defaults()
self.config_file = args.get("config_file") or default_config_file

@@ -214,7 +216,7 @@ class Settings(object):
del excludes[:]

filelist = []
for root, dirs, files in os.walk("."):
for root, _dirs, files in os.walk("."):
for filename in files:
filelist.append(os.path.relpath(os.path.normpath(os.path.join(root, filename))))
@@ -3,12 +3,10 @@
import copy
import importlib
import inspect
import io
import os
import pathlib
import re
from abc import ABCMeta
from abc import abstractmethod
from abc import ABCMeta, abstractmethod
from collections import defaultdict

import toolz
@@ -16,27 +14,27 @@ import yaml
from yamllint import linter
from yamllint.config import YamlLintConfig

from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
from ansiblelater.utils import Singleton
from ansiblelater.utils import sysexit_with_message
from ansiblelater.utils.yamlhelper import UnsafeTag
from ansiblelater.utils.yamlhelper import VaultTag
from ansiblelater.utils.yamlhelper import action_tasks
from ansiblelater.utils.yamlhelper import normalize_task
from ansiblelater.utils.yamlhelper import normalized_yaml
from ansiblelater.utils.yamlhelper import parse_yaml_linenumbers
from ansiblelater.exceptions import LaterAnsibleError, LaterError
from ansiblelater.utils import Singleton, sysexit_with_message
from ansiblelater.utils.yamlhelper import (
UnsafeTag,
VaultTag,
action_tasks,
normalize_task,
normalized_yaml,
parse_yaml_linenumbers,
)


class StandardMeta(type):

def __call__(cls, *args, **kwargs):
def __call__(cls, *args):
mcls = type.__call__(cls, *args)
setattr(mcls, "sid", cls.sid)
setattr(mcls, "description", getattr(cls, "description", "__unknown__"))
setattr(mcls, "helptext", getattr(cls, "helptext", ""))
setattr(mcls, "version", getattr(cls, "version", None))
setattr(mcls, "types", getattr(cls, "types", []))
mcls.sid = cls.sid
mcls.description = getattr(cls, "description", "__unknown__")
mcls.helptext = getattr(cls, "helptext", "")
mcls.version = getattr(cls, "version", None)
mcls.types = getattr(cls, "types", [])
return mcls


@@ -44,7 +42,7 @@ class StandardExtendedMeta(StandardMeta, ABCMeta):
pass


class StandardBase(object, metaclass=StandardExtendedMeta):
class StandardBase(metaclass=StandardExtendedMeta):

@property
@abstractmethod
@@ -55,17 +53,17 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
def check(self, candidate, settings):
pass

def __repr__(self): # noqa
def __repr__(self):
return f"Standard: {self.description} (version: {self.version}, types: {self.types})"

@staticmethod
def get_tasks(candidate, settings):
def get_tasks(candidate, settings): # noqa
errors = []
yamllines = []

if not candidate.faulty:
try:
with io.open(candidate.path, mode="r", encoding="utf-8") as f:
with open(candidate.path, encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path)
except LaterError as ex:
e = ex.original
@@ -80,13 +78,13 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
return yamllines, errors

@staticmethod
def get_action_tasks(candidate, settings):
def get_action_tasks(candidate, settings): # noqa
tasks = []
errors = []

if not candidate.faulty:
try:
with io.open(candidate.path, mode="r", encoding="utf-8") as f:
with open(candidate.path, encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path)

if yamllines:
@@ -132,7 +130,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):

if not candidate.faulty:
try:
with io.open(candidate.path, mode="r", encoding="utf-8") as f:
with open(candidate.path, encoding="utf-8") as f:
yamllines = parse_yaml_linenumbers(f, candidate.path)

if yamllines:
@@ -170,7 +168,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
return normalized, errors

@staticmethod
def get_normalized_yaml(candidate, settings, options=None):
def get_normalized_yaml(candidate, settings, options=None): # noqa
errors = []
yamllines = []

@@ -195,13 +193,13 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
return yamllines, errors

@staticmethod
def get_raw_yaml(candidate, settings):
def get_raw_yaml(candidate, settings): # noqa
content = None
errors = []

if not candidate.faulty:
try:
with io.open(candidate.path, mode="r", encoding="utf-8") as f:
with open(candidate.path, encoding="utf-8") as f:
yaml.add_constructor(
UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor, Loader=yaml.SafeLoader
)
@@ -223,7 +221,7 @@ class StandardBase(object, metaclass=StandardExtendedMeta):

if not candidate.faulty:
try:
with io.open(candidate.path, mode="r", encoding="utf-8") as f:
with open(candidate.path, encoding="utf-8") as f:
for problem in linter.run(f, YamlLintConfig(options)):
errors.append(StandardBase.Error(problem.line, problem.desc))
except yaml.YAMLError as e:
@@ -246,12 +244,12 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
else:
first_cmd_arg = task["action"]["__ansible_arguments__"][0]

return first_cmd_arg
return first_cmd_arg # noqa

class Error(object):
class Error:
"""Default error object created if a rule failed."""

def __init__(self, lineno, message, error_type=None, **kwargs):
def __init__(self, lineno, message, **kwargs):
"""
Initialize a new error object and returns None.

@@ -265,19 +263,18 @@ class StandardBase(object, metaclass=StandardExtendedMeta):
for (key, value) in kwargs.items():
setattr(self, key, value)

def __repr__(self): # noqa
def __repr__(self):
if self.lineno:
return f"{self.lineno}: {self.message}"
else:
return f" {self.message}"
return f" {self.message}"

def to_dict(self):
result = dict(lineno=self.lineno, message=self.message)
result = {"lineno": self.lineno, "message": self.message}
for (key, value) in self.kwargs.items():
result[key] = value
return result

class Result(object):
class Result:
"""Generic result object."""

def __init__(self, candidate, errors=None):
@@ -308,7 +305,7 @@ class StandardLoader():
sysexit_with_message(f"Failed to load roles file {filename}: \n {str(e)}")

try:
for name, obj in inspect.getmembers(module):
for _name, obj in inspect.getmembers(module):
if self._is_plugin(obj):
self.rules.append(obj())
except TypeError as e:
@@ -325,7 +322,7 @@ class StandardLoader():
normalized_std = (list(toolz.remove(lambda x: x.sid == "", self.rules)))
unique_std = len(list(toolz.unique(normalized_std, key=lambda x: x.sid)))
all_std = len(normalized_std)
if not all_std == unique_std:
if all_std != unique_std:
sysexit_with_message(
"Detect duplicate ID's in standards definition. Please use unique ID's only."
)
@@ -1,11 +1,8 @@
"""Global utils collection."""

from __future__ import print_function

import contextlib
import os
import re
import sys
from contextlib import suppress
from distutils.version import LooseVersion

import yaml
@@ -24,12 +21,12 @@ def count_spaces(c_string):
leading_spaces = 0
trailing_spaces = 0

for i, e in enumerate(c_string):
for _i, e in enumerate(c_string):
if not e.isspace():
break
leading_spaces += 1

for i, e in reversed(list(enumerate(c_string))):
for _i, e in reversed(list(enumerate(c_string))):
if not e.isspace():
break
trailing_spaces += 1
@@ -37,16 +34,6 @@ def count_spaces(c_string):
return ((leading_spaces, trailing_spaces))


def get_property(prop):
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
result = re.search(
rf'{prop}\s*=\s*[\'"]([^\'"]*)[\'"]',
open(os.path.join(parentdir, "__init__.py")).read()
)
return result.group(1)


def standards_latest(standards):
return max([standard.version for standard in standards if standard.version] or ["0.1"],
key=LooseVersion)
@@ -74,10 +61,8 @@ def safe_load(string):
:returns: dict

"""
try:
with suppress(yaml.scanner.ScannerError):
return yaml.safe_load(string) or {}
except yaml.scanner.ScannerError as e:
print(str(e))


@contextlib.contextmanager
@@ -120,5 +105,5 @@ class Singleton(type):

def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
cls._instances[cls] = super().__call__(*args, **kwargs)
return cls._instances[cls]
@@ -24,12 +24,12 @@ import codecs
import glob
import imp
import os
from contextlib import suppress

import ansible.parsing.mod_args
import yaml
from ansible import constants
from ansible.errors import AnsibleError
from ansible.errors import AnsibleParserError
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor
@@ -37,8 +37,7 @@ from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import Templar
from yaml.composer import Composer

from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
from ansiblelater.exceptions import LaterAnsibleError, LaterError

try:
# Try to import the Ansible 2 module first, it's the future-proof one
@@ -155,8 +154,8 @@ def tokenize(line):
tokens = tokens[1:]
command = tokens[0].replace(":", "")

args = list()
kwargs = dict()
args = []
kwargs = {}
nonkvfound = False
for arg in tokens[1:]:
if "=" in arg and not nonkvfound:
@@ -171,10 +170,11 @@ def tokenize(line):
def _playbook_items(pb_data):
if isinstance(pb_data, dict):
return pb_data.items()
elif not pb_data:

if not pb_data:
return []
else:
return [item for play in pb_data for item in play.items()]

return [item for play in pb_data for item in play.items()]


def find_children(playbook, playbook_dir):
@@ -186,7 +186,7 @@ def find_children(playbook, playbook_dir):
try:
playbook_ds = parse_yaml_from_file(playbook[0])
except AnsibleError as e:
raise SystemExit(str(e))
raise SystemExit(str(e)) from e
results = []
basedir = os.path.dirname(playbook[0])
items = _playbook_items(playbook_ds)
@@ -194,7 +194,7 @@ def find_children(playbook, playbook_dir):
for child in play_children(basedir, item, playbook[1], playbook_dir):
if "$" in child["path"] or "{{" in child["path"]:
continue
valid_tokens = list()
valid_tokens = []
for token in split_args(child["path"]):
if "=" in token:
break
@@ -205,20 +205,17 @@ def find_children(playbook, playbook_dir):


def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
try:
# Hack to skip the following exception when using to_json filter on a variable.
# I guess the filter doesn't like empty vars...
with suppress(AnsibleError, ValueError):
value = ansible_template(
os.path.abspath(basedir), value, variables,
**dict(kwargs, fail_on_undefined=fail_on_undefined)
)
# Hack to skip the following exception when using to_json filter on a variable.
# I guess the filter doesn't like empty vars...
except (AnsibleError, ValueError):
# templating failed, so just keep value as is.
pass
return value


def play_children(basedir, item, parent_type, playbook_dir):
def play_children(basedir, item, parent_type):
delegate_map = {
"tasks": _taskshandlers_children,
"pre_tasks": _taskshandlers_children,
@@ -234,15 +231,13 @@ def play_children(basedir, item, parent_type, playbook_dir):
play_library = os.path.join(os.path.abspath(basedir), "library")
_load_library_if_exists(play_library)

if k in delegate_map:
if v:
v = template(
os.path.abspath(basedir),
v,
dict(playbook_dir=os.path.abspath(basedir)),
fail_on_undefined=False
)
return delegate_map[k](basedir, k, v, parent_type)
if k in delegate_map and v:
v = template(
os.path.abspath(basedir),
v, {"playbook_dir": os.path.abspath(basedir)},
fail_on_undefined=False
)
return delegate_map[k](basedir, k, v, parent_type)
return []


@@ -298,14 +293,11 @@ def append_children(taskhandler, basedir, k, parent_type, results):
# when taskshandlers_children is called for playbooks, the
# actual type of the included tasks is the section containing the
# include, e.g. tasks, pre_tasks, or handlers.
if parent_type == "playbook":
playbook_section = k
else:
playbook_section = parent_type
playbook_section = k if parent_type == "playbook" else parent_type
results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section})


def _roles_children(basedir, k, v, parent_type, main="main"):
def _roles_children(basedir, k, v, parent_type, main="main"): # noqa
results = []
for role in v:
if isinstance(role, dict):
@@ -381,7 +373,7 @@ def rolename(filepath):
return ""
role = filepath[idx + 6:]
role = role[:role.find("/")]
return role
return role # noqa


def _kv_to_dict(v):
@@ -389,23 +381,27 @@ def _kv_to_dict(v):
return (dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs))


def normalize_task(task, filename, custom_modules=[]):
def normalize_task(task, filename, custom_modules=None):
"""Ensure tasks have an action key and strings are converted to python objects."""

if custom_modules is None:
custom_modules = []

ansible_action_type = task.get("__ansible_action_type__", "task")
if "__ansible_action_type__" in task:
del (task["__ansible_action_type__"])

# temp. extract metadata
ansible_meta = dict()
ansible_meta = {}
for key in ["__line__", "__file__", "__ansible_action_meta__"]:
default = None

if key == "__ansible_action_meta__":
default = dict()
default = {}

ansible_meta[key] = task.pop(key, default)

normalized = dict()
normalized = {}

builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
builtin = list(set(builtin + custom_modules))
@@ -415,7 +411,7 @@ def normalize_task(task, filename, custom_modules=[]):
try:
action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
except AnsibleParserError as e:
raise LaterAnsibleError("syntax error", e)
raise LaterAnsibleError("syntax error", e) from e

# denormalize shell -> command conversion
if "_uses_shell" in arguments:
@@ -427,16 +423,16 @@ def normalize_task(task, filename, custom_modules=[]):
# we don"t want to re-assign these values, which were
# determined by the ModuleArgsParser() above
continue
else:
normalized[k] = v

normalized["action"] = dict(__ansible_module__=action)
normalized[k] = v

normalized["action"] = {"__ansible_module__": action}

if "_raw_params" in arguments:
normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].strip().split()
del (arguments["_raw_params"])
else:
normalized["action"]["__ansible_arguments__"] = list()
normalized["action"]["__ansible_arguments__"] = []
normalized["action"].update(arguments)

normalized[FILENAME_KEY] = filename
@@ -451,7 +447,7 @@ def normalize_task(task, filename, custom_modules=[]):


def action_tasks(yaml, file):
tasks = list()
tasks = []
if file["filetype"] in ["tasks", "handlers"]:
tasks = add_action_type(yaml, file["filetype"])
else:
@@ -474,15 +470,14 @@ def task_to_str(task):
return name
action = task.get("action")
args = " ".join([
u"{0}={1}".format(k, v)
for (k, v) in action.items()
f"{k}={v}" for (k, v) in action.items()
if k not in ["__ansible_module__", "__ansible_arguments__"]
] + action.get("__ansible_arguments__"))
return u"{0} {1}".format(action["__ansible_module__"], args)
return "{} {}".format(action["__ansible_module__"], args)


def extract_from_list(blocks, candidates):
results = list()
results = []
for block in blocks:
for candidate in candidates:
delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]
@@ -500,7 +495,7 @@ def extract_from_list(blocks, candidates):


def add_action_type(actions, action_type, action_meta=None):
results = list()
results = []
for action in actions:
action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
if action_meta:
@@ -528,7 +523,7 @@ def parse_yaml_linenumbers(data, filename):
try:
mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
except yaml.constructor.ConstructorError as e:
raise LaterError("syntax error", e)
raise LaterError("syntax error", e) from e

if hasattr(node, "__line__"):
mapping[LINE_NUMBER_KEY] = node.__line__
@@ -544,10 +539,10 @@ def parse_yaml_linenumbers(data, filename):
loader.construct_mapping = construct_mapping
data = loader.get_single_data() or []
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
raise LaterError("syntax error", e)
raise LaterError("syntax error", e) from e
except (yaml.composer.ComposerError) as e:
e.problem = f"{e.context} {e.problem}"
raise LaterError("syntax error", e)
raise LaterError("syntax error", e) from e
return data


@@ -572,14 +567,14 @@ def normalized_yaml(file, options):
for line in removes:
lines.remove(line)
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
raise LaterError("syntax error", e)
raise LaterError("syntax error", e) from e
return lines


class UnsafeTag:
"""Handle custom yaml unsafe tag."""

yaml_tag = u"!unsafe"
yaml_tag = "!unsafe"

def __init__(self, value):
self.unsafe = value
@@ -592,7 +587,7 @@ class UnsafeTag:
class VaultTag:
"""Handle custom yaml vault tag."""

yaml_tag = u"!vault"
yaml_tag = "!vault"

def __init__(self, value):
self.unsafe = value
poetry.lock (382 lines changed, generated)
@ -68,17 +68,6 @@ files = [
|
||||
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atomicwrites"
|
||||
version = "1.4.1"
|
||||
description = "Atomic file writes."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "22.2.0"
|
||||
@ -98,29 +87,6 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-
|
||||
tests = ["attrs[tests-no-zope]", "zope.interface"]
|
||||
tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
|
||||
|
||||
[[package]]
|
||||
name = "bandit"
|
||||
version = "1.7.4"
|
||||
description = "Security oriented static analyser for python code."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
|
||||
{file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
|
||||
GitPython = ">=1.0.1"
|
||||
PyYAML = ">=5.3.1"
|
||||
stevedore = ">=1.20.0"
|
||||
|
||||
[package.extras]
|
||||
test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"]
|
||||
toml = ["toml"]
|
||||
yaml = ["PyYAML"]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "1.15.1"
|
||||
@ -324,17 +290,20 @@ test-randomorder = ["pytest-randomly"]
|
||||
tox = ["tox"]
|
||||
|
||||
[[package]]
|
||||
name = "eradicate"
|
||||
version = "2.1.0"
|
||||
description = "Removes commented-out code."
|
||||
name = "exceptiongroup"
|
||||
version = "1.1.0"
|
||||
description = "Backport of PEP 654 (exception groups)"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"},
|
||||
{file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"},
|
||||
{file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"},
|
||||
{file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8"
|
||||
version = "5.0.4"
|
||||
@ -352,180 +321,6 @@ mccabe = ">=0.7.0,<0.8.0"
|
||||
pycodestyle = ">=2.9.0,<2.10.0"
|
||||
pyflakes = ">=2.5.0,<2.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-blind-except"
|
||||
version = "0.2.1"
|
||||
description = "A flake8 extension that checks for blind except: statements"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-blind-except-0.2.1.tar.gz", hash = "sha256:f25a575a9dcb3eeb3c760bf9c22db60b8b5a23120224ed1faa9a43f75dd7dd16"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "flake8-builtins"
|
||||
version = "2.0.0"
|
||||
description = "Check for python builtins being used as variables or parameters."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-builtins-2.0.0.tar.gz", hash = "sha256:98833fa16139a75cd4913003492a9bd9a61c6f8ac146c3db12a2ebaf420dade3"},
|
||||
{file = "flake8_builtins-2.0.0-py3-none-any.whl", hash = "sha256:39bfa3badb5e8d22f92baf4e0ea1b816707245233846932d6b13e81fc6f673e8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = "*"
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8-docstrings"
|
||||
version = "1.6.0"
|
||||
description = "Extension for flake8 which uses pydocstyle to check docstrings"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
|
||||
{file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = ">=3"
|
||||
pydocstyle = ">=2.1"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-eradicate"
|
||||
version = "1.3.0"
|
||||
description = "Flake8 plugin to find commented out code"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6,<4.0"
|
||||
files = [
|
||||
{file = "flake8-eradicate-1.3.0.tar.gz", hash = "sha256:e4c98f00d17dc8653e3388cac2624cd81e9735de2fd4a8dcf99029633ebd7a63"},
|
||||
{file = "flake8_eradicate-1.3.0-py3-none-any.whl", hash = "sha256:85a71e0c5f4e07f7c6c5fec520483561fd6bd295417d622855bdeade99242e3d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = "*"
|
||||
eradicate = ">=2.0,<3.0"
|
||||
flake8 = ">=3.5,<6"
|
||||
setuptools = "*"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-isort"
|
||||
version = "6.0.0"
|
||||
description = "flake8 plugin that integrates isort ."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "flake8-isort-6.0.0.tar.gz", hash = "sha256:537f453a660d7e903f602ecfa36136b140de279df58d02eb1b6a0c84e83c528c"},
|
||||
{file = "flake8_isort-6.0.0-py3-none-any.whl", hash = "sha256:aa0cac02a62c7739e370ce6b9c31743edac904bae4b157274511fc8a19c75bbc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = "*"
|
||||
isort = ">=5.0.0,<6"
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8-logging-format"
|
||||
version = "0.7.5"
|
||||
description = ""
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-logging-format-0.7.5.tar.gz", hash = "sha256:54f7e349c934ce5c594f251885bc2240e99f6b48752a672a8fc7e3d1388352bb"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
lint = ["flake8"]
|
||||
test = ["PyHamcrest", "pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8-pep3101"
|
||||
version = "2.0.0"
|
||||
description = "Checks for old string formatting."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-pep3101-2.0.0.tar.gz", hash = "sha256:ae2ee1758734a473ca971b4bf9ff09c961b6099916db91fdb6b9718328dfcacb"},
|
||||
{file = "flake8_pep3101-2.0.0-py3-none-any.whl", hash = "sha256:1d818e1f53c6d26e875714f2f041ec15fbb23c17e2268dbbb024e9c3383541cd"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = "*"
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8-polyfill"
|
||||
version = "1.0.2"
|
||||
description = "Polyfill package for Flake8 plugins"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
|
||||
{file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = "*"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-quotes"
|
||||
version = "3.3.1"
|
||||
description = "Flake8 lint for quotes."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "flake8-quotes-3.3.1.tar.gz", hash = "sha256:633adca6fb8a08131536af0d750b44d6985b9aba46f498871e21588c3e6f525a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = "*"
|
||||
|
||||
[[package]]
|
||||
name = "gitdb"
|
||||
version = "4.0.10"
|
||||
description = "Git Object Database"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"},
|
||||
{file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
smmap = ">=3.0.1,<6"
|
||||
|
||||
[[package]]
|
||||
name = "gitpython"
|
||||
version = "3.1.30"
|
||||
description = "GitPython is a python library used to interact with Git repositories"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"},
|
||||
{file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
gitdb = ">=4.0.1,<5"
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.0.0"
|
||||
@ -538,24 +333,6 @@ files = [
|
||||
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "isort"
|
||||
version = "5.12.0"
|
||||
description = "A Python utility / library to sort Python imports."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.8.0"
|
||||
files = [
|
||||
{file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
|
||||
{file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
colors = ["colorama (>=0.4.3)"]
|
||||
pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
|
||||
plugins = ["setuptools"]
|
||||
requirements-deprecated-finder = ["pip-api", "pipreqs"]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "3.1.2"
|
||||
@ -704,33 +481,6 @@ files = [
|
||||
{file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pbr"
|
||||
version = "5.11.1"
|
||||
description = "Python Build Reasonableness"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.6"
|
||||
files = [
|
||||
{file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"},
|
||||
{file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pep8-naming"
|
||||
version = "0.13.2"
|
||||
description = "Check PEP-8 naming conventions, plugin for flake8"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"},
|
||||
{file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
flake8 = ">=3.9.1"
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.0.0"
|
||||
@ -747,18 +497,6 @@ files = [
|
||||
dev = ["pre-commit", "tox"]
|
||||
testing = ["pytest", "pytest-benchmark"]
|
||||
|
||||
[[package]]
|
||||
name = "py"
|
||||
version = "1.11.0"
|
||||
description = "library with cross-python path, ini-parsing, io, code, log facilities"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
files = [
|
||||
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
|
||||
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycodestyle"
|
||||
version = "2.9.1"
|
||||
@ -783,24 +521,6 @@ files = [
|
||||
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydocstyle"
|
||||
version = "6.1.1"
|
||||
description = "Python docstring style checker"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
|
||||
{file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
snowballstemmer = "*"
|
||||
|
||||
[package.extras]
|
||||
toml = ["toml"]
|
||||
|
||||
[[package]]
|
||||
name = "pyflakes"
|
||||
version = "2.5.0"
|
||||
@ -852,25 +572,24 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "7.1.2"
|
||||
version = "7.2.1"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
|
||||
{file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
|
||||
{file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"},
|
||||
{file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
|
||||
attrs = ">=19.2.0"
|
||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
|
||||
iniconfig = "*"
|
||||
packaging = "*"
|
||||
pluggy = ">=0.12,<2.0"
|
||||
py = ">=1.8.2"
|
||||
tomli = ">=1.0.0"
|
||||
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
|
||||
@ -896,14 +615,14 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale
|
||||
|
||||
[[package]]
|
||||
name = "pytest-mock"
|
||||
version = "3.8.2"
|
||||
version = "3.10.0"
|
||||
description = "Thin-wrapper around the mock package for easier use with pytest"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"},
|
||||
{file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"},
|
||||
{file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"},
|
||||
{file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -992,6 +711,32 @@ lint = ["black", "flake8", "isort", "mypy", "types-requests"]
|
||||
release = ["build", "towncrier", "twine"]
|
||||
test = ["commentjson", "packaging", "pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.244"
|
||||
description = "An extremely fast Python linter, written in Rust."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.0.244-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5d65a0adffa51314cf9f1036c51dbcde0462b23b49a3d8e3a696a221f9f46f54"},
|
||||
{file = "ruff-0.0.244-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:3d6bf5094f2c447f5ff8d10c670dc1bc8b7f70cb5f4e43afe1d0624b934c1284"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f54790b297d5df8120a348c91426a0375c40f62880d30438e46922399b29bf"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88e263e3d7267b4b10f5c1fc1446c5d6b47824c6d78e5c3a97ef79c83d9cb837"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8044c79098e3f2deaf970ab468bf5661b193163369bfe5bbda636e6363aa7932"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:258e5e3386a8efdff9f253395cc03a3a88204442ac8db50aeb0a529e2862d57b"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd09c523aeed4d81f358093dc4df384a4db42ff5f9627c9506c26c2becbe19a7"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c98f0d4a4e052e8b0e27b47e83563026d749b07a21a097780cd283c2f885ad3c"},
|
||||
{file = "ruff-0.0.244-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2359f840c95364d779b86a822fe025fa416eb14adc661c1263bc39e90065f0bd"},
|
||||
{file = "ruff-0.0.244-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8b85ced1e75b7cf1dd90d0708f8e46e2d58fc124334492cc5103f24d832a3922"},
|
||||
{file = "ruff-0.0.244-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9c2d49c2021bf80f3e66968c1a41f89061911ffb7ed1f0d39a3204a45fc97ba7"},
|
||||
{file = "ruff-0.0.244-py3-none-musllinux_1_2_i686.whl", hash = "sha256:da77d573c7a5b27bad43468fb7e47e78e22715426beb4e673106d24a9a584838"},
|
||||
{file = "ruff-0.0.244-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9f16fc3380753310af2a10e2867dfc133849e51c545561ec0a389aa93b3058b0"},
|
||||
{file = "ruff-0.0.244-py3-none-win32.whl", hash = "sha256:b3fc70a4c5d5a0ab8e5b3c3e818ca224913eee84f65bf63ee212af2bbd5f1792"},
|
||||
{file = "ruff-0.0.244-py3-none-win_amd64.whl", hash = "sha256:78bbc5d1cca0a8752f6e4b3f4485f4c4f2428543a0777d1bde865aa43bdab190"},
|
||||
{file = "ruff-0.0.244.tar.gz", hash = "sha256:7c05773e990348a6d7628b9b7294fe76303bc870dd94d9c34154bc1560053050"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "67.2.0"
|
||||
@ -1021,45 +766,6 @@ files = [
|
||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smmap"
|
||||
version = "5.0.0"
|
||||
description = "A pure Python implementation of a sliding window memory map manager"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
|
||||
{file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "snowballstemmer"
|
||||
version = "2.2.0"
|
||||
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
|
||||
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stevedore"
|
||||
version = "4.1.1"
|
||||
description = "Manage dynamic plugins for Python applications"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"},
|
||||
{file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.10.2"
|
||||
@ -1144,4 +850,4 @@ ansible-core = ["ansible-core"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.9.0"
|
||||
content-hash = "2f0c4e4f4dbe856eaa1f1769d392330fd91ec743ddef8dfc68f752cbc12a0f49"
|
||||
content-hash = "cdc6322cee0cea0daae4910c3fea067f1088332ffcf1799e5fc5dacd803764d6"
|
||||
|
pyproject.toml (107 lines changed)
@@ -49,26 +49,6 @@ toolz = "0.12.0"
unidiff = "0.7.4"
yamllint = "1.29.0"

[tool.poetry.dev-dependencies]
bandit = "1.7.4"
flake8-blind-except = "0.2.1"
flake8-builtins = "2.0.0"
flake8-docstrings = "1.6.0"
flake8-eradicate = "1.3.0"
flake8-isort = "6.0.0"
flake8-logging-format = "0.7.5"
flake8-pep3101 = "2.0.0"
flake8-polyfill = "1.0.2"
flake8-quotes = "3.3.1"
pep8-naming = "0.13.2"
pydocstyle = "6.1.1"
pytest = "7.1.2"
pytest-cov = "4.0.0"
pytest-mock = "3.8.2"
tomli = "2.0.1"
yapf = "0.32.0"
toml = "0.10.2"

[tool.poetry.extras]
ansible = ["ansible"]
ansible-core = ["ansible-core"]
@@ -76,23 +56,24 @@ ansible-core = ["ansible-core"]
[tool.poetry.scripts]
ansible-later = "ansiblelater.__main__:main"

[tool.poetry.group.dev.dependencies]
ruff = "0.0.244"
pytest = "7.2.1"
pytest-mock = "3.10.0"
pytest-cov = "4.0.0"
toml = "0.10.2"
yapf = "0.32.0"

[tool.poetry-dynamic-versioning]
enable = true
style = "semver"
vcs = "git"

[tool.isort]
default_section = "THIRDPARTY"
force_single_line = true
line_length = 99
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]

[tool.pytest.ini_options]
addopts = "ansiblelater --cov=ansiblelater --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
filterwarnings = [
"ignore::FutureWarning",
"ignore:.*collections.*:DeprecationWarning",
"ignore::DeprecationWarning",
"ignore:.*pep8.*:FutureWarning",
]

@@ -102,3 +83,73 @@ omit = ["**/test/*"]
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]

[tool.ruff]
exclude = [
".git",
"__pycache__",
"build",
"dist",
"test",
"*.pyc",
"*.egg-info",
".cache",
".eggs",
"env*",
]
# Explanation of errors
#
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# D203: One blank line required before class docstring
# D212: Multi-line docstring summary should start at the first line
ignore = [
"D100",
"D101",
"D102",
"D103",
"D105",
"D107",
"D202",
"D203",
"D212",
]
line-length = 99
select = [
"D",
"E",
"F",
"Q",
"W",
"I",
"S",
"BLE",
"N",
"UP",
"B",
"A",
"C4",
"T20",
"SIM",
"RET",
"ARG",
"ERA",
"RUF",
]

[tool.ruff.flake8-quotes]
inline-quotes = "double"

[tool.yapf]
based_on_style = "google"
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true
indent_dictionary_value = true
allow_split_before_dict_value = false
setup.cfg (26 lines changed)
@@ -1,26 +0,0 @@
[flake8]
# Explanation of errors
#
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# G001: Logging statements should not use string.format() for their first argument
# G004: Logging statements should not use f"..." for their first argument
# W503: Line break occurred before a binary operator
ignore = D100, D101, D102, D103, D107, D202, G001, G004, W503
max-line-length = 99
inline-quotes = double
exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*

[yapf]
based_on_style = google
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true
indent_dictionary_value = true
allow_split_before_dict_value = false