Force single-line imports in isort and fix PEP 8 issues

This commit is contained in:
Robert Kaussow 2019-04-10 16:09:37 +02:00
parent 0ee05b9fc0
commit bdeded30f2
11 changed files with 193 additions and 173 deletions

View File

@ -4,8 +4,11 @@
import argparse import argparse
import multiprocessing import multiprocessing
from ansiblelater import LOG, __version__, logger from ansiblelater import LOG
from ansiblelater.command import base, candidates from ansiblelater import __version__
from ansiblelater import logger
from ansiblelater.command import base
from ansiblelater.command import candidates
def main(): def main():

View File

@ -7,7 +7,8 @@ from distutils.version import LooseVersion
import ansible import ansible
from ansiblelater import settings, utils from ansiblelater import settings
from ansiblelater import utils
def get_settings(args): def get_settings(args):

View File

@ -8,8 +8,8 @@ from distutils.version import LooseVersion
from six import iteritems from six import iteritems
from ansiblelater import LOG, utils from ansiblelater import LOG
from ansiblelater.exceptions import LaterAnsibleError, LaterError # noqa from ansiblelater import utils
from ansiblelater.logger import flag_extra from ansiblelater.logger import flag_extra
try: try:

View File

@ -1,25 +1,29 @@
from ansiblelater.rules.ansiblefiles import (check_become_user, """Example standards definition."""
check_braces_spaces,
check_command_has_changes, from ansiblelater.rules.ansiblefiles import check_become_user
check_command_instead_of_module, from ansiblelater.rules.ansiblefiles import check_braces_spaces
check_compare_to_literal_bool, from ansiblelater.rules.ansiblefiles import check_command_has_changes
check_empty_string_compare, from ansiblelater.rules.ansiblefiles import check_command_instead_of_module
check_filter_separation, from ansiblelater.rules.ansiblefiles import check_compare_to_literal_bool
check_install_use_latest, from ansiblelater.rules.ansiblefiles import check_empty_string_compare
check_literal_bool_format, from ansiblelater.rules.ansiblefiles import check_filter_separation
check_name_format, from ansiblelater.rules.ansiblefiles import check_install_use_latest
check_named_task, from ansiblelater.rules.ansiblefiles import check_literal_bool_format
check_shell_instead_command, from ansiblelater.rules.ansiblefiles import check_name_format
check_unique_named_task) from ansiblelater.rules.ansiblefiles import check_named_task
from ansiblelater.rules.rolefiles import check_meta_main, check_scm_in_src from ansiblelater.rules.ansiblefiles import check_shell_instead_command
from ansiblelater.rules.ansiblefiles import check_unique_named_task
from ansiblelater.rules.rolefiles import check_meta_main
from ansiblelater.rules.rolefiles import check_scm_in_src
from ansiblelater.rules.taskfiles import check_line_between_tasks from ansiblelater.rules.taskfiles import check_line_between_tasks
from ansiblelater.rules.yamlfiles import (check_native_yaml, check_yaml_colons, from ansiblelater.rules.yamlfiles import check_native_yaml
check_yaml_document_start, from ansiblelater.rules.yamlfiles import check_yaml_colons
check_yaml_empty_lines, from ansiblelater.rules.yamlfiles import check_yaml_document_start
check_yaml_file, from ansiblelater.rules.yamlfiles import check_yaml_empty_lines
check_yaml_has_content, from ansiblelater.rules.yamlfiles import check_yaml_file
check_yaml_hyphens, from ansiblelater.rules.yamlfiles import check_yaml_has_content
check_yaml_indent) from ansiblelater.rules.yamlfiles import check_yaml_hyphens
from ansiblelater.rules.yamlfiles import check_yaml_indent
from ansiblelater.standard import Standard from ansiblelater.standard import Standard
tasks_should_be_separated = Standard(dict( tasks_should_be_separated = Standard(dict(

View File

@ -4,10 +4,11 @@ import os
import re import re
from collections import defaultdict from collections import defaultdict
from ansiblelater.command.candidates import Error, Result from ansiblelater.command.candidates import Error
from ansiblelater.command.candidates import Result
from ansiblelater.utils import count_spaces from ansiblelater.utils import count_spaces
from ansiblelater.utils.rulehelper import (get_normalized_tasks, from ansiblelater.utils.rulehelper import get_normalized_tasks
get_normalized_yaml) from ansiblelater.utils.rulehelper import get_normalized_yaml
def check_braces_spaces(candidate, settings): def check_braces_spaces(candidate, settings):

View File

@ -2,8 +2,10 @@
from nested_lookup import nested_lookup from nested_lookup import nested_lookup
from ansiblelater.command.candidates import Error, Result from ansiblelater.command.candidates import Error
from ansiblelater.utils.rulehelper import get_raw_yaml, get_tasks from ansiblelater.command.candidates import Result
from ansiblelater.utils.rulehelper import get_raw_yaml
from ansiblelater.utils.rulehelper import get_tasks
def check_meta_main(candidate, settings): def check_meta_main(candidate, settings):

View File

@ -3,7 +3,8 @@
import re import re
from collections import defaultdict from collections import defaultdict
from ansiblelater.command.candidates import Error, Result from ansiblelater.command.candidates import Error
from ansiblelater.command.candidates import Result
from ansiblelater.utils.rulehelper import get_normalized_yaml from ansiblelater.utils.rulehelper import get_normalized_yaml

View File

@ -5,10 +5,12 @@ import os
import yaml import yaml
from ansiblelater.command.candidates import Error, Result from ansiblelater.command.candidates import Error
from ansiblelater.utils.rulehelper import (get_action_tasks, from ansiblelater.command.candidates import Result
get_normalized_task, from ansiblelater.utils.rulehelper import get_action_tasks
get_normalized_yaml, run_yamllint) from ansiblelater.utils.rulehelper import get_normalized_task
from ansiblelater.utils.rulehelper import get_normalized_yaml
from ansiblelater.utils.rulehelper import run_yamllint
def check_yaml_has_content(candidate, settings): def check_yaml_has_content(candidate, settings):

View File

@ -8,10 +8,13 @@ from yamllint import linter
from yamllint.config import YamlLintConfig from yamllint.config import YamlLintConfig
from ansiblelater.command.candidates import Error from ansiblelater.command.candidates import Error
from ansiblelater.exceptions import LaterAnsibleError, LaterError from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
from .yamlhelper import (action_tasks, normalize_task, normalized_yaml, from .yamlhelper import action_tasks
parse_yaml_linenumbers) from .yamlhelper import normalize_task
from .yamlhelper import normalized_yaml
from .yamlhelper import parse_yaml_linenumbers
def get_tasks(candidate, settings): def get_tasks(candidate, settings):

View File

@ -20,17 +20,27 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE. # THE SOFTWARE.
import codecs
import glob import glob
import imp import imp
import os
import codecs
import inspect import inspect
import os
import six
import ansible.parsing.mod_args import ansible.parsing.mod_args
import six
import yaml
from ansible import constants from ansible import constants
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansiblelater.exceptions import LaterAnsibleError, LaterError from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import Templar
from yaml.composer import Composer
from ansiblelater.exceptions import LaterAnsibleError
from ansiblelater.exceptions import LaterError
try: try:
# Try to import the Ansible 2 module first, it's the future-proof one # Try to import the Ansible 2 module first, it's the future-proof one
@ -40,25 +50,15 @@ except ImportError:
# Fallback on the Ansible 1.9 module # Fallback on the Ansible 1.9 module
from ansible.module_utils.splitter import split_args from ansible.module_utils.splitter import split_args
import yaml
from yaml.composer import Composer
from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.errors import AnsibleParserError
# ansible-later doesn't need/want to know about encrypted secrets, but it needs # ansible-later doesn't need/want to know about encrypted secrets, but it needs
# Ansible 2.3+ allows encrypted secrets within yaml files, so we pass a string # Ansible 2.3+ allows encrypted secrets within yaml files, so we pass a string
# as the password to enable such yaml files to be opened and parsed successfully. # as the password to enable such yaml files to be opened and parsed successfully.
DEFAULT_VAULT_PASSWORD = 'x' DEFAULT_VAULT_PASSWORD = "x"
def parse_yaml_from_file(filepath): def parse_yaml_from_file(filepath):
dl = DataLoader() dl = DataLoader()
if hasattr(dl, 'set_vault_password'): if hasattr(dl, "set_vault_password"):
dl.set_vault_password(DEFAULT_VAULT_PASSWORD) dl.set_vault_password(DEFAULT_VAULT_PASSWORD)
return dl.load_from_file(filepath) return dl.load_from_file(filepath)
@ -81,28 +81,28 @@ try:
except ImportError: except ImportError:
from ansible.plugins.loader import module_loader from ansible.plugins.loader import module_loader
LINE_NUMBER_KEY = '__line__' LINE_NUMBER_KEY = "__line__"
FILENAME_KEY = '__file__' FILENAME_KEY = "__file__"
VALID_KEYS = [ VALID_KEYS = [
'name', 'action', 'when', 'async', 'poll', 'notify', "name", "action", "when", "async", "poll", "notify",
'first_available_file', 'include', 'import_playbook', "first_available_file", "include", "import_playbook",
'tags', 'register', 'ignore_errors', 'delegate_to', "tags", "register", "ignore_errors", "delegate_to",
'local_action', 'transport', 'remote_user', 'sudo', "local_action", "transport", "remote_user", "sudo",
'sudo_user', 'sudo_pass', 'when', 'connection', 'environment', 'args', 'always_run', "sudo_user", "sudo_pass", "when", "connection", "environment", "args", "always_run",
'any_errors_fatal', 'changed_when', 'failed_when', 'check_mode', 'delay', "any_errors_fatal", "changed_when", "failed_when", "check_mode", "delay",
'retries', 'until', 'su', 'su_user', 'su_pass', 'no_log', 'run_once', "retries", "until", "su", "su_user", "su_pass", "no_log", "run_once",
'become', 'become_user', 'become_method', FILENAME_KEY, "become", "become_user", "become_method", FILENAME_KEY,
] ]
BLOCK_NAME_TO_ACTION_TYPE_MAP = { BLOCK_NAME_TO_ACTION_TYPE_MAP = {
'tasks': 'task', "tasks": "task",
'handlers': 'handler', "handlers": "handler",
'pre_tasks': 'task', "pre_tasks": "task",
'post_tasks': 'task', "post_tasks": "task",
'block': 'meta', "block": "meta",
'rescue': 'meta', "rescue": "meta",
'always': 'meta', "always": "meta",
} }
@ -110,9 +110,9 @@ def load_plugins(directory):
result = [] result = []
fh = None fh = None
for pluginfile in glob.glob(os.path.join(directory, '[A-Za-z]*.py')): for pluginfile in glob.glob(os.path.join(directory, "[A-Za-z]*.py")):
pluginname = os.path.basename(pluginfile.replace('.py', '')) pluginname = os.path.basename(pluginfile.replace(".py", ""))
try: try:
fh, filename, desc = imp.find_module(pluginname, [directory]) fh, filename, desc = imp.find_module(pluginname, [directory])
mod = imp.load_module(pluginname, fh, filename, desc) mod = imp.load_module(pluginname, fh, filename, desc)
@ -126,9 +126,9 @@ def load_plugins(directory):
def tokenize(line): def tokenize(line):
tokens = line.lstrip().split(" ") tokens = line.lstrip().split(" ")
if tokens[0] == '-': if tokens[0] == "-":
tokens = tokens[1:] tokens = tokens[1:]
if tokens[0] == 'action:' or tokens[0] == 'local_action:': if tokens[0] == "action:" or tokens[0] == "local_action:":
tokens = tokens[1:] tokens = tokens[1:]
command = tokens[0].replace(":", "") command = tokens[0].replace(":", "")
@ -157,8 +157,8 @@ def _playbook_items(pb_data):
def find_children(playbook, playbook_dir): def find_children(playbook, playbook_dir):
if not os.path.exists(playbook[0]): if not os.path.exists(playbook[0]):
return [] return []
if playbook[1] == 'role': if playbook[1] == "role":
playbook_ds = {'roles': [{'role': playbook[0]}]} playbook_ds = {"roles": [{"role": playbook[0]}]}
else: else:
try: try:
playbook_ds = parse_yaml_from_file(playbook[0]) playbook_ds = parse_yaml_from_file(playbook[0])
@ -169,24 +169,24 @@ def find_children(playbook, playbook_dir):
items = _playbook_items(playbook_ds) items = _playbook_items(playbook_ds)
for item in items: for item in items:
for child in play_children(basedir, item, playbook[1], playbook_dir): for child in play_children(basedir, item, playbook[1], playbook_dir):
if "$" in child['path'] or "{{" in child['path']: if "$" in child["path"] or "{{" in child["path"]:
continue continue
valid_tokens = list() valid_tokens = list()
for token in split_args(child['path']): for token in split_args(child["path"]):
if '=' in token: if "=" in token:
break break
valid_tokens.append(token) valid_tokens.append(token)
path = ' '.join(valid_tokens) path = " ".join(valid_tokens)
results.append({ results.append({
'path': path_dwim(basedir, path), "path": path_dwim(basedir, path),
'type': child['type'] "type": child["type"]
}) })
return results return results
def template(basedir, value, vars, fail_on_undefined=False, **kwargs): def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
try: try:
value = ansible_template(os.path.abspath(basedir), value, vars, value = ansible_template(os.path.abspath(basedir), value, variables,
**dict(kwargs, fail_on_undefined=fail_on_undefined)) **dict(kwargs, fail_on_undefined=fail_on_undefined))
# Hack to skip the following exception when using to_json filter on a variable. # Hack to skip the following exception when using to_json filter on a variable.
# I guess the filter doesn't like empty vars... # I guess the filter doesn't like empty vars...
@ -198,18 +198,18 @@ def template(basedir, value, vars, fail_on_undefined=False, **kwargs):
def play_children(basedir, item, parent_type, playbook_dir): def play_children(basedir, item, parent_type, playbook_dir):
delegate_map = { delegate_map = {
'tasks': _taskshandlers_children, "tasks": _taskshandlers_children,
'pre_tasks': _taskshandlers_children, "pre_tasks": _taskshandlers_children,
'post_tasks': _taskshandlers_children, "post_tasks": _taskshandlers_children,
'block': _taskshandlers_children, "block": _taskshandlers_children,
'include': _include_children, "include": _include_children,
'import_playbook': _include_children, "import_playbook": _include_children,
'roles': _roles_children, "roles": _roles_children,
'dependencies': _roles_children, "dependencies": _roles_children,
'handlers': _taskshandlers_children, "handlers": _taskshandlers_children,
} }
(k, v) = item (k, v) = item
play_library = os.path.join(os.path.abspath(basedir), 'library') play_library = os.path.join(os.path.abspath(basedir), "library")
_load_library_if_exists(play_library) _load_library_if_exists(play_library)
if k in delegate_map: if k in delegate_map:
@ -227,35 +227,35 @@ def _include_children(basedir, k, v, parent_type):
(command, args, kwargs) = tokenize("{0}: {1}".format(k, v)) (command, args, kwargs) = tokenize("{0}: {1}".format(k, v))
result = path_dwim(basedir, args[0]) result = path_dwim(basedir, args[0])
if not os.path.exists(result) and not basedir.endswith('tasks'): if not os.path.exists(result) and not basedir.endswith("tasks"):
result = path_dwim(os.path.join(basedir, '..', 'tasks'), v) result = path_dwim(os.path.join(basedir, "..", "tasks"), v)
return [{'path': result, 'type': parent_type}] return [{"path": result, "type": parent_type}]
def _taskshandlers_children(basedir, k, v, parent_type): def _taskshandlers_children(basedir, k, v, parent_type):
results = [] results = []
for th in v: for th in v:
if 'include' in th: if "include" in th:
append_children(th['include'], basedir, k, parent_type, results) append_children(th["include"], basedir, k, parent_type, results)
elif 'include_tasks' in th: elif "include_tasks" in th:
append_children(th['include_tasks'], basedir, k, parent_type, results) append_children(th["include_tasks"], basedir, k, parent_type, results)
elif 'import_playbook' in th: elif "import_playbook" in th:
append_children(th['import_playbook'], basedir, k, parent_type, results) append_children(th["import_playbook"], basedir, k, parent_type, results)
elif 'import_tasks' in th: elif "import_tasks" in th:
append_children(th['import_tasks'], basedir, k, parent_type, results) append_children(th["import_tasks"], basedir, k, parent_type, results)
elif 'import_role' in th: elif "import_role" in th:
results.extend(_roles_children(basedir, k, [th['import_role'].get('name')], parent_type, results.extend(_roles_children(basedir, k, [th["import_role"].get("name")], parent_type,
main=th['import_role'].get('tasks_from', 'main'))) main=th["import_role"].get("tasks_from", "main")))
elif 'include_role' in th: elif "include_role" in th:
results.extend(_roles_children(basedir, k, [th['include_role'].get('name')], results.extend(_roles_children(basedir, k, [th["include_role"].get("name")],
parent_type, parent_type,
main=th['include_role'].get('tasks_from', 'main'))) main=th["include_role"].get("tasks_from", "main")))
elif 'block' in th: elif "block" in th:
results.extend(_taskshandlers_children(basedir, k, th['block'], parent_type)) results.extend(_taskshandlers_children(basedir, k, th["block"], parent_type))
if 'rescue' in th: if "rescue" in th:
results.extend(_taskshandlers_children(basedir, k, th['rescue'], parent_type)) results.extend(_taskshandlers_children(basedir, k, th["rescue"], parent_type))
if 'always' in th: if "always" in th:
results.extend(_taskshandlers_children(basedir, k, th['always'], parent_type)) results.extend(_taskshandlers_children(basedir, k, th["always"], parent_type))
return results return results
@ -263,28 +263,28 @@ def append_children(taskhandler, basedir, k, parent_type, results):
# when taskshandlers_children is called for playbooks, the # when taskshandlers_children is called for playbooks, the
# actual type of the included tasks is the section containing the # actual type of the included tasks is the section containing the
# include, e.g. tasks, pre_tasks, or handlers. # include, e.g. tasks, pre_tasks, or handlers.
if parent_type == 'playbook': if parent_type == "playbook":
playbook_section = k playbook_section = k
else: else:
playbook_section = parent_type playbook_section = parent_type
results.append({ results.append({
'path': path_dwim(basedir, taskhandler), "path": path_dwim(basedir, taskhandler),
'type': playbook_section "type": playbook_section
}) })
def _roles_children(basedir, k, v, parent_type, main='main'): def _roles_children(basedir, k, v, parent_type, main="main"):
results = [] results = []
for role in v: for role in v:
if isinstance(role, dict): if isinstance(role, dict):
if 'role' in role or 'name' in role: if "role" in role or "name" in role:
if 'tags' not in role or 'skip_ansible_later' not in role['tags']: if "tags" not in role or "skip_ansible_later" not in role["tags"]:
results.extend(_look_for_role_files(basedir, results.extend(_look_for_role_files(basedir,
role.get('role', role.get('name')), role.get("role", role.get("name")),
main=main)) main=main))
else: else:
raise SystemExit('role dict {0} does not contain a "role" ' raise SystemExit("role dict {0} does not contain a 'role' "
'or "name" key'.format(role)) "or 'name' key".format(role))
else: else:
results.extend(_look_for_role_files(basedir, role, main=main)) results.extend(_look_for_role_files(basedir, role, main=main))
return results return results
@ -300,13 +300,13 @@ def _rolepath(basedir, role):
possible_paths = [ possible_paths = [
# if included from a playbook # if included from a playbook
path_dwim(basedir, os.path.join('roles', role)), path_dwim(basedir, os.path.join("roles", role)),
path_dwim(basedir, role), path_dwim(basedir, role),
# if included from roles/[role]/meta/main.yml # if included from roles/[role]/meta/main.yml
path_dwim( path_dwim(
basedir, os.path.join('..', '..', '..', 'roles', role) basedir, os.path.join("..", "..", "..", "roles", role)
), ),
path_dwim(basedir, os.path.join('..', '..', role)) path_dwim(basedir, os.path.join("..", "..", role))
] ]
if constants.DEFAULT_ROLES_PATH: if constants.DEFAULT_ROLES_PATH:
@ -323,33 +323,33 @@ def _rolepath(basedir, role):
break break
if role_path: if role_path:
_load_library_if_exists(os.path.join(role_path, 'library')) _load_library_if_exists(os.path.join(role_path, "library"))
return role_path return role_path
def _look_for_role_files(basedir, role, main='main'): def _look_for_role_files(basedir, role, main="main"):
role_path = _rolepath(basedir, role) role_path = _rolepath(basedir, role)
if not role_path: if not role_path:
return [] return []
results = [] results = []
for th in ['tasks', 'handlers', 'meta']: for th in ["tasks", "handlers", "meta"]:
for ext in ('.yml', '.yaml'): for ext in (".yml", ".yaml"):
thpath = os.path.join(role_path, th, main + ext) thpath = os.path.join(role_path, th, main + ext)
if os.path.exists(thpath): if os.path.exists(thpath):
results.append({'path': thpath, 'type': th}) results.append({"path": thpath, "type": th})
break break
return results return results
def rolename(filepath): def rolename(filepath):
idx = filepath.find('roles/') idx = filepath.find("roles/")
if idx < 0: if idx < 0:
return '' return ""
role = filepath[idx + 6:] role = filepath[idx + 6:]
role = role[:role.find('/')] role = role[:role.find("/")]
return role return role
@ -359,11 +359,11 @@ def _kv_to_dict(v):
def normalize_task(task, filename, custom_modules=[]): def normalize_task(task, filename, custom_modules=[]):
'''Ensures tasks have an action key and strings are converted to python objects''' """Ensure tasks have an action key and strings are converted to python objects."""
ansible_action_type = task.get('__ansible_action_type__', 'task') ansible_action_type = task.get("__ansible_action_type__", "task")
ansible_action_meta = task.get('__ansible_action_meta__', dict()) ansible_action_meta = task.get("__ansible_action_meta__", dict())
if '__ansible_action_type__' in task: if "__ansible_action_type__" in task:
del(task['__ansible_action_type__']) del(task["__ansible_action_type__"])
normalized = dict() normalized = dict()
# TODO: Workaround for custom modules # TODO: Workaround for custom modules
@ -372,53 +372,53 @@ def normalize_task(task, filename, custom_modules=[]):
ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin) ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
mod_arg_parser = ModuleArgsParser(task) mod_arg_parser = ModuleArgsParser(task)
try: try:
action, arguments, normalized['delegate_to'] = mod_arg_parser.parse() action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
except AnsibleParserError as e: except AnsibleParserError as e:
raise LaterAnsibleError("syntax error", e) raise LaterAnsibleError("syntax error", e)
# denormalize shell -> command conversion # denormalize shell -> command conversion
if '_uses_shell' in arguments: if "_uses_shell" in arguments:
action = 'shell' action = "shell"
del(arguments['_uses_shell']) del(arguments["_uses_shell"])
for (k, v) in list(task.items()): for (k, v) in list(task.items()):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action: if k in ("action", "local_action", "args", "delegate_to") or k == action:
# we don't want to re-assign these values, which were # we don't want to re-assign these values, which were
# determined by the ModuleArgsParser() above # determined by the ModuleArgsParser() above
continue continue
else: else:
normalized[k] = v normalized[k] = v
normalized['action'] = dict(__ansible_module__=action) normalized["action"] = dict(__ansible_module__=action)
if '_raw_params' in arguments: if "_raw_params" in arguments:
normalized['action']['__ansible_arguments__'] = arguments['_raw_params'].split(' ') normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].split(" ")
del(arguments['_raw_params']) del(arguments["_raw_params"])
else: else:
normalized['action']['__ansible_arguments__'] = list() normalized["action"]["__ansible_arguments__"] = list()
normalized['action'].update(arguments) normalized["action"].update(arguments)
normalized[FILENAME_KEY] = filename normalized[FILENAME_KEY] = filename
normalized['__ansible_action_type__'] = ansible_action_type normalized["__ansible_action_type__"] = ansible_action_type
normalized['__ansible_action_meta__'] = ansible_action_meta normalized["__ansible_action_meta__"] = ansible_action_meta
return normalized return normalized
def action_tasks(yaml, file): def action_tasks(yaml, file):
tasks = list() tasks = list()
if file['filetype'] in ['tasks', 'handlers']: if file["filetype"] in ["tasks", "handlers"]:
tasks = add_action_type(yaml, file['filetype']) tasks = add_action_type(yaml, file["filetype"])
else: else:
tasks.extend(extract_from_list(yaml, ['tasks', 'handlers', 'pre_tasks', 'post_tasks'])) tasks.extend(extract_from_list(yaml, ["tasks", "handlers", "pre_tasks", "post_tasks"]))
# Add sub-elements of block/rescue/always to tasks list # Add sub-elements of block/rescue/always to tasks list
tasks.extend(extract_from_list(tasks, ['block', 'rescue', 'always'])) tasks.extend(extract_from_list(tasks, ["block", "rescue", "always"]))
# Remove block/rescue/always elements from tasks list # Remove block/rescue/always elements from tasks list
block_rescue_always = ('block', 'rescue', 'always') block_rescue_always = ("block", "rescue", "always")
tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)] tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)]
return [task for task in tasks if set( return [task for task in tasks if set(
['include', 'include_tasks', 'import_playbook', 'import_tasks']).isdisjoint(task.keys())] ["include", "include_tasks", "import_playbook", "import_tasks"]).isdisjoint(task.keys())]
def task_to_str(task): def task_to_str(task):
@ -436,7 +436,7 @@ def extract_from_list(blocks, candidates):
results = list() results = list()
for block in blocks: for block in blocks:
for candidate in candidates: for candidate in candidates:
delete_meta_keys = [candidate, '__line__', '__file__', '__ansible_action_type__'] delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]
if isinstance(block, dict) and candidate in block: if isinstance(block, dict) and candidate in block:
if isinstance(block[candidate], list): if isinstance(block[candidate], list):
meta_data = dict(block) meta_data = dict(block)
@ -453,19 +453,20 @@ def extract_from_list(blocks, candidates):
def add_action_type(actions, action_type, action_meta=None): def add_action_type(actions, action_type, action_meta=None):
results = list() results = list()
for action in actions: for action in actions:
action['__ansible_action_type__'] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type] action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
if action_meta: if action_meta:
action['__ansible_action_meta__'] = action_meta action["__ansible_action_meta__"] = action_meta
results.append(action) results.append(action)
return results return results
def parse_yaml_linenumbers(data, filename): def parse_yaml_linenumbers(data, filename):
"""Parses yaml as ansible.utils.parse_yaml but with linenumbers. """
Parse yaml as ansible.utils.parse_yaml but with linenumbers.
The line numbers are stored in each node's LINE_NUMBER_KEY key. The line numbers are stored in each node's LINE_NUMBER_KEY key.
"""
"""
def compose_node(parent, index): def compose_node(parent, index):
# the line number where the previous token has ended (plus empty lines) # the line number where the previous token has ended (plus empty lines)
line = loader.line line = loader.line
@ -475,7 +476,7 @@ def parse_yaml_linenumbers(data, filename):
def construct_mapping(node, deep=False): def construct_mapping(node, deep=False):
mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep) mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
if hasattr(node, '__line__'): if hasattr(node, "__line__"):
mapping[LINE_NUMBER_KEY] = node.__line__ mapping[LINE_NUMBER_KEY] = node.__line__
else: else:
mapping[LINE_NUMBER_KEY] = mapping._line_number mapping[LINE_NUMBER_KEY] = mapping._line_number
@ -484,8 +485,8 @@ def parse_yaml_linenumbers(data, filename):
try: try:
kwargs = {} kwargs = {}
if 'vault_password' in inspect.getargspec(AnsibleLoader.__init__).args: if "vault_password" in inspect.getargspec(AnsibleLoader.__init__).args:
kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD kwargs["vault_password"] = DEFAULT_VAULT_PASSWORD
loader = AnsibleLoader(data, **kwargs) loader = AnsibleLoader(data, **kwargs)
loader.compose_node = compose_node loader.compose_node = compose_node
loader.construct_mapping = construct_mapping loader.construct_mapping = construct_mapping
@ -503,7 +504,7 @@ def normalized_yaml(file, options):
removes = [] removes = []
try: try:
with codecs.open(file, mode='rb', encoding='utf-8') as f: with codecs.open(file, mode="rb", encoding="utf-8") as f:
lines = list(enumerate(f.readlines(), start=1)) lines = list(enumerate(f.readlines(), start=1))
for i, line in lines: for i, line in lines:

View File

@ -17,6 +17,8 @@ application-import-names = ansiblelater
default_section = THIRDPARTY default_section = THIRDPARTY
known_first_party = ansiblelater known_first_party = ansiblelater
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
force_single_line = true
line_length = 100
[tool:pytest] [tool:pytest]
filterwarnings = filterwarnings =