"""Utils for YAML file operations."""

# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import codecs
import glob
import importlib.util
import os
from contextlib import suppress

import ansible.parsing.mod_args
import yaml
from ansible import constants
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.constructor import AnsibleConstructor
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import Templar
from yaml.composer import Composer

from ansiblelater.exceptions import LaterAnsibleError, LaterError

try:
    # Try to import the Ansible 2 module first, it's the future-proof one
    from ansible.parsing.splitter import split_args
except ImportError:
    # Fallback on the Ansible 1.9 module
    from ansible.module_utils.splitter import split_args

def parse_yaml_from_file(filepath):
    dl = DataLoader()
    return dl.load_from_file(filepath)


def path_dwim(basedir, given):
    dl = DataLoader()
    dl.set_basedir(basedir)
    return dl.path_dwim(given)


def ansible_template(basedir, varname, templatevars, **kwargs):
    dl = DataLoader()
    dl.set_basedir(basedir)
    templar = Templar(dl, variables=templatevars)
    return templar.template(varname, **kwargs)
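
# Illustrative usage of the DataLoader wrappers above (paths are hypothetical):
#   parse_yaml_from_file("site.yml")  -> parsed YAML data structure
#   path_dwim("/project", "roles")    -> "/project/roles"
#   ansible_template("/project", "{{ var }}", {"var": "value"})  -> "value"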


try:
    from ansible.plugins import module_loader
except ImportError:
    from ansible.plugins.loader import init_plugin_loader, module_loader

    init_plugin_loader()

LINE_NUMBER_KEY = "__line__"
FILENAME_KEY = "__file__"

VALID_KEYS = [
    "name",
    "action",
    "when",
    "async",
    "poll",
    "notify",
    "first_available_file",
    "include",
    "import_playbook",
    "tags",
    "register",
    "ignore_errors",
    "delegate_to",
    "local_action",
    "transport",
    "remote_user",
    "sudo",
    "sudo_user",
    "sudo_pass",
    "connection",
    "environment",
    "args",
    "always_run",
    "any_errors_fatal",
    "changed_when",
    "failed_when",
    "check_mode",
    "delay",
    "retries",
    "until",
    "su",
    "su_user",
    "su_pass",
    "no_log",
    "run_once",
    "become",
    "become_user",
    "become_method",
    FILENAME_KEY,
]

BLOCK_NAME_TO_ACTION_TYPE_MAP = {
    "tasks": "task",
    "handlers": "handler",
    "pre_tasks": "task",
    "post_tasks": "task",
    "block": "meta",
    "rescue": "meta",
    "always": "meta",
}


def load_plugins(directory):
    result = []

    for pluginfile in glob.glob(os.path.join(directory, "[A-Za-z]*.py")):
        pluginname = os.path.basename(pluginfile.replace(".py", ""))
        # Import the plugin module directly from its file and instantiate
        # the class named after the file.
        spec = importlib.util.spec_from_file_location(pluginname, pluginfile)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        obj = getattr(mod, pluginname)()
        result.append(obj)
    return result


def tokenize(line):
    tokens = line.lstrip().split(" ")
    if tokens[0] == "-":
        tokens = tokens[1:]
    if tokens[0] == "action:" or tokens[0] == "local_action:":
        tokens = tokens[1:]
    command = tokens[0].replace(":", "")

    args = []
    kwargs = {}
    nonkvfound = False
    for arg in tokens[1:]:
        if "=" in arg and not nonkvfound:
            kv = arg.split("=", 1)
            kwargs[kv[0]] = kv[1]
        else:
            nonkvfound = True
            args.append(arg)
    return (command, args, kwargs)
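
# Worked example (illustrative): key=value pairs are collected as kwargs only
# until the first non-kv token; everything after that becomes a positional arg.
#   tokenize("- command: creates=/tmp/x echo hello")
#   -> ("command", ["echo", "hello"], {"creates": "/tmp/x"})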


def _playbook_items(pb_data):
    if isinstance(pb_data, dict):
        return pb_data.items()

    if not pb_data:
        return []

    return [item for play in pb_data for item in play.items()]
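
# Illustrative: a playbook parses to a list of plays, so
#   _playbook_items([{"hosts": "all", "tasks": []}]) -> [("hosts", "all"), ("tasks", [])]
# while a dict yields its items() directly.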


def find_children(playbook, playbook_dir):
    if not os.path.exists(playbook[0]):
        return []
    if playbook[1] == "role":
        playbook_ds = {"roles": [{"role": playbook[0]}]}
    else:
        try:
            playbook_ds = parse_yaml_from_file(playbook[0])
        except AnsibleError as e:
            raise SystemExit(str(e)) from e
    results = []
    basedir = os.path.dirname(playbook[0])
    items = _playbook_items(playbook_ds)
    for item in items:
        for child in play_children(basedir, item, playbook[1]):
            if "$" in child["path"] or "{{" in child["path"]:
                continue
            valid_tokens = []
            for token in split_args(child["path"]):
                if "=" in token:
                    break
                valid_tokens.append(token)
            path = " ".join(valid_tokens)
            results.append({"path": path_dwim(basedir, path), "type": child["type"]})
    return results
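
# Illustrative: `playbook` is a (path, type) tuple, e.g.
#   find_children(("site.yml", "playbook"), ".")
# returns a list of dicts such as {"path": "<resolved child path>", "type": "tasks"}.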


def template(basedir, value, variables, fail_on_undefined=False, **kwargs):
    # Hack to skip the exception raised when using the to_json filter on a
    # variable; the filter doesn't seem to like empty vars.
    with suppress(AnsibleError, ValueError):
        value = ansible_template(
            os.path.abspath(basedir), value, variables,
            **dict(kwargs, fail_on_undefined=fail_on_undefined)
        )
    return value
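
# Illustrative: undefined variables are returned unrendered because
# fail_on_undefined defaults to False, e.g.
#   template(".", "{{ missing }}", {}) -> "{{ missing }}"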


def play_children(basedir, item, parent_type):
    delegate_map = {
        "tasks": _taskshandlers_children,
        "pre_tasks": _taskshandlers_children,
        "post_tasks": _taskshandlers_children,
        "block": _taskshandlers_children,
        "include": _include_children,
        "import_playbook": _include_children,
        "roles": _roles_children,
        "dependencies": _roles_children,
        "handlers": _taskshandlers_children,
    }
    (k, v) = item
    play_library = os.path.join(os.path.abspath(basedir), "library")
    _load_library_if_exists(play_library)

    if k in delegate_map and v:
        v = template(
            os.path.abspath(basedir),
            v, {"playbook_dir": os.path.abspath(basedir)},
            fail_on_undefined=False
        )
        return delegate_map[k](basedir, k, v, parent_type)
    return []


def _include_children(basedir, k, v, parent_type):
    # handle include: filename.yml tags=blah
    (command, args, kwargs) = tokenize(f"{k}: {v}")

    result = path_dwim(basedir, args[0])
    if not os.path.exists(result) and not basedir.endswith("tasks"):
        result = path_dwim(os.path.join(basedir, "..", "tasks"), v)
    return [{"path": result, "type": parent_type}]


def _taskshandlers_children(basedir, k, v, parent_type):
    results = []
    for th in v:
        if "include" in th:
            append_children(th["include"], basedir, k, parent_type, results)
        elif "include_tasks" in th:
            append_children(th["include_tasks"], basedir, k, parent_type, results)
        elif "import_playbook" in th:
            append_children(th["import_playbook"], basedir, k, parent_type, results)
        elif "import_tasks" in th:
            append_children(th["import_tasks"], basedir, k, parent_type, results)
        elif "import_role" in th:
            results.extend(
                _roles_children(
                    basedir,
                    k, [th["import_role"].get("name")],
                    parent_type,
                    main=th["import_role"].get("tasks_from", "main")
                )
            )
        elif "include_role" in th:
            results.extend(
                _roles_children(
                    basedir,
                    k, [th["include_role"].get("name")],
                    parent_type,
                    main=th["include_role"].get("tasks_from", "main")
                )
            )
        elif "block" in th:
            results.extend(_taskshandlers_children(basedir, k, th["block"], parent_type))
            if "rescue" in th:
                results.extend(_taskshandlers_children(basedir, k, th["rescue"], parent_type))
            if "always" in th:
                results.extend(_taskshandlers_children(basedir, k, th["always"], parent_type))
    return results


def append_children(taskhandler, basedir, k, parent_type, results):
    # when taskshandlers_children is called for playbooks, the
    # actual type of the included tasks is the section containing the
    # include, e.g. tasks, pre_tasks, or handlers.
    playbook_section = k if parent_type == "playbook" else parent_type
    results.append({"path": path_dwim(basedir, taskhandler), "type": playbook_section})


def _roles_children(basedir, k, v, parent_type, main="main"):  # noqa
    results = []
    for role in v:
        if isinstance(role, dict):
            if "role" in role or "name" in role:
                if "tags" not in role or "skip_ansible_later" not in role["tags"]:
                    results.extend(
                        _look_for_role_files(
                            basedir, role.get("role", role.get("name")), main=main
                        )
                    )
            else:
                raise SystemExit(f"role dict {role} does not contain a 'role' or 'name' key")
        else:
            results.extend(_look_for_role_files(basedir, role, main=main))
    return results


def _load_library_if_exists(path):
    if os.path.exists(path):
        module_loader.add_directory(path)


def _rolepath(basedir, role):
    role_path = None

    possible_paths = [
        # if included from a playbook
        path_dwim(basedir, os.path.join("roles", role)),
        path_dwim(basedir, role),
        # if included from roles/[role]/meta/main.yml
        path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)),
        path_dwim(basedir, os.path.join("..", "..", role))
    ]

    if constants.DEFAULT_ROLES_PATH:
        search_locations = constants.DEFAULT_ROLES_PATH
        if isinstance(search_locations, str):
            search_locations = search_locations.split(os.pathsep)
        for loc in search_locations:
            loc = os.path.expanduser(loc)
            possible_paths.append(path_dwim(loc, role))

    for path_option in possible_paths:
        if os.path.isdir(path_option):
            role_path = path_option
            break

    if role_path:
        _load_library_if_exists(os.path.join(role_path, "library"))

    return role_path


def _look_for_role_files(basedir, role, main="main"):
    role_path = _rolepath(basedir, role)
    if not role_path:
        return []

    results = []

    for th in ["tasks", "handlers", "meta"]:
        for ext in (".yml", ".yaml"):
            thpath = os.path.join(role_path, th, main + ext)
            if os.path.exists(thpath):
                results.append({"path": thpath, "type": th})
                break
    return results


def rolename(filepath):
    idx = filepath.find("roles/")
    if idx < 0:
        return ""
    role = filepath[idx + 6:]
    role = role[:role.find("/")]
    return role
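
# Illustrative:
#   rolename("roles/nginx/tasks/main.yml") -> "nginx"
#   rolename("playbooks/site.yml")         -> ""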


def _kv_to_dict(v):
    (command, args, kwargs) = tokenize(v)
    return dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs)
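
# Illustrative:
#   _kv_to_dict("shell creates=/tmp/x echo hello")
#   -> {"__ansible_module__": "shell", "__ansible_arguments__": ["echo", "hello"],
#      "creates": "/tmp/x"}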


def normalize_task(task, filename, custom_modules=None):
    """Ensure tasks have an action key and strings are converted to python objects."""
    if custom_modules is None:
        custom_modules = []

    ansible_action_type = task.get("__ansible_action_type__", "task")
    if "__ansible_action_type__" in task:
        del task["__ansible_action_type__"]

    # temporarily extract metadata
    ansible_meta = {}
    for key in ["__line__", "__file__", "__ansible_action_meta__"]:
        default = None

        if key == "__ansible_action_meta__":
            default = {}

        ansible_meta[key] = task.pop(key, default)

    normalized = {}

    builtin = list(ansible.parsing.mod_args.BUILTIN_TASKS)
    builtin = list(set(builtin + custom_modules))
    ansible.parsing.mod_args.BUILTIN_TASKS = frozenset(builtin)
    mod_arg_parser = ModuleArgsParser(task)

    try:
        action, arguments, normalized["delegate_to"] = mod_arg_parser.parse()
    except AnsibleParserError as e:
        raise LaterAnsibleError(e) from e

    # denormalize shell -> command conversion
    if "_uses_shell" in arguments:
        action = "shell"
        del arguments["_uses_shell"]

    for (k, v) in list(task.items()):
        if k in ("action", "local_action", "args", "delegate_to") or k == action:
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue

        normalized[k] = v

    normalized["action"] = {"__ansible_module__": action}

    if "_raw_params" in arguments:
        normalized["action"]["__ansible_arguments__"] = arguments["_raw_params"].strip().split()
        del arguments["_raw_params"]
    else:
        normalized["action"]["__ansible_arguments__"] = []
    normalized["action"].update(arguments)

    normalized[FILENAME_KEY] = filename
    normalized["__ansible_action_type__"] = ansible_action_type

    # add back extracted metadata
    for (k, v) in ansible_meta.items():
        if v:
            normalized[k] = v

    return normalized
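
# Illustrative: shorthand module syntax is expanded into an explicit action dict.
#   normalize_task({"name": "say hi", "command": "echo hi"}, "tasks/main.yml")
# returns roughly:
#   {"name": "say hi", "delegate_to": None,
#    "action": {"__ansible_module__": "command", "__ansible_arguments__": ["echo", "hi"]},
#    "__file__": "tasks/main.yml", "__ansible_action_type__": "task"}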


def action_tasks(yaml, file):
    tasks = []
    if file["filetype"] in ["tasks", "handlers"]:
        tasks = add_action_type(yaml, file["filetype"])
    else:
        tasks.extend(extract_from_list(yaml, ["tasks", "handlers", "pre_tasks", "post_tasks"]))

    # Add sub-elements of block/rescue/always to tasks list
    tasks.extend(extract_from_list(tasks, ["block", "rescue", "always"]))
    # Remove block/rescue/always elements from tasks list
    block_rescue_always = ("block", "rescue", "always")
    tasks[:] = [task for task in tasks if all(k not in task for k in block_rescue_always)]

    allowed = ["include", "include_tasks", "import_playbook", "import_tasks"]

    return [task for task in tasks if set(allowed).isdisjoint(task.keys())]


def task_to_str(task):
    name = task.get("name")
    if name:
        return name
    action = task.get("action")
    args = " ".join([
        f"{k}={v}" for (k, v) in action.items()
        if k not in ["__ansible_module__", "__ansible_arguments__"]
    ] + action.get("__ansible_arguments__"))
    return "{} {}".format(action["__ansible_module__"], args)
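
# Illustrative:
#   task_to_str({"name": "install nginx"}) -> "install nginx"
#   task_to_str({"action": {"__ansible_module__": "command",
#                           "__ansible_arguments__": ["echo", "hi"]}}) -> "command echo hi"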


def extract_from_list(blocks, candidates):
    results = []
    for block in blocks:
        for candidate in candidates:
            delete_meta_keys = [candidate, "__line__", "__file__", "__ansible_action_type__"]
            if isinstance(block, dict) and candidate in block:
                if isinstance(block[candidate], list):
                    meta_data = dict(block)
                    for key in delete_meta_keys:
                        meta_data.pop(key, None)
                    results.extend(add_action_type(block[candidate], candidate, meta_data))
                elif block[candidate] is not None:
                    raise RuntimeError(
                        f"Key '{candidate}' defined, but bad value: '{str(block[candidate])}'"
                    )
    return results


def add_action_type(actions, action_type, action_meta=None):
    results = []
    for action in actions:
        action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
        if action_meta:
            action["__ansible_action_meta__"] = action_meta
        results.append(action)
    return results


def parse_yaml_linenumbers(data, filename):
    """
    Parse yaml as ansible.utils.parse_yaml but with line numbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        try:
            mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        except yaml.constructor.ConstructorError as e:
            raise LaterError("syntax error", e) from e

        if hasattr(node, "__line__"):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        loader = AnsibleLoader(data)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data() or []
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise LaterError("syntax error", e) from e
    except yaml.composer.ComposerError as e:
        e.problem = f"{e.context} {e.problem}"
        raise LaterError("syntax error", e) from e
    return data
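
# Illustrative (exact line numbers depend on the YAML layout):
#   parse_yaml_linenumbers("- name: demo\n", "t.yml")
#   -> [{"name": "demo", "__line__": 1, "__file__": "t.yml"}]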


def normalized_yaml(file, options):
    lines = []
    removes = []

    try:
        with codecs.open(file, mode="rb", encoding="utf-8") as f:
            lines = list(enumerate(f.readlines(), start=1))

        for i, line in lines:
            if line.strip().startswith("#"):
                removes.append((i, line))
            # remove the document start marker as well
            if options.get("remove_markers") and line.strip() == "---":
                removes.append((i, line))
            # remove empty lines
            if options.get("remove_empty") and not line.strip():
                removes.append((i, line))

        for line in removes:
            lines.remove(line)
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise LaterError("syntax error", e) from e
    return lines
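
# Illustrative: returns (line_number, text) tuples with comments stripped, e.g.
# a file containing the lines "---", "# setup", "" and "- name: demo" processed
# with options {"remove_markers": True, "remove_empty": True} yields
#   [(4, "- name: demo\n")]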


class UnsafeTag:
    """Handle custom yaml unsafe tag."""

    yaml_tag = "!unsafe"

    def __init__(self, value):
        self.unsafe = value

    @staticmethod
    def yaml_constructor(loader, node):
        return loader.construct_scalar(node)


class VaultTag:
    """Handle custom yaml vault tag."""

    yaml_tag = "!vault"

    def __init__(self, value):
        self.unsafe = value

    @staticmethod
    def yaml_constructor(loader, node):
        return loader.construct_scalar(node)
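
# Illustrative: these classes are meant to be registered on a PyYAML loader so
# that Ansible's custom tags parse as plain scalars, e.g.
#   yaml.SafeLoader.add_constructor(UnsafeTag.yaml_tag, UnsafeTag.yaml_constructor)
#   yaml.SafeLoader.add_constructor(VaultTag.yaml_tag, VaultTag.yaml_constructor)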