mirror of https://github.com/thegeeklab/docker-tidy.git
synced 2024-11-21 19:50:40 +00:00

refactor base structure

This commit is contained in:
parent 12b6e10737
commit 936e4b2188
4 .flake8
@@ -1,6 +1,6 @@
[flake8]
ignore = D103
max-line-length = 110
ignore = D103, W503
max-line-length = 99
inline-quotes = double
exclude =
    .git
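For reference, W503 is pycodestyle's "line break before binary operator" warning; ignoring it permits operator-first line wrapping. A tiny illustration with made-up names (not part of the commit):

    # Allowed once W503 is ignored: the binary operator opens the new line.
    total = (first_value
             + second_value)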
2 Pipfile
@@ -21,6 +21,8 @@ pytest-mock = "*"
pytest-cov = "*"
bandit = "*"
docker-tidy = {editable = true,path = "."}
autopep8 = "*"
yapf = "*"

[packages]
importlib-metadata = {version = "*",markers = "python_version<'3.8'"}
41 Pipfile.lock generated
@@ -1,7 +1,7 @@
{
    "_meta": {
        "hash": {
            "sha256": "5435cb449b46e93e063eb55b6d7bd5d990e1c552d7648a35b4a5eef846914075"
            "sha256": "afa3bac7184b4b165d029d7c1db785812064a0ee572d9617b4974f2a922db927"
        },
        "pipfile-spec": 6,
        "requires": {
@@ -80,11 +80,11 @@
        },
        "environs": {
            "hashes": [
                "sha256:2291ce502c9e61b8e208c8c9be4ac474e0f523c4dc23e0beb23118086e43b324",
                "sha256:44700c562fb6f783640f90c2225d9a80d85d24833b4dd02d20b8ff1c83901e47"
                "sha256:54099cfbdd9cb320f438bf29992969ccfd5e232ba068bd650b04d76d96001631",
                "sha256:9578ce00ead984124a5336e5ea073707df303dc19d3b1e7ba34cdce1bb4fe02f"
            ],
            "index": "pypi",
            "version": "==7.2.0"
            "version": "==7.3.0"
        },
        "idna": {
            "hashes": [
@@ -242,10 +242,10 @@
        },
        "zipp": {
            "hashes": [
                "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
                "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
                "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
                "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
            ],
            "version": "==3.0.0"
            "version": "==3.1.0"
        }
    },
    "develop": {
@@ -271,6 +271,13 @@
            ],
            "version": "==19.3.0"
        },
        "autopep8": {
            "hashes": [
                "sha256:0f592a0447acea0c2b0a9602be1e4e3d86db52badd2e3c84f0193bfd89fd3a43"
            ],
            "index": "pypi",
            "version": "==1.5"
        },
        "bandit": {
            "hashes": [
                "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952",
@@ -402,11 +409,11 @@
        },
        "environs": {
            "hashes": [
                "sha256:2291ce502c9e61b8e208c8c9be4ac474e0f523c4dc23e0beb23118086e43b324",
                "sha256:44700c562fb6f783640f90c2225d9a80d85d24833b4dd02d20b8ff1c83901e47"
                "sha256:54099cfbdd9cb320f438bf29992969ccfd5e232ba068bd650b04d76d96001631",
                "sha256:9578ce00ead984124a5336e5ea073707df303dc19d3b1e7ba34cdce1bb4fe02f"
            ],
            "index": "pypi",
            "version": "==7.2.0"
            "version": "==7.3.0"
        },
        "first": {
            "hashes": [
@@ -956,12 +963,20 @@
            ],
            "version": "==0.34.2"
        },
        "yapf": {
            "hashes": [
                "sha256:712e23c468506bf12cadd10169f852572ecc61b266258422d45aaf4ad7ef43de",
                "sha256:cad8a272c6001b3401de3278238fdc54997b6c2e56baa751788915f879a52fca"
            ],
            "index": "pypi",
            "version": "==0.29.0"
        },
        "zipp": {
            "hashes": [
                "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
                "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
                "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
                "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
            ],
            "version": "==3.0.0"
            "version": "==3.1.0"
        }
    }
}
69 dockertidy/Autostop.py Normal file
@@ -0,0 +1,69 @@
#!/usr/bin/env python3
"""Stop long running docker images."""

import dateutil.parser
import docker
import docker.errors
import requests.exceptions

from dockertidy.Config import SingleConfig
from dockertidy.Logger import SingleLog
from dockertidy.Parser import timedelta


class AutoStop:

    def __init__(self):
        self.config = SingleConfig()
        self.log = SingleLog()
        self.logger = SingleLog().logger
        self.docker = self._get_docker_client()

    def stop_containers(self):
        client = self.docker
        config = self.config.config

        max_run_time = timedelta(config["stop"]["max_run_time"])
        prefix = config["stop"]["prefix"]
        dry_run = config["dry_run"]

        matcher = self._build_container_matcher(prefix)

        for container_summary in client.containers():
            container = client.inspect_container(container_summary["Id"])
            name = container["Name"].lstrip("/")
            if (matcher(name) and self._has_been_running_since(container, max_run_time)):

                self.logger.info(
                    "Stopping container %s %s: running since %s" %
                    (container["Id"][:16], name, container["State"]["StartedAt"])
                )

                if not dry_run:
                    self._stop_container(client, container["Id"])

    def _stop_container(self, client, cid):
        try:
            client.stop(cid)
        except requests.exceptions.Timeout as e:
            self.logger.warn("Failed to stop container %s: %s" % (cid, e))
        except docker.errors.APIError as ae:
            self.logger.warn("Error stopping %s: %s" % (cid, ae))

    def _build_container_matcher(self, prefixes):

        def matcher(name):
            return any(name.startswith(prefix) for prefix in prefixes)

        return matcher

    def _has_been_running_since(self, container, min_time):
        started_at = container.get("State", {}).get("StartedAt")
        if not started_at:
            return False

        return dateutil.parser.parse(started_at) <= min_time

    def _get_docker_client(self):
        config = self.config.config
        return docker.APIClient(version="auto", timeout=config["timeout"])
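For orientation, a minimal sketch of how this class is meant to be driven; this is not part of the commit, and it assumes SingleConfig is constructible with its defaults, uses only the config keys read by the methods above, and needs a reachable docker daemon:

    from dockertidy.Autostop import AutoStop
    from dockertidy.Config import SingleConfig

    config = SingleConfig()  # same singleton instance AutoStop() will see
    config.config["stop"]["max_run_time"] = "1hour"  # any pytimeparse format
    config.config["stop"]["prefix"] = ["ci-"]        # match container names by prefix
    config.config["dry_run"] = True                  # log only, stop nothing
    config.config["timeout"] = 60                    # read by _get_docker_client()

    AutoStop().stop_containers()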
@@ -2,18 +2,12 @@
"""Entrypoint and CLI handler."""

import argparse
import logging
import os
import sys

from importlib_metadata import PackageNotFoundError
from importlib_metadata import version

import dockertidy.Exception
from dockertidy import __version__
from dockertidy.Config import SingleConfig
from dockertidy.Utils import SingleLog
from dockertidy.Utils import timedelta_type
from dockertidy.Logger import SingleLog
from dockertidy.Parser import timedelta_validator


class DockerTidy:
@@ -31,53 +25,97 @@ class DockerTidy:
:return: args object
"""
parser = argparse.ArgumentParser(
description="Generate documentation from annotated Ansible roles using templates")
parser.add_argument("-v", dest="logging.level", action="append_const", const=-1,
help="increase log level")
parser.add_argument("-q", dest="logging.level", action="append_const",
const=1, help="decrease log level")
parser.add_argument("--version", action="version",
version="%(prog)s {}".format(__version__))
description="Generate documentation from annotated Ansible roles using templates"
)
parser.add_argument(
"--dry-run",
action="store_true",
dest="dry_run",
help="Only log actions, don't stop anything."
)
parser.add_argument(
"-t",
"--timeout",
type=int,
dest="http_timeout",
metavar="HTTP_TIMEOUT",
help="HTTP timeout in seconds for making docker API calls."
)
parser.add_argument(
"-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
)
parser.add_argument(
"-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
)
parser.add_argument(
"--version", action="version", version="%(prog)s {}".format(__version__)
)

subparsers = parser.add_subparsers(help="sub-command help")

parser_gc = subparsers.add_parser(
"gc", help="Run docker garbage collector.")
parser_gc = subparsers.add_parser("gc", help="Run docker garbage collector.")
parser_gc.add_argument(
"--max-container-age",
type=timedelta_type,
type=timedelta_validator,
dest="gc.max_container_age",
metavar="MAX_CONTAINER_AGE",
help="Maximum age for a container. Containers older than this age "
"will be removed. Age can be specified in any pytimeparse "
"supported format.")
"supported format."
)
parser_gc.add_argument(
"--max-image-age",
type=timedelta_type,
type=timedelta_validator,
dest="gc.max_image_age",
metavar="MAX_IMAGE_AGE",
help="Maximum age for an image. Images older than this age will be "
"removed. Age can be specified in any pytimeparse supported "
"format.")
"format."
)
parser_gc.add_argument(
"--dangling-volumes",
action="store_true",
help="Dangling volumes will be removed.")
parser_gc.add_argument(
"--dry-run", action="store_true",
help="Only log actions, don't remove anything.")
parser_gc.add_argument(
"-t", "--timeout", type=int, default=60,
help="HTTP timeout in seconds for making docker API calls.")
dest="gc.dangling_volumes",
help="Dangling volumes will be removed."
)
parser_gc.add_argument(
"--exclude-image",
action="append",
help="Never remove images with this tag.")
parser_gc.add_argument(
"--exclude-image-file",
type=argparse.FileType("r"),
help="Path to a file which contains a list of images to exclude, one "
"image tag per line.")
type=str,
dest="gc.exclude_image",
metavar="EXCLUDE_IMAGE",
help="Never remove images with this tag."
)
parser_gc.add_argument(
"--exclude-container-label",
action="append", type=str, default=[],
help="Never remove containers with this label key or label key=value")
action="append",
type=str,
dest="gc.exclude_container_label",
metavar="EXCLUDE_CONTAINER_LABEL",
help="Never remove containers with this label key "
"or label key=value"
)

parser_stop = subparsers.add_parser(
"stop", help="Stop containers that have been running for too long."
)
parser_stop.add_argument(
"--max-run-time",
type=timedelta_validator,
dest="stop.max_run_time",
metavar="MAX_RUN_TIME",
help="Maximum time a container is allowed to run. Time may "
"be specified in any pytimeparse supported format."
)
parser_stop.add_argument(
"--prefix",
action="append",
type=str,
dest="stop.prefix",
metavar="PREFIX",
help="Only stop containers which match one of the "
"prefixes."
)

return parser.parse_args().__dict__

@@ -87,11 +125,12 @@ class DockerTidy:
except dockertidy.Exception.ConfigError as e:
self.log.sysexit_with_message(e)

print(config.config)

try:
self.log.set_level(config.config["logging"]["level"])
except ValueError as e:
self.log.sysexit_with_message(
"Can not set log level.\n{}".format(str(e)))
self.log.sysexit_with_message("Can not set log level.\n{}".format(str(e)))

self.logger.info("Using config file {}".format(config.config_file))
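The dotted dest values above (e.g. gc.max_container_age) are what later lets the config layer rebuild a nested settings dict from flat argparse output; a self-contained sketch of that pattern, simplified and with illustrative values:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--max-container-age", dest="gc.max_container_age")
    args = vars(parser.parse_args(["--max-container-age", "2days"]))
    # args == {"gc.max_container_age": "2days"}; splitting each key on "."
    # later yields the nested form {"gc": {"max_container_age": "2days"}}.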
@@ -1,9 +1,7 @@
#!/usr/bin/env python3
"""Global settings definition."""

import logging
import os
import sys

import anyconfig
import environs
@@ -11,9 +9,10 @@ import jsonschema.exceptions
import ruamel.yaml
from appdirs import AppDirs
from jsonschema._utils import format_as_index
from pkg_resources import resource_filename

import dockertidy.Exception
import dockertidy.Parser
from dockertidy.Parser import env
from dockertidy.Utils import Singleton

config_dir = AppDirs("docker-tidy").user_config_dir
@@ -26,7 +25,8 @@ class Config():

Settings are loaded from multiple locations in defined order (last wins):
- default settings defined by `self._get_defaults()`
- yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
- yaml config file, defaults to OS specific user config dir
see (https://pypi.org/project/appdirs/)
- provided cli parameters
"""

@@ -36,22 +36,18 @@
"env": "CONFIG_FILE",
"type": environs.Env().str
},
"role_dir": {
"default": "",
"env": "ROLE_DIR",
"type": environs.Env().str
},
"role_name": {
"default": "",
"env": "ROLE_NAME",
"type": environs.Env().str
},
"dry_run": {
"default": False,
"env": "DRY_RUN",
"env": "DRY_TUN",
"file": True,
"type": environs.Env().bool
},
"http_timeout": {
"default": 60,
"env": "HTTP_TIMEOUT",
"file": True,
"type": environs.Env().int
},
"logging.level": {
"default": "WARNING",
"env": "LOG_LEVEL",
@@ -64,73 +60,47 @@ class Config():
"file": True,
"type": environs.Env().bool
},
"output_dir": {
"default": os.getcwd(),
"env": "OUTPUT_DIR",
"gc.max_container_age": {
"default": "1day",
"env": "GC_MAX_CONTAINER_AGE",
"file": True,
"type": environs.Env().str
"type": env.timedelta_validator
},
"template_dir": {
"default": os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates"),
"env": "TEMPLATE_DIR",
"gc.max_image_age": {
"default": "1day",
"env": "GC_MAX_IMAGE_AGE",
"file": True,
"type": environs.Env().str
"type": env.timedelta_validator
},
"template": {
"default": "readme",
"env": "TEMPLATE",
"file": True,
"type": environs.Env().str
},
"force_overwrite": {
"gc.dangling_volumes": {
"default": False,
"env": "FORCE_OVERWRITE",
"env": "GC_EXCLUDE_IMAGE",
"file": True,
"type": environs.Env().bool
},
"custom_header": {
"default": "",
"env": "CUSTOM_HEADER",
"file": True,
"type": environs.Env().str
},
"exclude_files": {
"gc.exclude_image": {
"default": [],
"env": "EXCLUDE_FILES",
"env": "GC_DANGLING_VOLUMES",
"file": True,
"type": environs.Env().list
},
}

ANNOTATIONS = {
"meta": {
"name": "meta",
"automatic": True,
"subtypes": []
"gc.exclude_container_label": {
"default": [],
"env": "GC_EXCLUDE_CONTAINER_LABEL",
"file": True,
"type": environs.Env().list
},
"todo": {
"name": "todo",
"automatic": True,
"subtypes": []
"stop.max_run_time": {
"default": "3days",
"env": "STOP_MAX_RUN_TIME",
"file": True,
"type": env.timedelta_validator
},
"var": {
"name": "var",
"automatic": True,
"subtypes": [
"value",
"example",
"description"
]
},
"example": {
"name": "example",
"automatic": True,
"subtypes": []
},
"tag": {
"name": "tag",
"automatic": True,
"subtypes": []
"stop.prefix": {
"default": [],
"env": "STOP_PREFIX",
"file": True,
"type": environs.Env().list
},
}

@@ -146,10 +116,8 @@ class Config():
self._args = args
self._schema = None
self.config_file = default_config_file
self.role_dir = os.getcwd()
self.config = None
self._set_config()
self.is_role = self._set_is_role() or False

def _get_args(self, args):
cleaned = dict(filter(lambda item: item[1] is not None, args.items()))
@@ -173,9 +141,6 @@ class Config():
for key, item in self.SETTINGS.items():
normalized = self._add_dict_branch(normalized, key.split("."), item["default"])

# compute role_name default
normalized["role_name"] = os.path.basename(self.role_dir)

self.schema = anyconfig.gen_schema(normalized)
return normalized

@@ -183,7 +148,7 @@ class Config():
normalized = {}
for key, item in self.SETTINGS.items():
if item.get("env"):
prefix = "ANSIBLE_DOCTOR_"
prefix = "TIDY_"
envname = prefix + item["env"]
try:
value = item["type"](envname)
@@ -192,7 +157,9 @@ class Config():
if '"{}" not set'.format(envname) in str(e):
pass
else:
raise dockertidy.Exception.ConfigError("Unable to read environment variable", str(e))
raise dockertidy.Exception.ConfigError(
"Unable to read environment variable", str(e)
)

return normalized

@@ -204,13 +171,9 @@ class Config():
# preset config file path
if envs.get("config_file"):
self.config_file = self._normalize_path(envs.get("config_file"))
if envs.get("role_dir"):
self.role_dir = self._normalize_path(envs.get("role_dir"))

if args.get("config_file"):
self.config_file = self._normalize_path(args.get("config_file"))
if args.get("role_dir"):
self.role_dir = self._normalize_path(args.get("role_dir"))

source_files = []
source_files.append(self.config_file)
@@ -224,7 +187,9 @@ class Config():
s = stream.read()
try:
file_dict = ruamel.yaml.safe_load(s)
except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
except (
ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
) as e:
message = "{} {}".format(e.context, e.problem)
raise dockertidy.Exception.ConfigError(
"Unable to read config file {}".format(config), message
@@ -240,15 +205,8 @@ class Config():
if self._validate(args):
anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)

fix_files = ["output_dir", "template_dir", "custom_header"]
for file in fix_files:
if defaults[file] and defaults[file] != "":
defaults[file] = self._normalize_path(defaults[file])

if "config_file" in defaults:
defaults.pop("config_file")
if "role_dir" in defaults:
defaults.pop("role_dir")

defaults["logging"]["level"] = defaults["logging"]["level"].upper()

@@ -261,10 +219,6 @@ class Config():
else:
return path

def _set_is_role(self):
if os.path.isdir(os.path.join(self.role_dir, "tasks")):
return True

def _validate(self, config):
try:
anyconfig.validate(config, self.schema, ac_schema_safe=False)
@@ -285,32 +239,6 @@ class Config():
else self._add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
return tree

def get_annotations_definition(self, automatic=True):
annotations = {}
if automatic:
for k, item in self.ANNOTATIONS.items():
if "automatic" in item.keys() and item["automatic"]:
annotations[k] = item
return annotations

def get_annotations_names(self, automatic=True):
annotations = []
if automatic:
for k, item in self.ANNOTATIONS.items():
if "automatic" in item.keys() and item["automatic"]:
annotations.append(k)
return annotations

def get_template(self):
"""
Get the base dir for the template to use.

:return: str abs path
"""
template_dir = self.config.get("template_dir")
template = self.config.get("template")
return os.path.realpath(os.path.join(template_dir, template))


class SingleConfig(Config, metaclass=Singleton):
pass
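A standalone sketch of the _add_dict_branch() recursion visible above, which turns the dotted SETTINGS keys into the nested config tree (renamed here to keep the example self-contained):

    def add_dict_branch(tree, vector, value):
        key = vector[0]
        tree[key] = value if len(vector) == 1 \
            else add_dict_branch(tree.get(key, {}), vector[1:], value)
        return tree

    settings = {}
    add_dict_branch(settings, "gc.max_container_age".split("."), "1day")
    add_dict_branch(settings, "stop.prefix".split("."), [])
    print(settings)  # {'gc': {'max_container_age': '1day'}, 'stop': {'prefix': []}}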
@@ -1,7 +1,6 @@
#!/usr/bin/env python3
"""Remove unused docker containers and images."""

import argparse
import fnmatch
import logging
import sys
@@ -12,10 +11,9 @@ import docker
import docker.errors
import requests.exceptions
from docker.utils import kwargs_from_env
from docker_custodian.args import timedelta_type

log = logging.getLogger(__name__)

from dockertidy.Config import SingleConfig
from dockertidy.Logger import SingleLog

# This seems to be something docker uses for a null/zero date
YEAR_ZERO = "0001-01-01T00:00:00Z"
@@ -23,12 +21,15 @@ YEAR_ZERO = "0001-01-01T00:00:00Z"
ExcludeLabel = namedtuple("ExcludeLabel", ["key", "value"])


def cleanup_containers(
client,
max_container_age,
dry_run,
exclude_container_labels,
):
class GarbageCollector:

def __init__(self):
self.config = SingleConfig()
self.log = SingleLog()
self.logger = SingleLog().logger


def cleanup_containers(client, max_container_age, dry_run, exclude_container_labels):
all_containers = get_all_containers(client)
filtered_containers = filter_excluded_containers(
all_containers,
@@ -40,15 +41,13 @@
container=container_summary["Id"],
)
if not container or not should_remove_container(
container,
max_container_age,
container,
max_container_age,
):
continue

log.info("Removing container %s %s %s" % (
container["Id"][:16],
container.get("Name", "").lstrip("/"),
container["State"]["FinishedAt"]))
log.info("Removing container %s %s %s" % (container["Id"][:16], container.get(
"Name", "").lstrip("/"), container["State"]["FinishedAt"]))

if not dry_run:
api_call(
@@ -64,11 +63,12 @@ def filter_excluded_containers(containers, exclude_container_labels):

def include_container(container):
if should_exclude_container_with_labels(
container,
exclude_container_labels,
container,
exclude_container_labels,
):
return False
return True

return filter(include_container, containers)


@@ -81,16 +81,12 @@ def should_exclude_container_with_labels(container, exclude_container_labels):
exclude_label.key,
)
label_values_to_check = [
container["Labels"][matching_key]
for matching_key in matching_keys
container["Labels"][matching_key] for matching_key in matching_keys
]
if fnmatch.filter(label_values_to_check, exclude_label.value):
return True
else:
if fnmatch.filter(
container["Labels"].keys(),
exclude_label.key
):
if fnmatch.filter(container["Labels"].keys(), exclude_label.key):
return True
return False

@@ -153,6 +149,7 @@ def cleanup_images(client, max_image_age, dry_run, exclude_set):


def filter_excluded_images(images, exclude_set):

def include_image(image_summary):
image_tags = image_summary.get("RepoTags")
if no_image_tags(image_tags):
@@ -166,6 +163,7 @@ def filter_excluded_images(images, exclude_set):


def filter_images_in_use(images, image_tags_in_use):

def get_tag_set(image_summary):
image_tags = image_summary.get("RepoTags")
if no_image_tags(image_tags):
@@ -180,6 +178,7 @@ def filter_images_in_use(images, image_tags_in_use):


def filter_images_in_use_by_id(images, image_ids_in_use):

def image_not_in_use(image_summary):
return image_summary["Id"] not in image_ids_in_use

@@ -245,6 +244,7 @@ def api_call(func, **kwargs):


def format_image(image, image_summary):

def get_tags():
tags = image_summary.get("RepoTags")
if not tags or tags == ["<none>:<none>"]:
@@ -275,29 +275,20 @@ def format_exclude_labels(exclude_label_args):
exclude_label_value = split_exclude_label[1]
else:
exclude_label_value = None
exclude_labels.append(
ExcludeLabel(
key=exclude_label_key,
value=exclude_label_value,
)
)
exclude_labels.append(ExcludeLabel(
key=exclude_label_key,
value=exclude_label_value,
))
return exclude_labels


def main():
logging.basicConfig(
level=logging.INFO,
format="%(message)s",
stream=sys.stdout)
logging.basicConfig(level=logging.INFO, format="%(message)s", stream=sys.stdout)

args = get_args()
client = docker.APIClient(version="auto",
timeout=args.timeout,
**kwargs_from_env())
client = docker.APIClient(version="auto", timeout=args.timeout, **kwargs_from_env())

exclude_container_labels = format_exclude_labels(
args.exclude_container_label
)
exclude_container_labels = format_exclude_labels(args.exclude_container_label)

if args.max_container_age:
cleanup_containers(
@@ -308,55 +299,8 @@ def main():
)

if args.max_image_age:
exclude_set = build_exclude_set(
args.exclude_image,
args.exclude_image_file)
exclude_set = build_exclude_set(args.exclude_image, args.exclude_image_file)
cleanup_images(client, args.max_image_age, args.dry_run, exclude_set)

if args.dangling_volumes:
cleanup_volumes(client, args.dry_run)


def get_args(args=None):
parser = argparse.ArgumentParser()
parser.add_argument(
"--max-container-age",
type=timedelta_type,
help="Maximum age for a container. Containers older than this age "
"will be removed. Age can be specified in any pytimeparse "
"supported format.")
parser.add_argument(
"--max-image-age",
type=timedelta_type,
help="Maxium age for an image. Images older than this age will be "
"removed. Age can be specified in any pytimeparse supported "
"format.")
parser.add_argument(
"--dangling-volumes",
action="store_true",
help="Dangling volumes will be removed.")
parser.add_argument(
"--dry-run", action="store_true",
help="Only log actions, don't remove anything.")
parser.add_argument(
"-t", "--timeout", type=int, default=60,
help="HTTP timeout in seconds for making docker API calls.")
parser.add_argument(
"--exclude-image",
action="append",
help="Never remove images with this tag.")
parser.add_argument(
"--exclude-image-file",
type=argparse.FileType("r"),
help="Path to a file which contains a list of images to exclude, one "
"image tag per line.")
parser.add_argument(
"--exclude-container-label",
action="append", type=str, default=[],
help="Never remove containers with this label key or label key=value")

return parser.parse_args(args=args)


if __name__ == "__main__":
main()
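The label exclusion above leans on fnmatch glob matching against label keys and, when given, label values; a compact sketch of that check, simplified from should_exclude_container_with_labels:

    import fnmatch
    from collections import namedtuple

    ExcludeLabel = namedtuple("ExcludeLabel", ["key", "value"])

    labels = {"too": "lol", "other": "x"}
    exclude = ExcludeLabel(key="too*", value="lol")

    matching_keys = fnmatch.filter(labels.keys(), exclude.key)
    values = [labels[key] for key in matching_keys]
    print(bool(fnmatch.filter(values, exclude.value)))  # True -> container excluded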
184 dockertidy/Logger.py Normal file
@@ -0,0 +1,184 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""

import logging
import os
import sys

import colorama
from pythonjsonlogger import jsonlogger

from dockertidy.Utils import Singleton
from dockertidy.Utils import to_bool

CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "(asctime) (levelname) (message)"


def _should_do_markup():
    py_colors = os.environ.get("PY_COLORS", None)
    if py_colors is not None:
        return to_bool(py_colors)

    return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"


colorama.init(autoreset=True, strip=not _should_do_markup())


class LogFilter(object):
    """A custom log filter which excludes log messages above the logged level."""

    def __init__(self, level):
        """
        Initialize a new custom log filter.

        :param level: Log level limit
        :returns: None

        """
        self.__level = level

    def filter(self, logRecord):  # noqa
        # https://docs.python.org/3/library/logging.html#logrecord-attributes
        return logRecord.levelno <= self.__level


class MultilineFormatter(logging.Formatter):
    """Logging Formatter to reset color after newline characters."""

    def format(self, record):  # noqa
        record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
        return logging.Formatter.format(self, record)


class MultilineJsonFormatter(jsonlogger.JsonFormatter):
    """Logging Formatter to remove newline characters."""

    def format(self, record):  # noqa
        record.msg = record.msg.replace("\n", " ")
        return jsonlogger.JsonFormatter.format(self, record)


class Log:

    def __init__(self, level=logging.WARN, name="dockertidy", json=False):
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)
        self.logger.addHandler(self._get_error_handler(json=json))
        self.logger.addHandler(self._get_warn_handler(json=json))
        self.logger.addHandler(self._get_info_handler(json=json))
        self.logger.addHandler(self._get_critical_handler(json=json))
        self.logger.addHandler(self._get_debug_handler(json=json))
        self.logger.propagate = False

    def _get_error_handler(self, json=False):
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(logging.ERROR)
        handler.addFilter(LogFilter(logging.ERROR))
        handler.setFormatter(
            MultilineFormatter(
                self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_warn_handler(self, json=False):
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.WARN)
        handler.addFilter(LogFilter(logging.WARN))
        handler.setFormatter(
            MultilineFormatter(
                self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))))

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_info_handler(self, json=False):
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.INFO)
        handler.addFilter(LogFilter(logging.INFO))
        handler.setFormatter(
            MultilineFormatter(
                self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))))

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_critical_handler(self, json=False):
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(logging.CRITICAL)
        handler.addFilter(LogFilter(logging.CRITICAL))
        handler.setFormatter(
            MultilineFormatter(
                self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_debug_handler(self, json=False):
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(logging.DEBUG)
        handler.addFilter(LogFilter(logging.DEBUG))
        handler.setFormatter(
            MultilineFormatter(
                self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE,
                                                    colorama.Style.RESET_ALL))))

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def set_level(self, s):
        self.logger.setLevel(s)

    def debug(self, msg):
        """Format debug messages and return string."""
        return msg

    def critical(self, msg):
        """Format critical messages and return string."""
        return msg

    def error(self, msg):
        """Format error messages and return string."""
        return msg

    def warn(self, msg):
        """Format warn messages and return string."""
        return msg

    def info(self, msg):
        """Format info messages and return string."""
        return msg

    def _color_text(self, color, msg):
        """
        Colorize strings.

        :param color: colorama color settings
        :param msg: string to colorize
        :returns: string

        """
        return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)

    def sysexit(self, code=1):
        sys.exit(code)

    def sysexit_with_message(self, msg, code=1):
        self.logger.critical(str(msg))
        self.sysexit(code)


class SingleLog(Log, metaclass=Singleton):
    pass
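A short sketch of the resulting behaviour (handler routing as defined above: info and warn go to stdout, error, critical, and debug to stderr; the singleton always hands back one shared instance):

    from dockertidy.Logger import SingleLog

    log = SingleLog()
    log.set_level("INFO")                      # setLevel also accepts level names
    log.logger.info("stdout via the info handler")
    log.logger.error("stderr via the error handler")
    assert SingleLog() is log                  # Singleton metaclass at work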
51 dockertidy/Parser.py Normal file
@@ -0,0 +1,51 @@
#!/usr/bin/env python3
"""Custom input type parser."""

import datetime

import environs
from dateutil import tz
from pytimeparse import timeparse

env = environs.Env()


def timedelta_validator(value):
    """Return the :class:`datetime.datetime.DateTime` for a time in the past.

    :param value: a string containing a time format supported by
    mod:`pytimeparse`
    """
    if value is None:
        return None

    try:
        _datetime_seconds_ago(timeparse.timeparse(value))
        return value
    except TypeError:
        raise


def timedelta(value):
    """Return the :class:`datetime.datetime.DateTime` for a time in the past.

    :param value: a string containing a time format supported by
    mod:`pytimeparse`
    """
    if value is None:
        return None
    return _datetime_seconds_ago(timeparse.timeparse(value))


def _datetime_seconds_ago(seconds):
    now = datetime.datetime.now(tz.tzutc())
    return now - datetime.timedelta(seconds=seconds)


@env.parser_for("timedelta_validator")
def timedelta_parser(value):
    try:
        timedelta_validator(value)
        return value
    except TypeError as e:
        raise environs.EnvError(e)
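A quick sketch of the two entry points above: the validator checks a pytimeparse expression and returns it unchanged, while timedelta() converts it into an absolute UTC timestamp in the past:

    from dockertidy.Parser import timedelta, timedelta_validator

    print(timedelta_validator("3days"))  # "3days" (validated, returned as-is)
    print(timedelta("3days"))            # e.g. 2020-02-27 10:00:00.000000+00:00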
@@ -1,55 +1,13 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""

import datetime
import logging
import os
import pprint
import sys
from distutils.util import strtobool

import colorama
from dateutil import tz
from pythonjsonlogger import jsonlogger
from pytimeparse import timeparse

import dockertidy.Exception

CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "(asctime) (levelname) (message)"


def to_bool(string):
return bool(strtobool(str(string)))


def timedelta_type(value):
"""Return the :class:`datetime.datetime.DateTime` for a time in the past.

:param value: a string containing a time format supported by
mod:`pytimeparse`
"""
if value is None:
return None
return _datetime_seconds_ago(timeparse.timeparse(value))


def _datetime_seconds_ago(seconds):
now = datetime.datetime.now(tz.tzutc())
return now - datetime.timedelta(seconds=seconds)


def _should_do_markup():
py_colors = os.environ.get("PY_COLORS", None)
if py_colors is not None:
return to_bool(py_colors)

return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"


colorama.init(autoreset=True, strip=not _should_do_markup())


class Singleton(type):
_instances = {}

@@ -57,183 +15,3 @@ class Singleton(type):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]


class LogFilter(object):
"""A custom log filter which excludes log messages above the logged level."""

def __init__(self, level):
"""
Initialize a new custom log filter.

:param level: Log level limit
:returns: None

"""
self.__level = level

def filter(self, logRecord):  # noqa
# https://docs.python.org/3/library/logging.html#logrecord-attributes
return logRecord.levelno <= self.__level


class MultilineFormatter(logging.Formatter):
"""Logging Formatter to reset color after newline characters."""

def format(self, record):  # noqa
record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
return logging.Formatter.format(self, record)


class MultilineJsonFormatter(jsonlogger.JsonFormatter):
"""Logging Formatter to remove newline characters."""

def format(self, record):  # noqa
record.msg = record.msg.replace("\n", " ")
return jsonlogger.JsonFormatter.format(self, record)


class Log:
def __init__(self, level=logging.WARN, name="dockertidy", json=False):
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.addHandler(self._get_error_handler(json=json))
self.logger.addHandler(self._get_warn_handler(json=json))
self.logger.addHandler(self._get_info_handler(json=json))
self.logger.addHandler(self._get_critical_handler(json=json))
self.logger.addHandler(self._get_debug_handler(json=json))
self.logger.propagate = False

def _get_error_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.ERROR)
handler.addFilter(LogFilter(logging.ERROR))
handler.setFormatter(MultilineFormatter(
self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))

if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

return handler

def _get_warn_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.WARN)
handler.addFilter(LogFilter(logging.WARN))
handler.setFormatter(MultilineFormatter(
self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))))

if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

return handler

def _get_info_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
handler.addFilter(LogFilter(logging.INFO))
handler.setFormatter(MultilineFormatter(
self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))))

if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

return handler

def _get_critical_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.CRITICAL)
handler.addFilter(LogFilter(logging.CRITICAL))
handler.setFormatter(MultilineFormatter(
self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))

if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

return handler

def _get_debug_handler(self, json=False):
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
handler.addFilter(LogFilter(logging.DEBUG))
handler.setFormatter(MultilineFormatter(
self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))))

if json:
handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

return handler

def set_level(self, s):
self.logger.setLevel(s)

def debug(self, msg):
"""Format info messages and return string."""
return msg

def critical(self, msg):
"""Format critical messages and return string."""
return msg

def error(self, msg):
"""Format error messages and return string."""
return msg

def warn(self, msg):
"""Format warn messages and return string."""
return msg

def info(self, msg):
"""Format info messages and return string."""
return msg

def _color_text(self, color, msg):
"""
Colorize strings.

:param color: colorama color settings
:param msg: string to colorize
:returns: string

"""
return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)

def sysexit(self, code=1):
sys.exit(code)

def sysexit_with_message(self, msg, code=1):
self.logger.critical(str(msg))
self.sysexit(code)


class SingleLog(Log, metaclass=Singleton):
pass


class FileUtils:
@staticmethod
def create_path(path):
os.makedirs(path, exist_ok=True)

@staticmethod
def query_yes_no(question, default=True):
"""Ask a yes/no question via input() and return their answer.

"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).

The "answer" return value is one of "yes" or "no".
"""
if default:
prompt = "[Y/n]"
else:
prompt = "[N/y]"

try:
# input() is safe in python3
choice = input("{} {} ".format(question, prompt)) or default  # nosec
return to_bool(choice)
except (KeyboardInterrupt, ValueError) as e:
raise dockertidy.Exception.InputError("Error while reading input", e)
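The Singleton metaclass kept here is what backs SingleConfig and SingleLog; a compact demonstration of the pattern (class Foo is illustrative only, and the __call__ signature is the conventional one implied by the body above):

    class Singleton(type):
        _instances = {}

        def __call__(cls, *args, **kwargs):
            if cls not in cls._instances:
                cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
            return cls._instances[cls]


    class Foo(metaclass=Singleton):
        def __init__(self):
            self.value = 0


    a, b = Foo(), Foo()
    a.value = 42
    print(b.value, a is b)  # 42 True -> __init__ ran exactly once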
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
"""Default package."""
from importlib_metadata import PackageNotFoundError

from importlib_metadata import version

__author__ = "Robert Kaussow"
@ -1,20 +0,0 @@
|
||||
import datetime
|
||||
|
||||
from dateutil import tz
|
||||
from pytimeparse import timeparse
|
||||
|
||||
|
||||
def timedelta_type(value):
|
||||
"""Return the :class:`datetime.datetime.DateTime` for a time in the past.
|
||||
|
||||
:param value: a string containing a time format supported by
|
||||
mod:`pytimeparse`
|
||||
"""
|
||||
if value is None:
|
||||
return None
|
||||
return datetime_seconds_ago(timeparse.timeparse(value))
|
||||
|
||||
|
||||
def datetime_seconds_ago(seconds):
|
||||
now = datetime.datetime.now(tz.tzutc())
|
||||
return now - datetime.timedelta(seconds=seconds)
|
@ -1,103 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Stop long running docker iamges."""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import dateutil.parser
|
||||
import docker
|
||||
import docker.errors
|
||||
import requests.exceptions
|
||||
from docker.utils import kwargs_from_env
|
||||
from docker_custodian.args import timedelta_type
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def stop_containers(client, max_run_time, matcher, dry_run):
|
||||
for container_summary in client.containers():
|
||||
container = client.inspect_container(container_summary["Id"])
|
||||
name = container["Name"].lstrip("/")
|
||||
if (
|
||||
matcher(name) and has_been_running_since(container, max_run_time)
|
||||
):
|
||||
|
||||
log.info("Stopping container %s %s: running since %s" % (
|
||||
container["Id"][:16],
|
||||
name,
|
||||
container["State"]["StartedAt"]))
|
||||
|
||||
if not dry_run:
|
||||
stop_container(client, container["Id"])
|
||||
|
||||
|
||||
def stop_container(client, cid):
|
||||
try:
|
||||
client.stop(cid)
|
||||
except requests.exceptions.Timeout as e:
|
||||
log.warn("Failed to stop container %s: %s" % (cid, e))
|
||||
except docker.errors.APIError as ae:
|
||||
log.warn("Error stopping %s: %s" % (cid, ae))
|
||||
|
||||
|
||||
def build_container_matcher(prefixes):
|
||||
def matcher(name):
|
||||
return any(name.startswith(prefix) for prefix in prefixes)
|
||||
return matcher
|
||||
|
||||
|
||||
def has_been_running_since(container, min_time):
|
||||
started_at = container.get("State", {}).get("StartedAt")
|
||||
if not started_at:
|
||||
return False
|
||||
|
||||
return dateutil.parser.parse(started_at) <= min_time
|
||||
|
||||
|
||||
def main():
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(message)s",
|
||||
stream=sys.stdout)
|
||||
|
||||
opts = get_opts()
|
||||
client = docker.APIClient(version="auto",
|
||||
timeout=opts.timeout,
|
||||
**kwargs_from_env())
|
||||
|
||||
matcher = build_container_matcher(opts.prefix)
|
||||
stop_containers(client, opts.max_run_time, matcher, opts.dry_run)
|
||||
|
||||
|
||||
def get_opts(args=None):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"--max-run-time",
|
||||
type=timedelta_type,
|
||||
help="Maximum time a container is allows to run. Time may "
|
||||
"be specified in any pytimeparse supported format."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--prefix", action="append", default=[],
|
||||
help="Only stop containers which match one of the "
|
||||
"prefix."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run", action="store_true",
|
||||
help="Only log actions, don't stop anything."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-t", "--timeout", type=int, default=60,
|
||||
help="HTTP timeout in seconds for making docker API calls."
|
||||
)
|
||||
opts = parser.parse_args(args=args)
|
||||
|
||||
if not opts.prefix:
|
||||
parser.error("Running with no --prefix will match nothing.")
|
||||
|
||||
return opts
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -9,13 +9,11 @@ except ImportError:
import mock


def test_datetime_seconds_ago(now):
expected = datetime.datetime(2014, 1, 15, 10, 10, tzinfo=tz.tzutc())
with mock.patch(
'docker_custodian.args.datetime.datetime',
autospec=True,
'docker_custodian.args.datetime.datetime',
autospec=True,
) as mock_datetime:
mock_datetime.now.return_value = now
assert args.datetime_seconds_ago(24 * 60 * 60 * 5) == expected
@@ -28,8 +26,8 @@ def test_timedelta_type_none():
def test_timedelta_type(now):
expected = datetime.datetime(2014, 1, 15, 10, 10, tzinfo=tz.tzutc())
with mock.patch(
'docker_custodian.args.datetime.datetime',
autospec=True,
'docker_custodian.args.datetime.datetime',
autospec=True,
) as mock_datetime:
mock_datetime.now.return_value = now
assert args.timedelta_type('5 days') == expected
@@ -45,29 +45,18 @@ def test_has_been_running_since_false(container, earlier_time):
assert not has_been_running_since(container, earlier_time)


@mock.patch('docker_custodian.docker_autostop.build_container_matcher',
autospec=True)
@mock.patch('docker_custodian.docker_autostop.stop_containers',
autospec=True)
@mock.patch('docker_custodian.docker_autostop.get_opts',
autospec=True)
@mock.patch('docker_custodian.docker_autostop.build_container_matcher', autospec=True)
@mock.patch('docker_custodian.docker_autostop.stop_containers', autospec=True)
@mock.patch('docker_custodian.docker_autostop.get_opts', autospec=True)
@mock.patch('docker_custodian.docker_autostop.docker', autospec=True)
def test_main(
mock_docker,
mock_get_opts,
mock_stop_containers,
mock_build_matcher
):
def test_main(mock_docker, mock_get_opts, mock_stop_containers, mock_build_matcher):
mock_get_opts.return_value.timeout = 30
main()
mock_get_opts.assert_called_once_with()
mock_build_matcher.assert_called_once_with(
mock_get_opts.return_value.prefix)
mock_stop_containers.assert_called_once_with(
mock.ANY,
mock_get_opts.return_value.max_run_time,
mock_build_matcher.return_value,
mock_get_opts.return_value.dry_run)
mock_build_matcher.assert_called_once_with(mock_get_opts.return_value.prefix)
mock_stop_containers.assert_called_once_with(mock.ANY, mock_get_opts.return_value.max_run_time,
mock_build_matcher.return_value,
mock_get_opts.return_value.dry_run)


def test_get_opts_with_defaults():
@@ -79,10 +68,8 @@ def test_get_opts_with_defaults():


def test_get_opts_with_args(now):
with mock.patch(
'docker_custodian.docker_autostop.timedelta_type',
autospec=True
) as mock_timedelta_type:
with mock.patch('docker_custodian.docker_autostop.timedelta_type',
autospec=True) as mock_timedelta_type:
opts = get_opts(args=['--prefix', 'one', '--max-run-time', '24h'])
assert opts.max_run_time == mock_timedelta_type.return_value
mock_timedelta_type.assert_called_once_with('24h')
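These tests rely on mock.patch(..., autospec=True), which specs the replacement to the real object's signature so mismatched calls fail loudly; a minimal standalone illustration (json.dumps stands in for the patched targets):

    import json
    from unittest import mock

    def dump(data):
        return json.dumps(data)

    with mock.patch("json.dumps", autospec=True) as mock_dumps:
        mock_dumps.return_value = "{}"
        assert dump({"a": 1}) == "{}"
        mock_dumps.assert_called_once_with({"a": 1})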
@ -11,7 +11,6 @@ except ImportError:
|
||||
import mock
|
||||
|
||||
|
||||
|
||||
class TestShouldRemoveContainer(object):
|
||||
|
||||
def test_is_running(self, container, now):
|
||||
@ -43,8 +42,12 @@ class TestShouldRemoveContainer(object):
|
||||
def test_cleanup_containers(mock_client, now):
|
||||
max_container_age = now
|
||||
mock_client.containers.return_value = [
|
||||
{'Id': 'abcd'},
|
||||
{'Id': 'abbb'},
|
||||
{
|
||||
'Id': 'abcd'
|
||||
},
|
||||
{
|
||||
'Id': 'abbb'
|
||||
},
|
||||
]
|
||||
mock_containers = [
|
||||
{
|
||||
@ -66,17 +69,34 @@ def test_cleanup_containers(mock_client, now):
|
||||
]
|
||||
mock_client.inspect_container.side_effect = iter(mock_containers)
|
||||
docker_gc.cleanup_containers(mock_client, max_container_age, False, None)
|
||||
mock_client.remove_container.assert_called_once_with(container='abcd',
|
||||
v=True)
|
||||
mock_client.remove_container.assert_called_once_with(container='abcd', v=True)
|
||||
|
||||
|
||||
def test_filter_excluded_containers():
|
||||
mock_containers = [
|
||||
{'Labels': {'toot': ''}},
|
||||
{'Labels': {'too': 'lol'}},
|
||||
{'Labels': {'toots': 'lol'}},
|
||||
{'Labels': {'foo': 'bar'}},
|
||||
{'Labels': None},
|
||||
{
|
||||
'Labels': {
|
||||
'toot': ''
|
||||
}
|
||||
},
|
||||
{
|
||||
'Labels': {
|
||||
'too': 'lol'
|
||||
}
|
||||
},
|
||||
{
|
||||
'Labels': {
|
||||
'toots': 'lol'
|
||||
}
|
||||
},
|
||||
{
|
||||
'Labels': {
|
||||
'foo': 'bar'
|
||||
}
|
||||
},
|
||||
{
|
||||
'Labels': None
|
||||
},
|
||||
]
|
||||
result = docker_gc.filter_excluded_containers(mock_containers, None)
|
||||
assert mock_containers == list(result)
|
||||
@ -88,11 +108,7 @@ def test_filter_excluded_containers():
|
||||
mock_containers,
|
||||
exclude_labels,
|
||||
)
|
||||
assert [
|
||||
mock_containers[0],
|
||||
mock_containers[2],
|
||||
mock_containers[4]
|
||||
] == list(result)
|
||||
assert [mock_containers[0], mock_containers[2], mock_containers[4]] == list(result)
|
||||
exclude_labels = [
|
||||
docker_gc.ExcludeLabel(key='too*', value='lol'),
|
||||
]
|
||||
@ -100,18 +116,18 @@ def test_filter_excluded_containers():
|
||||
mock_containers,
|
||||
exclude_labels,
|
||||
)
|
||||
assert [
|
||||
mock_containers[0],
|
||||
mock_containers[3],
|
||||
mock_containers[4]
|
||||
] == list(result)
|
||||
assert [mock_containers[0], mock_containers[3], mock_containers[4]] == list(result)
|
||||
|
||||
|
||||
def test_cleanup_images(mock_client, now):
|
||||
max_image_age = now
|
||||
mock_client.images.return_value = images = [
|
||||
{'Id': 'abcd'},
|
||||
{'Id': 'abbb'},
|
||||
{
|
||||
'Id': 'abcd'
|
||||
},
|
||||
{
|
||||
'Id': 'abbb'
|
||||
},
|
||||
]
|
||||
mock_images = [
|
||||
{
|
||||
@ -152,8 +168,7 @@ def test_cleanup_volumes(mock_client):
|
||||
|
||||
docker_gc.cleanup_volumes(mock_client, False)
|
||||
assert mock_client.remove_volume.mock_calls == [
|
||||
mock.call(name=volume['Name'])
|
||||
for volume in reversed(volumes['Volumes'])
|
||||
mock.call(name=volume['Name']) for volume in reversed(volumes['Volumes'])
|
||||
]
|
||||
|
||||
|
||||
@ -205,35 +220,56 @@ def test_filter_images_in_use():
|
||||
def test_filter_images_in_use_by_id(mock_client, now):
|
||||
mock_client._version = '1.21'
|
||||
mock_client.containers.return_value = [
|
||||
{'Id': 'abcd', 'ImageID': '1'},
|
||||
{'Id': 'abbb', 'ImageID': '2'},
|
||||
]
|
||||
mock_containers = [
|
||||
{
|
||||
'Id': 'abcd',
|
||||
'Name': 'one',
|
||||
'State': {
|
||||
'Running': False,
|
||||
'FinishedAt': '2014-01-01T01:01:01Z'
|
||||
}
|
||||
'ImageID': '1'
|
||||
},
|
||||
{
|
||||
'Id': 'abbb',
|
||||
'Name': 'two',
|
||||
'State': {
|
||||
'Running': True,
|
||||
'FinishedAt': '2014-01-01T01:01:01Z'
|
||||
}
|
||||
}
|
||||
'ImageID': '2'
|
||||
},
|
||||
]
|
||||
mock_containers = [{
|
||||
'Id': 'abcd',
|
||||
'Name': 'one',
|
||||
'State': {
|
||||
'Running': False,
|
||||
'FinishedAt': '2014-01-01T01:01:01Z'
|
||||
}
|
||||
}, {
|
||||
'Id': 'abbb',
|
||||
'Name': 'two',
|
||||
'State': {
|
||||
'Running': True,
|
||||
'FinishedAt': '2014-01-01T01:01:01Z'
|
||||
}
|
||||
}]
|
||||
mock_client.inspect_container.side_effect = iter(mock_containers)
|
||||
mock_client.images.return_value = [
|
||||
{'Id': '1', 'Created': '2014-01-01T01:01:01Z'},
|
||||
{'Id': '2', 'Created': '2014-01-01T01:01:01Z'},
|
||||
{'Id': '3', 'Created': '2014-01-01T01:01:01Z'},
|
||||
{'Id': '4', 'Created': '2014-01-01T01:01:01Z'},
|
||||
{'Id': '5', 'Created': '2014-01-01T01:01:01Z'},
|
||||
{'Id': '6', 'Created': '2014-01-01T01:01:01Z'},
|
||||
{
|
||||
'Id': '1',
|
||||
'Created': '2014-01-01T01:01:01Z'
|
||||
},
|
||||
{
|
||||
'Id': '2',
|
||||
'Created': '2014-01-01T01:01:01Z'
|
||||
},
|
||||
{
|
||||
'Id': '3',
|
||||
'Created': '2014-01-01T01:01:01Z'
|
||||
},
|
||||
{
|
||||
'Id': '4',
|
||||
'Created': '2014-01-01T01:01:01Z'
|
||||
},
|
||||
{
|
||||
'Id': '5',
|
||||
'Created': '2014-01-01T01:01:01Z'
|
||||
},
|
||||
{
|
||||
'Id': '6',
|
||||
'Created': '2014-01-01T01:01:01Z'
|
||||
},
|
||||
]
|
||||
mock_client.inspect_image.side_effect = lambda image: {
|
||||
'Id': image,
|
||||
@@ -252,34 +288,34 @@ def test_filter_excluded_images():
        'other:12345',
    ])
    images = [
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['user/one:latest', 'user/one:abcd']
        },
        {
            'RepoTags': ['other:abcda']
        },
        {
            'RepoTags': ['other:12345']
        },
        {
            'RepoTags': ['new_image:latest', 'new_image:123']
        },
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['user/one:latest', 'user/one:abcd']
        },
        {
            'RepoTags': ['other:abcda']
        },
        {
            'RepoTags': ['other:12345']
        },
        {
            'RepoTags': ['new_image:latest', 'new_image:123']
        },
    ]
    expected = [
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['other:abcda']
        },
        {
            'RepoTags': ['new_image:latest', 'new_image:123']
        },
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['other:abcda']
        },
        {
            'RepoTags': ['new_image:latest', 'new_image:123']
        },
    ]
    actual = docker_gc.filter_excluded_images(images, exclude_set)
    assert list(actual) == expected
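For readers following the fixtures: the behavior they pin down is that an image is dropped as soon as any of its RepoTags appears in the exclude set, while untagged images ('<none>:<none>') always pass through. A rough sketch of that rule (the function name is illustrative; this is not the module's actual implementation):

    def filter_excluded_sketch(images, exclude_set):
        for image in images:
            # Drop the image if any of its tags is excluded; '<none>:<none>'
            # never appears in an exclude file, so untagged images survive.
            if not set(image.get('RepoTags', [])) & exclude_set:
                yield image

    images = [{'RepoTags': ['user/one:latest', 'user/one:abcd']},
              {'RepoTags': ['other:abcda']}]
    assert list(filter_excluded_sketch(images, {'user/one:latest'})) == [
        {'RepoTags': ['other:abcda']}
    ]

With the fixtures above, excluding 'user/one:latest' removes the whole image even though its second tag is not listed.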
@@ -292,35 +328,34 @@ def test_filter_excluded_images_advanced():
        'user/repo-*:tag',
    ])
    images = [
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['user/one:latest', 'user/one:abcd']
        },
        {
            'RepoTags': ['user/foo:test']
        },
        {
            'RepoTags': ['user/foo:tag123']
        },
        {
            'RepoTags': ['user/repo-1:tag']
        },
        {
            'RepoTags': ['user/repo-2:tag']
        },

        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['user/one:latest', 'user/one:abcd']
        },
        {
            'RepoTags': ['user/foo:test']
        },
        {
            'RepoTags': ['user/foo:tag123']
        },
        {
            'RepoTags': ['user/repo-1:tag']
        },
        {
            'RepoTags': ['user/repo-2:tag']
        },
    ]
    expected = [
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['user/foo:test'],
        },
        {
            'RepoTags': ['<none>:<none>'],
            'Id': 'babababababaabababab'
        },
        {
            'RepoTags': ['user/foo:test'],
        },
    ]
    actual = docker_gc.filter_excluded_images(images, exclude_set)
    assert list(actual) == expected
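The advanced variant adds shell-style wildcards. One way to express the matching the fixtures imply is fnmatch (the real module may compile the patterns differently; this is an equivalent illustration):

    import fnmatch

    def tag_excluded(tag, patterns):
        # 'user/foo:tag*' drops user/foo:tag123 but keeps user/foo:test;
        # 'user/repo-*:tag' drops every repo-N:tag variant.
        return any(fnmatch.fnmatch(tag, pattern) for pattern in patterns)

    assert tag_excluded('user/repo-2:tag', {'user/repo-*:tag'})
    assert not tag_excluded('user/foo:test', {'user/foo:tag*'})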
@@ -355,16 +390,11 @@ def test_remove_image_new_image_not_removed(mock_client, image, later_time):
def test_remove_image_with_tags(mock_client, image, now):
    image_id = 'abcd'
    repo_tags = ['user/one:latest', 'user/one:12345']
    image_summary = {
        'Id': image_id,
        'RepoTags': repo_tags
    }
    image_summary = {'Id': image_id, 'RepoTags': repo_tags}
    mock_client.inspect_image.return_value = image
    docker_gc.remove_image(mock_client, image_summary, now, False)

    assert mock_client.remove_image.mock_calls == [
        mock.call(image=tag) for tag in repo_tags
    ]
    assert mock_client.remove_image.mock_calls == [mock.call(image=tag) for tag in repo_tags]


def test_api_call_success():
@@ -376,40 +406,30 @@ def test_api_call_success():


def test_api_call_with_timeout():
    func = mock.Mock(
        side_effect=requests.exceptions.ReadTimeout("msg"),
        __name__="remove_image")
    func = mock.Mock(side_effect=requests.exceptions.ReadTimeout("msg"), __name__="remove_image")
    image = "abcd"

    with mock.patch(
            'docker_custodian.docker_gc.log',
            autospec=True) as mock_log:
    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
        docker_gc.api_call(func, image=image)

    func.assert_called_once_with(image="abcd")
    mock_log.warn.assert_called_once_with('Failed to call remove_image '
                                          + 'image=abcd msg'
                                          )
    mock_log.warn.assert_called_once_with('Failed to call remove_image ' + 'image=abcd msg')

def test_api_call_with_api_error():
    func = mock.Mock(
        side_effect=docker.errors.APIError(
            "Ooops",
            mock.Mock(status_code=409, reason="Conflict"),
            explanation="failed"),
        __name__="remove_image")
    func = mock.Mock(side_effect=docker.errors.APIError("Ooops",
                                                        mock.Mock(status_code=409,
                                                                  reason="Conflict"),
                                                        explanation="failed"),
                     __name__="remove_image")
    image = "abcd"

    with mock.patch(
            'docker_custodian.docker_gc.log',
            autospec=True) as mock_log:
    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
        docker_gc.api_call(func, image=image)

    func.assert_called_once_with(image="abcd")
    mock_log.warn.assert_called_once_with(
        'Error calling remove_image image=abcd '
        '409 Client Error: Conflict ("failed")')
    mock_log.warn.assert_called_once_with('Error calling remove_image image=abcd '
                                          '409 Client Error: Conflict ("failed")')

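Both api_call tests pin the same contract: the wrapper swallows ReadTimeout and APIError and logs a warning built from the function name and its keyword arguments instead of raising. A sketch of that contract (the name api_call_sketch and the message formatting are approximated from the assertions above, not copied from the module):

    import logging

    import docker.errors
    import requests.exceptions

    log = logging.getLogger(__name__)


    def api_call_sketch(func, **kwargs):
        params = ','.join('%s=%s' % item for item in kwargs.items())
        try:
            return func(**kwargs)
        except requests.exceptions.ReadTimeout as exc:
            # The tests assert on log.warn, so the sketch uses the same
            # (deprecated) alias rather than log.warning.
            log.warn('Failed to call %s %s %s' % (func.__name__, params, exc))
        except docker.errors.APIError as exc:
            log.warn('Error calling %s %s %s' % (func.__name__, params, exc))

Degrading these errors to warnings lets one slow or conflicted object skip past without aborting the whole cleanup run.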
def days_as_seconds(num):
@@ -425,13 +445,13 @@ def test_get_args_with_defaults():


def test_get_args_with_args():
    with mock.patch(
        'docker_custodian.docker_gc.timedelta_type',
        autospec=True
    ) as mock_timedelta_type:
    with mock.patch('docker_custodian.docker_gc.timedelta_type',
                    autospec=True) as mock_timedelta_type:
        opts = docker_gc.get_args(args=[
            '--max-image-age', '30 days',
            '--max-container-age', '3d',
            '--max-image-age',
            '30 days',
            '--max-container-age',
            '3d',
        ])
    assert mock_timedelta_type.mock_calls == [
        mock.call('30 days'),
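For context on what get_args parses: timedelta_type (patched above) converts human-friendly age strings, while the flags themselves are ordinary argparse options. A reduced sketch of the CLI surface this test exercises (option names from the test; the age parsing is deliberately left out):

    import argparse


    def get_args_sketch(args=None):
        parser = argparse.ArgumentParser()
        # The real module routes both values through timedelta_type;
        # plain strings are enough to show the parsing shape.
        parser.add_argument('--max-image-age')
        parser.add_argument('--max-container-age')
        return parser.parse_args(args=args)


    opts = get_args_sketch(['--max-image-age', '30 days', '--max-container-age', '3d'])
    assert opts.max_image_age == '30 days'
    assert opts.max_container_age == '3d'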
@@ -444,8 +464,7 @@ def test_get_args_with_args():
def test_get_all_containers(mock_client):
    count = 10
    mock_client.containers.return_value = [mock.Mock() for _ in range(count)]
    with mock.patch('docker_custodian.docker_gc.log',
                    autospec=True) as mock_log:
    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
        containers = docker_gc.get_all_containers(mock_client)
    assert containers == mock_client.containers.return_value
    mock_client.containers.assert_called_once_with(all=True)
@@ -455,8 +474,7 @@ def test_get_all_containers(mock_client):
def test_get_all_images(mock_client):
    count = 7
    mock_client.images.return_value = [mock.Mock() for _ in range(count)]
    with mock.patch('docker_custodian.docker_gc.log',
                    autospec=True) as mock_log:
    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
        images = docker_gc.get_all_images(mock_client)
    assert images == mock_client.images.return_value
    mock_log.info.assert_called_with("Found %s images", count)
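A small note on the logging assertions in these tests: because the message is passed with a lazy %s placeholder rather than pre-formatted, assert_called_with can match the exact argument pair. Illustrated against an autospecced logger (standalone example, not from the suite):

    import logging
    from unittest import mock

    log = mock.create_autospec(logging.Logger, instance=True)
    count = 7
    log.info("Found %s images", count)

    # The format string and the count arrive as separate arguments,
    # so the assertion compares them structurally.
    log.info.assert_called_with("Found %s images", count)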
@@ -464,11 +482,8 @@ def test_get_all_images(mock_client):

def test_get_dangling_volumes(mock_client):
    count = 4
    mock_client.volumes.return_value = {
        'Volumes': [mock.Mock() for _ in range(count)]
    }
    with mock.patch('docker_custodian.docker_gc.log',
                    autospec=True) as mock_log:
    mock_client.volumes.return_value = {'Volumes': [mock.Mock() for _ in range(count)]}
    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
        volumes = docker_gc.get_dangling_volumes(mock_client)
    assert volumes == mock_client.volumes.return_value['Volumes']
    mock_log.info.assert_called_with("Found %s dangling volumes", count)
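For orientation: "dangling" volumes are the ones no container references any more, and docker-py's low-level client returns them wrapped in a {'Volumes': [...]} payload, which is why the test unwraps that key. A sketch of the helper's likely shape (the filters argument is an assumption about how the daemon is queried, not confirmed by this diff):

    def get_dangling_volumes_sketch(client):
        # APIClient.volumes() returns a dict payload, not a bare list;
        # the 'dangling' filter narrows it to unreferenced volumes.
        volumes = client.volumes(filters={'dangling': True})
        return volumes.get('Volumes') or []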
@@ -480,7 +495,8 @@ def test_build_exclude_set():
        'repo/foo:12345',
        'duplicate:latest',
    ]
    exclude_image_file = StringIO(textwrap.dedent("""
    exclude_image_file = StringIO(
        textwrap.dedent("""
        # Exclude this one because
        duplicate:latest
        # Also this one
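The fixture leans on textwrap.dedent so the exclude file can be written inline with indentation, and on comment lines being ignored. A self-contained sketch of how such a file reduces to a set (the parsing rule is inferred from the fixture, not copied from the module):

    import textwrap
    from io import StringIO

    exclude_image_file = StringIO(textwrap.dedent("""
        # Exclude this one because
        duplicate:latest
        # Also this one
        repo/foo:12345
        """))

    # Keep non-empty, non-comment lines; the set silently drops duplicates.
    exclude_set = {
        line.strip() for line in exclude_image_file
        if line.strip() and not line.strip().startswith('#')
    }
    assert exclude_set == {'duplicate:latest', 'repo/foo:12345'}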
@@ -516,13 +532,9 @@ def test_build_exclude_set_empty():


def test_main(mock_client):
    with mock.patch(
            'docker_custodian.docker_gc.docker.APIClient',
            return_value=mock_client):
    with mock.patch('docker_custodian.docker_gc.docker.APIClient', return_value=mock_client):

        with mock.patch(
                'docker_custodian.docker_gc.get_args',
                autospec=True) as mock_get_args:
        with mock.patch('docker_custodian.docker_gc.get_args', autospec=True) as mock_get_args:
            mock_get_args.return_value = mock.Mock(
                max_image_age=100,
                max_container_age=200,

setup.cfg
@@ -10,9 +10,16 @@ default_section = THIRDPARTY
known_first_party = dockertidy
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
force_single_line = true
line_length = 110
line_length = 99
skip_glob = **/env/*,**/docs/*

[yapf]
based_on_style = google
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true

[tool:pytest]
filterwarnings =
    ignore::FutureWarning
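These isort/yapf settings explain most of the churn in the test diff above: yapf joins any call that fits within column_limit = 99 onto a single line, so previously wrapped mock.patch calls collapse. A before/after illustration (hand-written, not yapf output verbatim):

    from unittest import mock

    # Wrapped form found throughout the old tests (still valid Python):
    patcher = mock.patch(
        'docker_custodian.docker_gc.log',
        autospec=True)

    # Form yapf produces once the whole call fits within 99 columns:
    patcher = mock.patch('docker_custodian.docker_gc.log', autospec=True)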
4
setup.py
@@ -71,7 +71,7 @@ setup(
        "colorama==0.4.3",
        "docker==4.2.0",
        "docker-pycreds==0.4.0",
        "environs==7.2.0",
        "environs==7.3.0",
        "idna==2.9",
        "importlib-metadata==1.5.0; python_version < '3.8'",
        "ipaddress==1.0.23",
@@ -90,7 +90,7 @@ setup(
        "six==1.14.0",
        "urllib3==1.25.8",
        "websocket-client==0.57.0",
        "zipp==3.0.0",
        "zipp==3.1.0",
    ],
    dependency_links=[],
    setup_requires=["setuptools_scm",],