mirror of https://github.com/thegeeklab/docker-tidy.git (synced 2024-11-25 13:40:40 +00:00)

Commit 936e4b2188 (parent 12b6e10737): refactor base structure
.flake8 (4 changed lines)

@ -1,6 +1,6 @@
 [flake8]
-ignore = D103
-max-line-length = 110
+ignore = D103, W503
+max-line-length = 99
 inline-quotes = double
 exclude =
     .git
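The lint config now ignores W503 ("line break before binary operator") and lowers the allowed line length from 110 to 99 characters. As a quick illustration (the variable names are made up), this is the wrapping style W503 would otherwise flag, even though recent PEP 8 revisions recommend it:

```python
base_price, shipping, discount = 100, 5, 10

# Breaking *before* the operator is what W503 flags; with the rule
# ignored, flake8 accepts this layout.
total = (base_price
         + shipping
         - discount)
print(total)  # 95
```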
Pipfile (2 changed lines)

@ -21,6 +21,8 @@ pytest-mock = "*"
 pytest-cov = "*"
 bandit = "*"
 docker-tidy = {editable = true,path = "."}
+autopep8 = "*"
+yapf = "*"
 
 [packages]
 importlib-metadata = {version = "*",markers = "python_version<'3.8'"}
Pipfile.lock (generated, 41 changed lines)

@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "5435cb449b46e93e063eb55b6d7bd5d990e1c552d7648a35b4a5eef846914075"
+            "sha256": "afa3bac7184b4b165d029d7c1db785812064a0ee572d9617b4974f2a922db927"
         },
         "pipfile-spec": 6,
         "requires": {
@ -80,11 +80,11 @@
         },
         "environs": {
             "hashes": [
-                "sha256:2291ce502c9e61b8e208c8c9be4ac474e0f523c4dc23e0beb23118086e43b324",
-                "sha256:44700c562fb6f783640f90c2225d9a80d85d24833b4dd02d20b8ff1c83901e47"
+                "sha256:54099cfbdd9cb320f438bf29992969ccfd5e232ba068bd650b04d76d96001631",
+                "sha256:9578ce00ead984124a5336e5ea073707df303dc19d3b1e7ba34cdce1bb4fe02f"
             ],
             "index": "pypi",
-            "version": "==7.2.0"
+            "version": "==7.3.0"
         },
         "idna": {
             "hashes": [
@ -242,10 +242,10 @@
         },
         "zipp": {
             "hashes": [
-                "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
-                "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
+                "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
+                "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
             ],
-            "version": "==3.0.0"
+            "version": "==3.1.0"
         }
     },
     "develop": {
@ -271,6 +271,13 @@
             ],
             "version": "==19.3.0"
         },
+        "autopep8": {
+            "hashes": [
+                "sha256:0f592a0447acea0c2b0a9602be1e4e3d86db52badd2e3c84f0193bfd89fd3a43"
+            ],
+            "index": "pypi",
+            "version": "==1.5"
+        },
         "bandit": {
             "hashes": [
                 "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952",
@ -402,11 +409,11 @@
         },
         "environs": {
             "hashes": [
-                "sha256:2291ce502c9e61b8e208c8c9be4ac474e0f523c4dc23e0beb23118086e43b324",
-                "sha256:44700c562fb6f783640f90c2225d9a80d85d24833b4dd02d20b8ff1c83901e47"
+                "sha256:54099cfbdd9cb320f438bf29992969ccfd5e232ba068bd650b04d76d96001631",
+                "sha256:9578ce00ead984124a5336e5ea073707df303dc19d3b1e7ba34cdce1bb4fe02f"
             ],
             "index": "pypi",
-            "version": "==7.2.0"
+            "version": "==7.3.0"
         },
         "first": {
             "hashes": [
@ -956,12 +963,20 @@
             ],
             "version": "==0.34.2"
         },
+        "yapf": {
+            "hashes": [
+                "sha256:712e23c468506bf12cadd10169f852572ecc61b266258422d45aaf4ad7ef43de",
+                "sha256:cad8a272c6001b3401de3278238fdc54997b6c2e56baa751788915f879a52fca"
+            ],
+            "index": "pypi",
+            "version": "==0.29.0"
+        },
         "zipp": {
             "hashes": [
-                "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
-                "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
+                "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
+                "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
             ],
-            "version": "==3.0.0"
+            "version": "==3.1.0"
         }
     }
 }
dockertidy/Autostop.py (new file, 69 lines)

@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+"""Stop long running docker iamges."""
+
+import dateutil.parser
+import docker
+import docker.errors
+import requests.exceptions
+
+from dockertidy.Config import SingleConfig
+from dockertidy.Logger import SingleLog
+from dockertidy.Parser import timedelta
+
+
+class AutoStop:
+
+    def __init__(self):
+        self.config = SingleConfig()
+        self.log = SingleLog()
+        self.logger = SingleLog().logger
+        self.docker = self._get_docker_client()
+
+    def stop_containers(self):
+        client = self.docker
+        config = self.config.config
+
+        max_run_time = timedelta(config["stop"]["max_run_time"])
+        prefix = config["stop"]["prefix"]
+        dry_run = config["dry_run"]
+
+        matcher = self._build_container_matcher(prefix)
+
+        for container_summary in client.containers():
+            container = client.inspect_container(container_summary["Id"])
+            name = container["Name"].lstrip("/")
+            if (matcher(name) and self._has_been_running_since(container, max_run_time)):
+
+                self.logger.info(
+                    "Stopping container %s %s: running since %s" %
+                    (container["Id"][:16], name, container["State"]["StartedAt"])
+                )
+
+                if not dry_run:
+                    self._stop_container(client, container["Id"])
+
+    def _stop_container(self, client, cid):
+        try:
+            client.stop(cid)
+        except requests.exceptions.Timeout as e:
+            self.logger.warn("Failed to stop container %s: %s" % (cid, e))
+        except docker.errors.APIError as ae:
+            self.logger.warn("Error stopping %s: %s" % (cid, ae))
+
+    def _build_container_matcher(self, prefixes):
+
+        def matcher(name):
+            return any(name.startswith(prefix) for prefix in prefixes)
+
+        return matcher
+
+    def _has_been_running_since(self, container, min_time):
+        started_at = container.get("State", {}).get("StartedAt")
+        if not started_at:
+            return False
+
+        return dateutil.parser.parse(started_at) <= min_time
+
+    def _get_docker_client(self):
+        config = self.config.config
+        return docker.APIClient(version="auto", timeout=config["timeout"])
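The new AutoStop class carries over the prefix matcher and the "running since" check from the old docker_custodian code. Both are easy to exercise in isolation; the sketch below uses a made-up container payload shaped like docker's inspect_container() output:

```python
import datetime

import dateutil.parser
from dateutil import tz

# Made-up container payload, mimicking the docker API inspect output
container = {
    "Id": "4b9105e1f57a9f03",
    "Name": "/web-staging-1",
    "State": {"StartedAt": "2020-02-01T08:30:00Z"},
}


def build_container_matcher(prefixes):
    # Same closure pattern as AutoStop._build_container_matcher
    def matcher(name):
        return any(name.startswith(prefix) for prefix in prefixes)

    return matcher


matcher = build_container_matcher(["web-", "worker-"])
name = container["Name"].lstrip("/")

# A container qualifies when its StartedAt timestamp is at or before the
# cutoff (now minus the configured max run time, here three days)
cutoff = datetime.datetime.now(tz.tzutc()) - datetime.timedelta(days=3)
started = dateutil.parser.parse(container["State"]["StartedAt"])
print(matcher(name) and started <= cutoff)
```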
@ -2,18 +2,12 @@
 """Entrypoint and CLI handler."""
 
 import argparse
-import logging
-import os
-import sys
-
-from importlib_metadata import PackageNotFoundError
-from importlib_metadata import version
 
 import dockertidy.Exception
 from dockertidy import __version__
 from dockertidy.Config import SingleConfig
-from dockertidy.Utils import SingleLog
-from dockertidy.Utils import timedelta_type
+from dockertidy.Logger import SingleLog
+from dockertidy.Parser import timedelta_validator
 
 
 class DockerTidy:
@ -31,53 +25,97 @@ class DockerTidy:
         :return: args objec
         """
         parser = argparse.ArgumentParser(
-            description="Generate documentation from annotated Ansible roles using templates")
-        parser.add_argument("-v", dest="logging.level", action="append_const", const=-1,
-                            help="increase log level")
-        parser.add_argument("-q", dest="logging.level", action="append_const",
-                            const=1, help="decrease log level")
-        parser.add_argument("--version", action="version",
-                            version="%(prog)s {}".format(__version__))
+            description="Generate documentation from annotated Ansible roles using templates"
+        )
+        parser.add_argument(
+            "--dry-run",
+            action="store_true",
+            dest="dry_run",
+            help="Only log actions, don't stop anything."
+        )
+        parser.add_argument(
+            "-t",
+            "--timeout",
+            type=int,
+            dest="http_timeout",
+            metavar="HTTP_TIMEOUT",
+            help="HTTP timeout in seconds for making docker API calls."
+        )
+        parser.add_argument(
+            "-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
+        )
+        parser.add_argument(
+            "-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
+        )
+        parser.add_argument(
+            "--version", action="version", version="%(prog)s {}".format(__version__)
+        )
 
         subparsers = parser.add_subparsers(help="sub-command help")
 
-        parser_gc = subparsers.add_parser(
-            "gc", help="Run docker garbage collector.")
+        parser_gc = subparsers.add_parser("gc", help="Run docker garbage collector.")
         parser_gc.add_argument(
             "--max-container-age",
-            type=timedelta_type,
+            type=timedelta_validator,
+            dest="gc.max_container_age",
+            metavar="MAX_CONTAINER_AGE",
             help="Maximum age for a container. Containers older than this age "
             "will be removed. Age can be specified in any pytimeparse "
-            "supported format.")
+            "supported format."
+        )
         parser_gc.add_argument(
             "--max-image-age",
-            type=timedelta_type,
+            type=timedelta_validator,
+            dest="gc.max_image_age",
+            metavar="MAX_IMAGE_AGE",
             help="Maxium age for an image. Images older than this age will be "
             "removed. Age can be specified in any pytimeparse supported "
-            "format.")
+            "format."
+        )
         parser_gc.add_argument(
             "--dangling-volumes",
             action="store_true",
-            help="Dangling volumes will be removed.")
-        parser_gc.add_argument(
-            "--dry-run", action="store_true",
-            help="Only log actions, don't remove anything.")
-        parser_gc.add_argument(
-            "-t", "--timeout", type=int, default=60,
-            help="HTTP timeout in seconds for making docker API calls.")
+            dest="gc.dangling_volumes",
+            help="Dangling volumes will be removed."
+        )
         parser_gc.add_argument(
             "--exclude-image",
             action="append",
-            help="Never remove images with this tag.")
-        parser_gc.add_argument(
-            "--exclude-image-file",
-            type=argparse.FileType("r"),
-            help="Path to a file which contains a list of images to exclude, one "
-            "image tag per line.")
+            type=str,
+            dest="gc.exclude_image",
+            metavar="EXCLUDE_IMAGE",
+            help="Never remove images with this tag."
+        )
         parser_gc.add_argument(
             "--exclude-container-label",
-            action="append", type=str, default=[],
-            help="Never remove containers with this label key or label key=value")
+            action="append",
+            type=str,
+            dest="gc.exclude_container_label",
+            metavar="EXCLUDE_CONTAINER_LABEL",
+            help="Never remove containers with this label key "
+            "or label key=value"
+        )
+
+        parser_stop = subparsers.add_parser(
+            "stop", help="Stop containers that have been running for too long."
+        )
+        parser_stop.add_argument(
+            "--max-run-time",
+            type=timedelta_validator,
+            dest="stop.max_run_time",
+            metavar="MAX_RUN_TIME",
+            help="Maximum time a container is allows to run. Time may "
+            "be specified in any pytimeparse supported format."
+        )
+        parser_stop.add_argument(
+            "--prefix",
+            action="append",
+            type=str,
+            dest="stop.prefix",
+            metavar="PREFIX",
+            help="Only stop containers which match one of the "
+            "prefix."
+        )
 
         return parser.parse_args().__dict__
@ -87,11 +125,12 @@ class DockerTidy:
         except dockertidy.Exception.ConfigError as e:
             self.log.sysexit_with_message(e)
 
+        print(config.config)
+
         try:
             self.log.set_level(config.config["logging"]["level"])
         except ValueError as e:
-            self.log.sysexit_with_message(
-                "Can not set log level.\n{}".format(str(e)))
+            self.log.sysexit_with_message("Can not set log level.\n{}".format(str(e)))
 
         self.logger.info("Using config file {}".format(config.config_file))
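The reworked parser encodes each option's target location in the config tree directly in its dest ("gc.max_container_age", "stop.prefix", and so on). Config._add_dict_branch (visible in the Config.py diff below) later expands those dotted keys into nested dicts; a standalone sketch of that expansion, with its recursion copied almost verbatim from the diff:

```python
def add_dict_branch(tree, vector, value):
    # Standalone version of Config._add_dict_branch: walk the dotted key,
    # creating nested dicts along the way
    key = vector[0]
    tree[key] = value if len(vector) == 1 \
        else add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
    return tree


args = {"gc.max_container_age": "2days", "stop.prefix": ["web-"], "dry_run": True}
config = {}
for key, value in args.items():
    add_dict_branch(config, key.split("."), value)

print(config)
# {'gc': {'max_container_age': '2days'}, 'stop': {'prefix': ['web-']}, 'dry_run': True}
```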
@ -1,9 +1,7 @@
 #!/usr/bin/env python3
 """Global settings definition."""
 
-import logging
 import os
-import sys
 
 import anyconfig
 import environs
@ -11,9 +9,10 @@ import jsonschema.exceptions
 import ruamel.yaml
 from appdirs import AppDirs
 from jsonschema._utils import format_as_index
-from pkg_resources import resource_filename
 
 import dockertidy.Exception
+import dockertidy.Parser
+from dockertidy.Parser import env
 from dockertidy.Utils import Singleton
 
 config_dir = AppDirs("docker-tidy").user_config_dir
@ -26,7 +25,8 @@ class Config():
 
     Settings are loade from multiple locations in defined order (last wins):
     - default settings defined by `self._get_defaults()`
-    - yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
+    - yaml config file, defaults to OS specific user config dir
+      see (https://pypi.org/project/appdirs/)
     - provides cli parameters
     """
@ -36,22 +36,18 @@ class Config():
             "env": "CONFIG_FILE",
             "type": environs.Env().str
         },
-        "role_dir": {
-            "default": "",
-            "env": "ROLE_DIR",
-            "type": environs.Env().str
-        },
-        "role_name": {
-            "default": "",
-            "env": "ROLE_NAME",
-            "type": environs.Env().str
-        },
         "dry_run": {
             "default": False,
-            "env": "DRY_RUN",
+            "env": "DRY_TUN",
             "file": True,
             "type": environs.Env().bool
         },
+        "http_timeout": {
+            "default": 60,
+            "env": "HTTP_TIMEOUT",
+            "file": True,
+            "type": environs.Env().int
+        },
         "logging.level": {
             "default": "WARNING",
             "env": "LOG_LEVEL",
@ -64,73 +60,47 @@ class Config():
             "file": True,
             "type": environs.Env().bool
         },
-        "output_dir": {
-            "default": os.getcwd(),
-            "env": "OUTPUT_DIR",
+        "gc.max_container_age": {
+            "default": "1day",
+            "env": "GC_MAX_CONTAINER_AGE",
             "file": True,
-            "type": environs.Env().str
+            "type": env.timedelta_validator
         },
-        "template_dir": {
-            "default": os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates"),
-            "env": "TEMPLATE_DIR",
+        "gc.max_image_age": {
+            "default": "1day",
+            "env": "GC_MAX_IMAGE_AGE",
             "file": True,
-            "type": environs.Env().str
+            "type": env.timedelta_validator
         },
-        "template": {
-            "default": "readme",
-            "env": "TEMPLATE",
-            "file": True,
-            "type": environs.Env().str
-        },
-        "force_overwrite": {
+        "gc.dangling_volumes": {
             "default": False,
-            "env": "FORCE_OVERWRITE",
+            "env": "GC_EXCLUDE_IMAGE",
             "file": True,
             "type": environs.Env().bool
         },
-        "custom_header": {
-            "default": "",
-            "env": "CUSTOM_HEADER",
-            "file": True,
-            "type": environs.Env().str
-        },
-        "exclude_files": {
+        "gc.exclude_image": {
             "default": [],
-            "env": "EXCLUDE_FILES",
+            "env": "GC_DANGLING_VOLUMES",
             "file": True,
             "type": environs.Env().list
         },
-    }
-
-    ANNOTATIONS = {
-        "meta": {
-            "name": "meta",
-            "automatic": True,
-            "subtypes": []
+        "gc.exclude_container_label": {
+            "default": [],
+            "env": "GC_EXCLUDE_CONTAINER_LABEL",
+            "file": True,
+            "type": environs.Env().list
         },
-        "todo": {
-            "name": "todo",
-            "automatic": True,
-            "subtypes": []
+        "stop.max_run_time": {
+            "default": "3days",
+            "env": "STOP_MAX_RUN_TIME",
+            "file": True,
+            "type": env.timedelta_validator
         },
-        "var": {
-            "name": "var",
-            "automatic": True,
-            "subtypes": [
-                "value",
-                "example",
-                "description"
-            ]
-        },
-        "example": {
-            "name": "example",
-            "automatic": True,
-            "subtypes": []
-        },
-        "tag": {
-            "name": "tag",
-            "automatic": True,
-            "subtypes": []
+        "stop.prefix": {
+            "default": [],
+            "env": "STOP_PREFIX",
+            "file": True,
+            "type": environs.Env().list
         },
     }
@ -146,10 +116,8 @@ class Config():
         self._args = args
         self._schema = None
         self.config_file = default_config_file
-        self.role_dir = os.getcwd()
         self.config = None
         self._set_config()
-        self.is_role = self._set_is_role() or False
 
     def _get_args(self, args):
         cleaned = dict(filter(lambda item: item[1] is not None, args.items()))
@ -173,9 +141,6 @@ class Config():
         for key, item in self.SETTINGS.items():
             normalized = self._add_dict_branch(normalized, key.split("."), item["default"])
 
-        # compute role_name default
-        normalized["role_name"] = os.path.basename(self.role_dir)
-
         self.schema = anyconfig.gen_schema(normalized)
         return normalized
@ -183,7 +148,7 @@ class Config():
         normalized = {}
         for key, item in self.SETTINGS.items():
             if item.get("env"):
-                prefix = "ANSIBLE_DOCTOR_"
+                prefix = "TIDY_"
                 envname = prefix + item["env"]
                 try:
                     value = item["type"](envname)
@ -192,7 +157,9 @@ class Config():
                     if '"{}" not set'.format(envname) in str(e):
                         pass
                     else:
-                        raise dockertidy.Exception.ConfigError("Unable to read environment variable", str(e))
+                        raise dockertidy.Exception.ConfigError(
+                            "Unable to read environment variable", str(e)
+                        )
 
         return normalized
@ -204,13 +171,9 @@ class Config():
         # preset config file path
         if envs.get("config_file"):
            self.config_file = self._normalize_path(envs.get("config_file"))
-        if envs.get("role_dir"):
-            self.role_dir = self._normalize_path(envs.get("role_dir"))
 
         if args.get("config_file"):
             self.config_file = self._normalize_path(args.get("config_file"))
-        if args.get("role_dir"):
-            self.role_dir = self._normalize_path(args.get("role_dir"))
 
         source_files = []
         source_files.append(self.config_file)
@ -224,7 +187,9 @@ class Config():
                 s = stream.read()
                 try:
                     file_dict = ruamel.yaml.safe_load(s)
-                except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
+                except (
+                    ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                ) as e:
                     message = "{} {}".format(e.context, e.problem)
                     raise dockertidy.Exception.ConfigError(
                         "Unable to read config file {}".format(config), message
@ -240,15 +205,8 @@ class Config():
         if self._validate(args):
             anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)
 
-        fix_files = ["output_dir", "template_dir", "custom_header"]
-        for file in fix_files:
-            if defaults[file] and defaults[file] != "":
-                defaults[file] = self._normalize_path(defaults[file])
-
         if "config_file" in defaults:
             defaults.pop("config_file")
-        if "role_dir" in defaults:
-            defaults.pop("role_dir")
 
         defaults["logging"]["level"] = defaults["logging"]["level"].upper()
@ -261,10 +219,6 @@ class Config():
         else:
             return path
 
-    def _set_is_role(self):
-        if os.path.isdir(os.path.join(self.role_dir, "tasks")):
-            return True
-
     def _validate(self, config):
         try:
             anyconfig.validate(config, self.schema, ac_schema_safe=False)
@ -285,32 +239,6 @@ class Config():
             else self._add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
         return tree
 
-    def get_annotations_definition(self, automatic=True):
-        annotations = {}
-        if automatic:
-            for k, item in self.ANNOTATIONS.items():
-                if "automatic" in item.keys() and item["automatic"]:
-                    annotations[k] = item
-        return annotations
-
-    def get_annotations_names(self, automatic=True):
-        annotations = []
-        if automatic:
-            for k, item in self.ANNOTATIONS.items():
-                if "automatic" in item.keys() and item["automatic"]:
-                    annotations.append(k)
-        return annotations
-
-    def get_template(self):
-        """
-        Get the base dir for the template to use.
-
-        :return: str abs path
-        """
-        template_dir = self.config.get("template_dir")
-        template = self.config.get("template")
-        return os.path.realpath(os.path.join(template_dir, template))
-
 
 class SingleConfig(Config, metaclass=Singleton):
     pass
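The environment prefix changes from ANSIBLE_DOCTOR_ to TIDY_, and each SETTINGS entry pairs an env name with an environs parser ("type"), so value = item["type"](envname) both reads and validates the variable. A minimal sketch of that lookup, with a made-up value exported for demonstration:

```python
import os

import environs

# Example only: pretend the user exported TIDY_HTTP_TIMEOUT=30
os.environ["TIDY_HTTP_TIMEOUT"] = "30"

env = environs.Env()
# Mirrors value = item["type"](envname), where item["type"] is
# environs.Env().int and envname is "TIDY_" + item["env"]
print(env.int("TIDY_HTTP_TIMEOUT"))  # 30
```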
@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 """Remove unused docker containers and images."""
 
-import argparse
 import fnmatch
 import logging
 import sys
@ -12,10 +11,9 @@ import docker
 import docker.errors
 import requests.exceptions
 from docker.utils import kwargs_from_env
-from docker_custodian.args import timedelta_type
 
-log = logging.getLogger(__name__)
-
+from dockertidy.Config import SingleConfig
+from dockertidy.Logger import SingleLog
 
 # This seems to be something docker uses for a null/zero date
 YEAR_ZERO = "0001-01-01T00:00:00Z"
@ -23,12 +21,15 @@ YEAR_ZERO = "0001-01-01T00:00:00Z"
 ExcludeLabel = namedtuple("ExcludeLabel", ["key", "value"])
 
 
-def cleanup_containers(
-    client,
-    max_container_age,
-    dry_run,
-    exclude_container_labels,
-):
+class GarbageCollector:
+
+    def __init__(self):
+        self.config = SingleConfig()
+        self.log = SingleLog()
+        self.logger = SingleLog().logger
+
+
+def cleanup_containers(client, max_container_age, dry_run, exclude_container_labels):
     all_containers = get_all_containers(client)
     filtered_containers = filter_excluded_containers(
         all_containers,
@ -40,15 +41,13 @@ def cleanup_containers(
             container=container_summary["Id"],
         )
         if not container or not should_remove_container(
             container,
             max_container_age,
         ):
             continue
 
-        log.info("Removing container %s %s %s" % (
-            container["Id"][:16],
-            container.get("Name", "").lstrip("/"),
-            container["State"]["FinishedAt"]))
+        log.info("Removing container %s %s %s" % (container["Id"][:16], container.get(
+            "Name", "").lstrip("/"), container["State"]["FinishedAt"]))
 
         if not dry_run:
             api_call(
@ -64,11 +63,12 @@ def filter_excluded_containers(containers, exclude_container_labels):
 
     def include_container(container):
         if should_exclude_container_with_labels(
             container,
             exclude_container_labels,
         ):
             return False
         return True
 
     return filter(include_container, containers)
@ -81,16 +81,12 @@ def should_exclude_container_with_labels(container, exclude_container_labels):
                 exclude_label.key,
             )
-            label_values_to_check = [
-                container["Labels"][matching_key]
-                for matching_key in matching_keys
-            ]
+            label_values_to_check = [
+                container["Labels"][matching_key] for matching_key in matching_keys
+            ]
             if fnmatch.filter(label_values_to_check, exclude_label.value):
                 return True
         else:
-            if fnmatch.filter(
-                container["Labels"].keys(),
-                exclude_label.key
-            ):
+            if fnmatch.filter(container["Labels"].keys(), exclude_label.key):
                 return True
     return False
@ -153,6 +149,7 @@ def cleanup_images(client, max_image_age, dry_run, exclude_set):
 
 def filter_excluded_images(images, exclude_set):
+
     def include_image(image_summary):
         image_tags = image_summary.get("RepoTags")
         if no_image_tags(image_tags):
@ -166,6 +163,7 @@ def filter_excluded_images(images, exclude_set):
 
 def filter_images_in_use(images, image_tags_in_use):
+
     def get_tag_set(image_summary):
         image_tags = image_summary.get("RepoTags")
         if no_image_tags(image_tags):
@ -180,6 +178,7 @@ def filter_images_in_use(images, image_tags_in_use):
 
 def filter_images_in_use_by_id(images, image_ids_in_use):
+
     def image_not_in_use(image_summary):
         return image_summary["Id"] not in image_ids_in_use
@ -245,6 +244,7 @@ def api_call(func, **kwargs):
 
 def format_image(image, image_summary):
+
     def get_tags():
         tags = image_summary.get("RepoTags")
         if not tags or tags == ["<none>:<none>"]:
@ -275,29 +275,20 @@ def format_exclude_labels(exclude_label_args):
             exclude_label_value = split_exclude_label[1]
         else:
             exclude_label_value = None
-        exclude_labels.append(
-            ExcludeLabel(
-                key=exclude_label_key,
-                value=exclude_label_value,
-            )
-        )
+        exclude_labels.append(ExcludeLabel(
+            key=exclude_label_key,
+            value=exclude_label_value,
+        ))
     return exclude_labels
 
 
 def main():
-    logging.basicConfig(
-        level=logging.INFO,
-        format="%(message)s",
-        stream=sys.stdout)
+    logging.basicConfig(level=logging.INFO, format="%(message)s", stream=sys.stdout)
 
     args = get_args()
-    client = docker.APIClient(version="auto",
-                              timeout=args.timeout,
-                              **kwargs_from_env())
+    client = docker.APIClient(version="auto", timeout=args.timeout, **kwargs_from_env())
 
-    exclude_container_labels = format_exclude_labels(
-        args.exclude_container_label
-    )
+    exclude_container_labels = format_exclude_labels(args.exclude_container_label)
 
     if args.max_container_age:
         cleanup_containers(
@ -308,55 +299,8 @@ def main():
         )
 
     if args.max_image_age:
-        exclude_set = build_exclude_set(
-            args.exclude_image,
-            args.exclude_image_file)
+        exclude_set = build_exclude_set(args.exclude_image, args.exclude_image_file)
         cleanup_images(client, args.max_image_age, args.dry_run, exclude_set)
 
     if args.dangling_volumes:
         cleanup_volumes(client, args.dry_run)
 
 
-def get_args(args=None):
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--max-container-age",
-        type=timedelta_type,
-        help="Maximum age for a container. Containers older than this age "
-        "will be removed. Age can be specified in any pytimeparse "
-        "supported format.")
-    parser.add_argument(
-        "--max-image-age",
-        type=timedelta_type,
-        help="Maxium age for an image. Images older than this age will be "
-        "removed. Age can be specified in any pytimeparse supported "
-        "format.")
-    parser.add_argument(
-        "--dangling-volumes",
-        action="store_true",
-        help="Dangling volumes will be removed.")
-    parser.add_argument(
-        "--dry-run", action="store_true",
-        help="Only log actions, don't remove anything.")
-    parser.add_argument(
-        "-t", "--timeout", type=int, default=60,
-        help="HTTP timeout in seconds for making docker API calls.")
-    parser.add_argument(
-        "--exclude-image",
-        action="append",
-        help="Never remove images with this tag.")
-    parser.add_argument(
-        "--exclude-image-file",
-        type=argparse.FileType("r"),
-        help="Path to a file which contains a list of images to exclude, one "
-        "image tag per line.")
-    parser.add_argument(
-        "--exclude-container-label",
-        action="append", type=str, default=[],
-        help="Never remove containers with this label key or label key=value")
-
-    return parser.parse_args(args=args)
-
-
-if __name__ == "__main__":
-    main()
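The exclude-label handling splits each --exclude-container-label argument into a key and an optional value, then matches with fnmatch, so both parts accept shell-style wildcards. A standalone sketch of the matching path (the label dict and pattern are made up; the split-on-"=" step is inferred from the surrounding context lines):

```python
import fnmatch
from collections import namedtuple

ExcludeLabel = namedtuple("ExcludeLabel", ["key", "value"])

labels = {"com.example.keep": "true", "env": "prod"}  # made-up container labels
exclude = ExcludeLabel(key="com.example.*", value="true")

# Key side: wildcard match against the container's label keys
matching_keys = fnmatch.filter(labels.keys(), exclude.key)
# Value side: wildcard match against the values of the matching keys
values = [labels[k] for k in matching_keys]
print(bool(fnmatch.filter(values, exclude.value)))  # True -> container excluded
```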
dockertidy/Logger.py (new file, 184 lines)

@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+"""Global utility methods and classes."""
+
+import logging
+import os
+import sys
+
+import colorama
+from pythonjsonlogger import jsonlogger
+
+from dockertidy.Utils import Singleton
+from dockertidy.Utils import to_bool
+
+CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
+JSON_FORMAT = "(asctime) (levelname) (message)"
+
+
+def _should_do_markup():
+    py_colors = os.environ.get("PY_COLORS", None)
+    if py_colors is not None:
+        return to_bool(py_colors)
+
+    return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"
+
+
+colorama.init(autoreset=True, strip=not _should_do_markup())
+
+
+class LogFilter(object):
+    """A custom log filter which excludes log messages above the logged level."""
+
+    def __init__(self, level):
+        """
+        Initialize a new custom log filter.
+
+        :param level: Log level limit
+        :returns: None
+
+        """
+        self.__level = level
+
+    def filter(self, logRecord):  # noqa
+        # https://docs.python.org/3/library/logging.html#logrecord-attributes
+        return logRecord.levelno <= self.__level
+
+
+class MultilineFormatter(logging.Formatter):
+    """Logging Formatter to reset color after newline characters."""
+
+    def format(self, record):  # noqa
+        record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
+        return logging.Formatter.format(self, record)
+
+
+class MultilineJsonFormatter(jsonlogger.JsonFormatter):
+    """Logging Formatter to remove newline characters."""
+
+    def format(self, record):  # noqa
+        record.msg = record.msg.replace("\n", " ")
+        return jsonlogger.JsonFormatter.format(self, record)
+
+
+class Log:
+
+    def __init__(self, level=logging.WARN, name="dockertidy", json=False):
+        self.logger = logging.getLogger(name)
+        self.logger.setLevel(level)
+        self.logger.addHandler(self._get_error_handler(json=json))
+        self.logger.addHandler(self._get_warn_handler(json=json))
+        self.logger.addHandler(self._get_info_handler(json=json))
+        self.logger.addHandler(self._get_critical_handler(json=json))
+        self.logger.addHandler(self._get_debug_handler(json=json))
+        self.logger.propagate = False
+
+    def _get_error_handler(self, json=False):
+        handler = logging.StreamHandler(sys.stderr)
+        handler.setLevel(logging.ERROR)
+        handler.addFilter(LogFilter(logging.ERROR))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
+
+        if json:
+            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
+
+        return handler
+
+    def _get_warn_handler(self, json=False):
+        handler = logging.StreamHandler(sys.stdout)
+        handler.setLevel(logging.WARN)
+        handler.addFilter(LogFilter(logging.WARN))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))))
+
+        if json:
+            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
+
+        return handler
+
+    def _get_info_handler(self, json=False):
+        handler = logging.StreamHandler(sys.stdout)
+        handler.setLevel(logging.INFO)
+        handler.addFilter(LogFilter(logging.INFO))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))))
+
+        if json:
+            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
+
+        return handler
+
+    def _get_critical_handler(self, json=False):
+        handler = logging.StreamHandler(sys.stderr)
+        handler.setLevel(logging.CRITICAL)
+        handler.addFilter(LogFilter(logging.CRITICAL))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
+
+        if json:
+            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
+
+        return handler
+
+    def _get_debug_handler(self, json=False):
+        handler = logging.StreamHandler(sys.stderr)
+        handler.setLevel(logging.DEBUG)
+        handler.addFilter(LogFilter(logging.DEBUG))
+        handler.setFormatter(
+            MultilineFormatter(
+                self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE,
+                                                    colorama.Style.RESET_ALL))))
+
+        if json:
+            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
+
+        return handler
+
+    def set_level(self, s):
+        self.logger.setLevel(s)
+
+    def debug(self, msg):
+        """Format info messages and return string."""
+        return msg
+
+    def critical(self, msg):
+        """Format critical messages and return string."""
+        return msg
+
+    def error(self, msg):
+        """Format error messages and return string."""
+        return msg
+
+    def warn(self, msg):
+        """Format warn messages and return string."""
+        return msg
+
+    def info(self, msg):
+        """Format info messages and return string."""
+        return msg
+
+    def _color_text(self, color, msg):
+        """
+        Colorize strings.
+
+        :param color: colorama color settings
+        :param msg: string to colorize
+        :returns: string
+
+        """
+        return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
+
+    def sysexit(self, code=1):
+        sys.exit(code)
+
+    def sysexit_with_message(self, msg, code=1):
+        self.logger.critical(str(msg))
+        self.sysexit(code)
+
+
+class SingleLog(Log, metaclass=Singleton):
+    pass
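Log wires one stream handler per level, and LogFilter is what makes each handler exclusive: setLevel() on a handler only sets a lower bound, so the filter additionally drops records *above* the handler's level. A minimal reproduction of that routing:

```python
import logging
import sys


class LogFilter:
    # Same idea as Logger.LogFilter: keep only records at or below the level
    def __init__(self, level):
        self.__level = level

    def filter(self, record):
        return record.levelno <= self.__level


logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)

info_handler = logging.StreamHandler(sys.stdout)
info_handler.setLevel(logging.INFO)              # lower bound: INFO and above
info_handler.addFilter(LogFilter(logging.INFO))  # upper bound: INFO and below
logger.addHandler(info_handler)

logger.info("reaches stdout")
logger.error("dropped by this handler; Log routes errors to a stderr handler")
```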
dockertidy/Parser.py (new file, 51 lines)

@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+"""Custom input type parser."""
+
+import datetime
+
+import environs
+from dateutil import tz
+from pytimeparse import timeparse
+
+env = environs.Env()
+
+
+def timedelta_validator(value):
+    """Return the :class:`datetime.datetime.DateTime` for a time in the past.
+
+    :param value: a string containing a time format supported by
+    mod:`pytimeparse`
+    """
+    if value is None:
+        return None
+
+    try:
+        _datetime_seconds_ago(timeparse.timeparse(value))
+        return value
+    except TypeError:
+        raise
+
+
+def timedelta(value):
+    """Return the :class:`datetime.datetime.DateTime` for a time in the past.
+
+    :param value: a string containing a time format supported by
+    mod:`pytimeparse`
+    """
+    if value is None:
+        return None
+    return _datetime_seconds_ago(timeparse.timeparse(value))
+
+
+def _datetime_seconds_ago(seconds):
+    now = datetime.datetime.now(tz.tzutc())
+    return now - datetime.timedelta(seconds=seconds)
+
+
+@env.parser_for("timedelta_validator")
+def timedelta_parser(value):
+    try:
+        timedelta_validator(value)
+        return value
+    except TypeError as e:
+        raise environs.EnvError(e)
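Both helpers lean on pytimeparse, which turns a human-readable duration string into seconds; timedelta() then anchors that span against the current UTC time, exactly as _datetime_seconds_ago does. For example:

```python
import datetime

from dateutil import tz
from pytimeparse import timeparse

seconds = timeparse.timeparse("2 days")  # 172800
# Same arithmetic as _datetime_seconds_ago(): "now" minus the parsed span
cutoff = datetime.datetime.now(tz.tzutc()) - datetime.timedelta(seconds=seconds)
print(seconds, cutoff.isoformat())
```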
@ -1,55 +1,13 @@
 #!/usr/bin/env python3
 """Global utility methods and classes."""
 
-import datetime
-import logging
-import os
-import pprint
-import sys
 from distutils.util import strtobool
 
-import colorama
-from dateutil import tz
-from pythonjsonlogger import jsonlogger
-from pytimeparse import timeparse
-
-import dockertidy.Exception
-
-CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
-JSON_FORMAT = "(asctime) (levelname) (message)"
-
 
 def to_bool(string):
     return bool(strtobool(str(string)))
 
 
-def timedelta_type(value):
-    """Return the :class:`datetime.datetime.DateTime` for a time in the past.
-
-    :param value: a string containing a time format supported by
-    mod:`pytimeparse`
-    """
-    if value is None:
-        return None
-    return _datetime_seconds_ago(timeparse.timeparse(value))
-
-
-def _datetime_seconds_ago(seconds):
-    now = datetime.datetime.now(tz.tzutc())
-    return now - datetime.timedelta(seconds=seconds)
-
-
-def _should_do_markup():
-    py_colors = os.environ.get("PY_COLORS", None)
-    if py_colors is not None:
-        return to_bool(py_colors)
-
-    return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"
-
-
-colorama.init(autoreset=True, strip=not _should_do_markup())
-
-
 class Singleton(type):
     _instances = {}
@ -57,183 +15,3 @@ class Singleton(type):
         if cls not in cls._instances:
             cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
         return cls._instances[cls]
-
-
-class LogFilter(object):
-    """A custom log filter which excludes log messages above the logged level."""
-
-    def __init__(self, level):
-        """
-        Initialize a new custom log filter.
-
-        :param level: Log level limit
-        :returns: None
-
-        """
-        self.__level = level
-
-    def filter(self, logRecord):  # noqa
-        # https://docs.python.org/3/library/logging.html#logrecord-attributes
-        return logRecord.levelno <= self.__level
-
-
-class MultilineFormatter(logging.Formatter):
-    """Logging Formatter to reset color after newline characters."""
-
-    def format(self, record):  # noqa
-        record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
-        return logging.Formatter.format(self, record)
-
-
-class MultilineJsonFormatter(jsonlogger.JsonFormatter):
-    """Logging Formatter to remove newline characters."""
-
-    def format(self, record):  # noqa
-        record.msg = record.msg.replace("\n", " ")
-        return jsonlogger.JsonFormatter.format(self, record)
-
-
-class Log:
-    def __init__(self, level=logging.WARN, name="dockertidy", json=False):
-        self.logger = logging.getLogger(name)
-        self.logger.setLevel(level)
-        self.logger.addHandler(self._get_error_handler(json=json))
-        self.logger.addHandler(self._get_warn_handler(json=json))
-        self.logger.addHandler(self._get_info_handler(json=json))
-        self.logger.addHandler(self._get_critical_handler(json=json))
-        self.logger.addHandler(self._get_debug_handler(json=json))
-        self.logger.propagate = False
-
-    def _get_error_handler(self, json=False):
-        handler = logging.StreamHandler(sys.stderr)
-        handler.setLevel(logging.ERROR)
-        handler.addFilter(LogFilter(logging.ERROR))
-        handler.setFormatter(MultilineFormatter(
-            self.error(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
-
-        if json:
-            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
-
-        return handler
-
-    def _get_warn_handler(self, json=False):
-        handler = logging.StreamHandler(sys.stdout)
-        handler.setLevel(logging.WARN)
-        handler.addFilter(LogFilter(logging.WARN))
-        handler.setFormatter(MultilineFormatter(
-            self.warn(CONSOLE_FORMAT.format(colorama.Fore.YELLOW, colorama.Style.RESET_ALL))))
-
-        if json:
-            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
-
-        return handler
-
-    def _get_info_handler(self, json=False):
-        handler = logging.StreamHandler(sys.stdout)
-        handler.setLevel(logging.INFO)
-        handler.addFilter(LogFilter(logging.INFO))
-        handler.setFormatter(MultilineFormatter(
-            self.info(CONSOLE_FORMAT.format(colorama.Fore.CYAN, colorama.Style.RESET_ALL))))
-
-        if json:
-            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
-
-        return handler
-
-    def _get_critical_handler(self, json=False):
-        handler = logging.StreamHandler(sys.stderr)
-        handler.setLevel(logging.CRITICAL)
-        handler.addFilter(LogFilter(logging.CRITICAL))
-        handler.setFormatter(MultilineFormatter(
-            self.critical(CONSOLE_FORMAT.format(colorama.Fore.RED, colorama.Style.RESET_ALL))))
-
-        if json:
-            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
-
-        return handler
-
-    def _get_debug_handler(self, json=False):
-        handler = logging.StreamHandler(sys.stderr)
-        handler.setLevel(logging.DEBUG)
-        handler.addFilter(LogFilter(logging.DEBUG))
-        handler.setFormatter(MultilineFormatter(
-            self.critical(CONSOLE_FORMAT.format(colorama.Fore.BLUE, colorama.Style.RESET_ALL))))
-
-        if json:
-            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
-
-        return handler
-
-    def set_level(self, s):
-        self.logger.setLevel(s)
-
-    def debug(self, msg):
-        """Format info messages and return string."""
-        return msg
-
-    def critical(self, msg):
-        """Format critical messages and return string."""
-        return msg
-
-    def error(self, msg):
-        """Format error messages and return string."""
-        return msg
-
-    def warn(self, msg):
-        """Format warn messages and return string."""
-        return msg
-
-    def info(self, msg):
-        """Format info messages and return string."""
-        return msg
-
-    def _color_text(self, color, msg):
-        """
-        Colorize strings.
-
-        :param color: colorama color settings
-        :param msg: string to colorize
-        :returns: string
-
-        """
-        return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
-
-    def sysexit(self, code=1):
-        sys.exit(code)
-
-    def sysexit_with_message(self, msg, code=1):
-        self.logger.critical(str(msg))
-        self.sysexit(code)
-
-
-class SingleLog(Log, metaclass=Singleton):
-    pass
-
-
-class FileUtils:
-    @staticmethod
-    def create_path(path):
-        os.makedirs(path, exist_ok=True)
-
-    @staticmethod
-    def query_yes_no(question, default=True):
-        """Ask a yes/no question via input() and return their answer.
-
-        "question" is a string that is presented to the user.
-        "default" is the presumed answer if the user just hits <Enter>.
-        It must be "yes" (the default), "no" or None (meaning
-        an answer is required of the user).
-
-        The "answer" return value is one of "yes" or "no".
-        """
-        if default:
-            prompt = "[Y/n]"
-        else:
-            prompt = "[N/y]"
-
-        try:
-            # input() is safe in python3
-            choice = input("{} {} ".format(question, prompt)) or default  # nosec
-            return to_bool(choice)
-        except (KeyboardInterrupt, ValueError) as e:
-            raise dockertidy.Exception.InputError("Error while reading input", e)
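After this cleanup, Utils keeps only to_bool and the Singleton metaclass that backs SingleConfig and SingleLog. The metaclass caches the first instance of each class and hands it back on every later call; a small demonstration (Counter is a toy stand-in for the real singletons):

```python
class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]


class Counter(metaclass=Singleton):
    # Toy stand-in for SingleConfig/SingleLog
    def __init__(self):
        self.value = 0


a = Counter()
b = Counter()
a.value += 1
print(a is b, b.value)  # True 1 -- both names point at the same instance
```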
@ -1,5 +1,6 @@
+#!/usr/bin/env python3
 """Default package."""
-from importlib_metadata import PackageNotFoundError
 from importlib_metadata import version
 
 __author__ = "Robert Kaussow"
@ -1,20 +0,0 @@
-import datetime
-
-from dateutil import tz
-from pytimeparse import timeparse
-
-
-def timedelta_type(value):
-    """Return the :class:`datetime.datetime.DateTime` for a time in the past.
-
-    :param value: a string containing a time format supported by
-    mod:`pytimeparse`
-    """
-    if value is None:
-        return None
-    return datetime_seconds_ago(timeparse.timeparse(value))
-
-
-def datetime_seconds_ago(seconds):
-    now = datetime.datetime.now(tz.tzutc())
-    return now - datetime.timedelta(seconds=seconds)
@ -1,103 +0,0 @@
-#!/usr/bin/env python3
-"""Stop long running docker images."""
-
-import argparse
-import logging
-import sys
-
-import dateutil.parser
-import docker
-import docker.errors
-import requests.exceptions
-from docker.utils import kwargs_from_env
-from docker_custodian.args import timedelta_type
-
-log = logging.getLogger(__name__)
-
-
-def stop_containers(client, max_run_time, matcher, dry_run):
-    for container_summary in client.containers():
-        container = client.inspect_container(container_summary["Id"])
-        name = container["Name"].lstrip("/")
-        if (
-            matcher(name) and has_been_running_since(container, max_run_time)
-        ):
-
-            log.info("Stopping container %s %s: running since %s" % (
-                container["Id"][:16],
-                name,
-                container["State"]["StartedAt"]))
-
-            if not dry_run:
-                stop_container(client, container["Id"])
-
-
-def stop_container(client, cid):
-    try:
-        client.stop(cid)
-    except requests.exceptions.Timeout as e:
-        log.warn("Failed to stop container %s: %s" % (cid, e))
-    except docker.errors.APIError as ae:
-        log.warn("Error stopping %s: %s" % (cid, ae))
-
-
-def build_container_matcher(prefixes):
-    def matcher(name):
-        return any(name.startswith(prefix) for prefix in prefixes)
-    return matcher
-
-
-def has_been_running_since(container, min_time):
-    started_at = container.get("State", {}).get("StartedAt")
-    if not started_at:
-        return False
-
-    return dateutil.parser.parse(started_at) <= min_time
-
-
-def main():
-    logging.basicConfig(
-        level=logging.INFO,
-        format="%(message)s",
-        stream=sys.stdout)
-
-    opts = get_opts()
-    client = docker.APIClient(version="auto",
-                              timeout=opts.timeout,
-                              **kwargs_from_env())
-
-    matcher = build_container_matcher(opts.prefix)
-    stop_containers(client, opts.max_run_time, matcher, opts.dry_run)
-
-
-def get_opts(args=None):
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--max-run-time",
-        type=timedelta_type,
-        help="Maximum time a container is allowed to run. Time may "
-             "be specified in any pytimeparse supported format."
-    )
-    parser.add_argument(
-        "--prefix", action="append", default=[],
-        help="Only stop containers which match one of the "
-             "prefixes."
-    )
-    parser.add_argument(
-        "--dry-run", action="store_true",
-        help="Only log actions, don't stop anything."
-    )
-    parser.add_argument(
-        "-t", "--timeout", type=int, default=60,
-        help="HTTP timeout in seconds for making docker API calls."
-    )
-    opts = parser.parse_args(args=args)
-
-    if not opts.prefix:
-        parser.error("Running with no --prefix will match nothing.")
-
-    return opts
-
-
-if __name__ == "__main__":
-    main()
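The removed module composes two small predicates: a closure over name prefixes and a timestamp comparison against the parsed `StartedAt` field. A quick illustration of the matcher half (the container names here are made up):

    def build_container_matcher(prefixes):
        # Closure: True if the name starts with any configured prefix.
        def matcher(name):
            return any(name.startswith(prefix) for prefix in prefixes)
        return matcher

    matcher = build_container_matcher(["ci-", "batch-"])
    assert matcher("ci-build-42")
    assert not matcher("web-frontend")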
@ -9,13 +9,11 @@ except ImportError:
     import mock

-
-

 def test_datetime_seconds_ago(now):
     expected = datetime.datetime(2014, 1, 15, 10, 10, tzinfo=tz.tzutc())
     with mock.patch(
         'docker_custodian.args.datetime.datetime',
         autospec=True,
     ) as mock_datetime:
         mock_datetime.now.return_value = now
         assert args.datetime_seconds_ago(24 * 60 * 60 * 5) == expected
@ -28,8 +26,8 @@ def test_timedelta_type_none():
 def test_timedelta_type(now):
     expected = datetime.datetime(2014, 1, 15, 10, 10, tzinfo=tz.tzutc())
     with mock.patch(
         'docker_custodian.args.datetime.datetime',
         autospec=True,
     ) as mock_datetime:
         mock_datetime.now.return_value = now
         assert args.timedelta_type('5 days') == expected
@ -45,29 +45,18 @@ def test_has_been_running_since_false(container, earlier_time):
     assert not has_been_running_since(container, earlier_time)


-@mock.patch('docker_custodian.docker_autostop.build_container_matcher',
-            autospec=True)
-@mock.patch('docker_custodian.docker_autostop.stop_containers',
-            autospec=True)
-@mock.patch('docker_custodian.docker_autostop.get_opts',
-            autospec=True)
+@mock.patch('docker_custodian.docker_autostop.build_container_matcher', autospec=True)
+@mock.patch('docker_custodian.docker_autostop.stop_containers', autospec=True)
+@mock.patch('docker_custodian.docker_autostop.get_opts', autospec=True)
 @mock.patch('docker_custodian.docker_autostop.docker', autospec=True)
-def test_main(
-        mock_docker,
-        mock_get_opts,
-        mock_stop_containers,
-        mock_build_matcher
-):
+def test_main(mock_docker, mock_get_opts, mock_stop_containers, mock_build_matcher):
     mock_get_opts.return_value.timeout = 30
     main()
     mock_get_opts.assert_called_once_with()
-    mock_build_matcher.assert_called_once_with(
-        mock_get_opts.return_value.prefix)
-    mock_stop_containers.assert_called_once_with(
-        mock.ANY,
-        mock_get_opts.return_value.max_run_time,
-        mock_build_matcher.return_value,
-        mock_get_opts.return_value.dry_run)
+    mock_build_matcher.assert_called_once_with(mock_get_opts.return_value.prefix)
+    mock_stop_containers.assert_called_once_with(mock.ANY, mock_get_opts.return_value.max_run_time,
+                                                 mock_build_matcher.return_value,
+                                                 mock_get_opts.return_value.dry_run)


 def test_get_opts_with_defaults():
@ -79,10 +68,8 @@ def test_get_opts_with_defaults():


 def test_get_opts_with_args(now):
-    with mock.patch(
-        'docker_custodian.docker_autostop.timedelta_type',
-        autospec=True
-    ) as mock_timedelta_type:
+    with mock.patch('docker_custodian.docker_autostop.timedelta_type',
+                    autospec=True) as mock_timedelta_type:
         opts = get_opts(args=['--prefix', 'one', '--max-run-time', '24h'])
         assert opts.max_run_time == mock_timedelta_type.return_value
         mock_timedelta_type.assert_called_once_with('24h')
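As the reformatted tests above show, `get_opts` accepts an explicit argv list, which is the standard way to unit-test an argparse CLI without touching `sys.argv`. A generic sketch of the pattern (options abbreviated relative to the real parser):

    import argparse

    def get_opts(args=None):
        parser = argparse.ArgumentParser()
        parser.add_argument("--prefix", action="append", default=[])
        parser.add_argument("--max-run-time")
        # args=None makes argparse fall back to sys.argv[1:].
        return parser.parse_args(args=args)

    opts = get_opts(args=["--prefix", "one", "--max-run-time", "24h"])
    assert opts.prefix == ["one"]
    assert opts.max_run_time == "24h"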
@ -11,7 +11,6 @@ except ImportError:
     import mock

-

 class TestShouldRemoveContainer(object):

     def test_is_running(self, container, now):
@ -43,8 +42,12 @@ class TestShouldRemoveContainer(object):
 def test_cleanup_containers(mock_client, now):
     max_container_age = now
     mock_client.containers.return_value = [
-        {'Id': 'abcd'},
-        {'Id': 'abbb'},
+        {
+            'Id': 'abcd'
+        },
+        {
+            'Id': 'abbb'
+        },
     ]
     mock_containers = [
         {
@ -66,17 +69,34 @@ def test_cleanup_containers(mock_client, now):
     ]
     mock_client.inspect_container.side_effect = iter(mock_containers)
     docker_gc.cleanup_containers(mock_client, max_container_age, False, None)
-    mock_client.remove_container.assert_called_once_with(container='abcd',
-                                                         v=True)
+    mock_client.remove_container.assert_called_once_with(container='abcd', v=True)


 def test_filter_excluded_containers():
     mock_containers = [
-        {'Labels': {'toot': ''}},
-        {'Labels': {'too': 'lol'}},
-        {'Labels': {'toots': 'lol'}},
-        {'Labels': {'foo': 'bar'}},
-        {'Labels': None},
+        {
+            'Labels': {
+                'toot': ''
+            }
+        },
+        {
+            'Labels': {
+                'too': 'lol'
+            }
+        },
+        {
+            'Labels': {
+                'toots': 'lol'
+            }
+        },
+        {
+            'Labels': {
+                'foo': 'bar'
+            }
+        },
+        {
+            'Labels': None
+        },
     ]
     result = docker_gc.filter_excluded_containers(mock_containers, None)
     assert mock_containers == list(result)
@ -88,11 +108,7 @@ def test_filter_excluded_containers():
         mock_containers,
         exclude_labels,
     )
-    assert [
-        mock_containers[0],
-        mock_containers[2],
-        mock_containers[4]
-    ] == list(result)
+    assert [mock_containers[0], mock_containers[2], mock_containers[4]] == list(result)
     exclude_labels = [
         docker_gc.ExcludeLabel(key='too*', value='lol'),
     ]
@ -100,18 +116,18 @@ def test_filter_excluded_containers():
         mock_containers,
         exclude_labels,
     )
-    assert [
-        mock_containers[0],
-        mock_containers[3],
-        mock_containers[4]
-    ] == list(result)
+    assert [mock_containers[0], mock_containers[3], mock_containers[4]] == list(result)


 def test_cleanup_images(mock_client, now):
     max_image_age = now
     mock_client.images.return_value = images = [
-        {'Id': 'abcd'},
-        {'Id': 'abbb'},
+        {
+            'Id': 'abcd'
+        },
+        {
+            'Id': 'abbb'
+        },
     ]
     mock_images = [
         {
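The expectations in these label tests imply glob-style matching: the pattern key 'too*' excludes 'too' and 'toots' when the value is 'lol', while 'toot' survives because its value is empty. A hedged reconstruction of such a filter with stdlib fnmatch (the real docker_custodian implementation may differ in detail):

    from collections import namedtuple
    from fnmatch import fnmatch

    ExcludeLabel = namedtuple("ExcludeLabel", ["key", "value"])

    def filter_excluded_containers(containers, exclude_labels=None):
        # Keep every container whose labels match no exclude pattern.
        for container in containers:
            labels = container.get("Labels") or {}
            excluded = any(
                fnmatch(key, pattern.key) and fnmatch(value, pattern.value or "*")
                for pattern in (exclude_labels or [])
                for key, value in labels.items()
            )
            if not excluded:
                yield container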
@ -152,8 +168,7 @@ def test_cleanup_volumes(mock_client):

     docker_gc.cleanup_volumes(mock_client, False)
     assert mock_client.remove_volume.mock_calls == [
-        mock.call(name=volume['Name'])
-        for volume in reversed(volumes['Volumes'])
+        mock.call(name=volume['Name']) for volume in reversed(volumes['Volumes'])
     ]


@ -205,35 +220,56 @@ def test_filter_images_in_use():
 def test_filter_images_in_use_by_id(mock_client, now):
     mock_client._version = '1.21'
     mock_client.containers.return_value = [
-        {'Id': 'abcd', 'ImageID': '1'},
-        {'Id': 'abbb', 'ImageID': '2'},
-    ]
-    mock_containers = [
-        {
-            'Id': 'abcd',
-            'Name': 'one',
-            'State': {
-                'Running': False,
-                'FinishedAt': '2014-01-01T01:01:01Z'
-            }
-        },
-        {
-            'Id': 'abbb',
-            'Name': 'two',
-            'State': {
-                'Running': True,
-                'FinishedAt': '2014-01-01T01:01:01Z'
-            }
-        }
-    ]
+        {
+            'Id': 'abcd',
+            'ImageID': '1'
+        },
+        {
+            'Id': 'abbb',
+            'ImageID': '2'
+        },
+    ]
+    mock_containers = [{
+        'Id': 'abcd',
+        'Name': 'one',
+        'State': {
+            'Running': False,
+            'FinishedAt': '2014-01-01T01:01:01Z'
+        }
+    }, {
+        'Id': 'abbb',
+        'Name': 'two',
+        'State': {
+            'Running': True,
+            'FinishedAt': '2014-01-01T01:01:01Z'
+        }
+    }]
     mock_client.inspect_container.side_effect = iter(mock_containers)
     mock_client.images.return_value = [
-        {'Id': '1', 'Created': '2014-01-01T01:01:01Z'},
-        {'Id': '2', 'Created': '2014-01-01T01:01:01Z'},
-        {'Id': '3', 'Created': '2014-01-01T01:01:01Z'},
-        {'Id': '4', 'Created': '2014-01-01T01:01:01Z'},
-        {'Id': '5', 'Created': '2014-01-01T01:01:01Z'},
-        {'Id': '6', 'Created': '2014-01-01T01:01:01Z'},
+        {
+            'Id': '1',
+            'Created': '2014-01-01T01:01:01Z'
+        },
+        {
+            'Id': '2',
+            'Created': '2014-01-01T01:01:01Z'
+        },
+        {
+            'Id': '3',
+            'Created': '2014-01-01T01:01:01Z'
+        },
+        {
+            'Id': '4',
+            'Created': '2014-01-01T01:01:01Z'
+        },
+        {
+            'Id': '5',
+            'Created': '2014-01-01T01:01:01Z'
+        },
+        {
+            'Id': '6',
+            'Created': '2014-01-01T01:01:01Z'
+        },
     ]
     mock_client.inspect_image.side_effect = lambda image: {
         'Id': image,
@ -252,34 +288,34 @@ def test_filter_excluded_images():
         'other:12345',
     ])
     images = [
         {
             'RepoTags': ['<none>:<none>'],
             'Id': 'babababababaabababab'
         },
         {
             'RepoTags': ['user/one:latest', 'user/one:abcd']
         },
         {
             'RepoTags': ['other:abcda']
         },
         {
             'RepoTags': ['other:12345']
         },
         {
             'RepoTags': ['new_image:latest', 'new_image:123']
         },
     ]
     expected = [
         {
             'RepoTags': ['<none>:<none>'],
             'Id': 'babababababaabababab'
         },
         {
             'RepoTags': ['other:abcda']
         },
         {
             'RepoTags': ['new_image:latest', 'new_image:123']
         },
     ]
     actual = docker_gc.filter_excluded_images(images, exclude_set)
     assert list(actual) == expected
@ -292,35 +328,34 @@ def test_filter_excluded_images_advanced():
         'user/repo-*:tag',
     ])
     images = [
         {
             'RepoTags': ['<none>:<none>'],
             'Id': 'babababababaabababab'
         },
         {
             'RepoTags': ['user/one:latest', 'user/one:abcd']
         },
         {
             'RepoTags': ['user/foo:test']
         },
         {
             'RepoTags': ['user/foo:tag123']
         },
         {
             'RepoTags': ['user/repo-1:tag']
         },
         {
             'RepoTags': ['user/repo-2:tag']
         },
-
     ]
     expected = [
         {
             'RepoTags': ['<none>:<none>'],
             'Id': 'babababababaabababab'
         },
         {
             'RepoTags': ['user/foo:test'],
         },
     ]
     actual = docker_gc.filter_excluded_images(images, exclude_set)
     assert list(actual) == expected
@ -355,16 +390,11 @@ def test_remove_image_new_image_not_removed(mock_client, image, later_time):
 def test_remove_image_with_tags(mock_client, image, now):
     image_id = 'abcd'
     repo_tags = ['user/one:latest', 'user/one:12345']
-    image_summary = {
-        'Id': image_id,
-        'RepoTags': repo_tags
-    }
+    image_summary = {'Id': image_id, 'RepoTags': repo_tags}
     mock_client.inspect_image.return_value = image
     docker_gc.remove_image(mock_client, image_summary, now, False)

-    assert mock_client.remove_image.mock_calls == [
-        mock.call(image=tag) for tag in repo_tags
-    ]
+    assert mock_client.remove_image.mock_calls == [mock.call(image=tag) for tag in repo_tags]


 def test_api_call_success():
@ -376,40 +406,30 @@ def test_api_call_success():


 def test_api_call_with_timeout():
-    func = mock.Mock(
-        side_effect=requests.exceptions.ReadTimeout("msg"),
-        __name__="remove_image")
+    func = mock.Mock(side_effect=requests.exceptions.ReadTimeout("msg"), __name__="remove_image")
     image = "abcd"

-    with mock.patch(
-            'docker_custodian.docker_gc.log',
-            autospec=True) as mock_log:
+    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
         docker_gc.api_call(func, image=image)

     func.assert_called_once_with(image="abcd")
-    mock_log.warn.assert_called_once_with('Failed to call remove_image '
-                                          + 'image=abcd msg'
-                                          )
+    mock_log.warn.assert_called_once_with('Failed to call remove_image ' + 'image=abcd msg')


 def test_api_call_with_api_error():
-    func = mock.Mock(
-        side_effect=docker.errors.APIError(
-            "Ooops",
-            mock.Mock(status_code=409, reason="Conflict"),
-            explanation="failed"),
-        __name__="remove_image")
+    func = mock.Mock(side_effect=docker.errors.APIError("Ooops",
+                                                        mock.Mock(status_code=409,
+                                                                  reason="Conflict"),
+                                                        explanation="failed"),
+                     __name__="remove_image")
     image = "abcd"

-    with mock.patch(
-            'docker_custodian.docker_gc.log',
-            autospec=True) as mock_log:
+    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
         docker_gc.api_call(func, image=image)

     func.assert_called_once_with(image="abcd")
-    mock_log.warn.assert_called_once_with(
-        'Error calling remove_image image=abcd '
-        '409 Client Error: Conflict ("failed")')
+    mock_log.warn.assert_called_once_with('Error calling remove_image image=abcd '
+                                          '409 Client Error: Conflict ("failed")')


 def days_as_seconds(num):
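Both api_call tests exercise the same error-swallowing wrapper: kwargs are forwarded untouched, and Timeout/APIError are converted into warnings so one failed call cannot abort a whole cleanup sweep. A reconstruction consistent with the asserted log messages (the real docker_gc.api_call may differ slightly):

    import logging

    import docker.errors
    import requests.exceptions

    log = logging.getLogger(__name__)

    def api_call(func, **kwargs):
        # Render kwargs the way the asserted messages do: "image=abcd".
        params = " ".join("{}={}".format(k, v) for k, v in kwargs.items())
        try:
            return func(**kwargs)
        except requests.exceptions.Timeout as e:
            log.warn("Failed to call {} {} {}".format(func.__name__, params, e))
        except docker.errors.APIError as ae:
            log.warn("Error calling {} {} {}".format(func.__name__, params, ae))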
@ -425,13 +445,13 @@ def test_get_args_with_defaults():


 def test_get_args_with_args():
-    with mock.patch(
-            'docker_custodian.docker_gc.timedelta_type',
-            autospec=True
-    ) as mock_timedelta_type:
+    with mock.patch('docker_custodian.docker_gc.timedelta_type',
+                    autospec=True) as mock_timedelta_type:
         opts = docker_gc.get_args(args=[
-            '--max-image-age', '30 days',
-            '--max-container-age', '3d',
+            '--max-image-age',
+            '30 days',
+            '--max-container-age',
+            '3d',
         ])
     assert mock_timedelta_type.mock_calls == [
         mock.call('30 days'),
@ -444,8 +464,7 @@ def test_get_args_with_args():
 def test_get_all_containers(mock_client):
     count = 10
     mock_client.containers.return_value = [mock.Mock() for _ in range(count)]
-    with mock.patch('docker_custodian.docker_gc.log',
-                    autospec=True) as mock_log:
+    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
         containers = docker_gc.get_all_containers(mock_client)
     assert containers == mock_client.containers.return_value
     mock_client.containers.assert_called_once_with(all=True)
@ -455,8 +474,7 @@ def test_get_all_containers(mock_client):
 def test_get_all_images(mock_client):
     count = 7
     mock_client.images.return_value = [mock.Mock() for _ in range(count)]
-    with mock.patch('docker_custodian.docker_gc.log',
-                    autospec=True) as mock_log:
+    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
         images = docker_gc.get_all_images(mock_client)
     assert images == mock_client.images.return_value
     mock_log.info.assert_called_with("Found %s images", count)
@ -464,11 +482,8 @@ def test_get_all_images(mock_client):
 def test_get_dangling_volumes(mock_client):
     count = 4
-    mock_client.volumes.return_value = {
-        'Volumes': [mock.Mock() for _ in range(count)]
-    }
-    with mock.patch('docker_custodian.docker_gc.log',
-                    autospec=True) as mock_log:
+    mock_client.volumes.return_value = {'Volumes': [mock.Mock() for _ in range(count)]}
+    with mock.patch('docker_custodian.docker_gc.log', autospec=True) as mock_log:
         volumes = docker_gc.get_dangling_volumes(mock_client)
     assert volumes == mock_client.volumes.return_value['Volumes']
     mock_log.info.assert_called_with("Found %s dangling volumes", count)
@ -480,7 +495,8 @@ def test_build_exclude_set():
         'repo/foo:12345',
         'duplicate:latest',
     ]
-    exclude_image_file = StringIO(textwrap.dedent("""
+    exclude_image_file = StringIO(
+        textwrap.dedent("""
         # Exclude this one because
         duplicate:latest
         # Also this one
|
|||||||
|
|
||||||
|
|
||||||
def test_main(mock_client):
|
def test_main(mock_client):
|
||||||
with mock.patch(
|
with mock.patch('docker_custodian.docker_gc.docker.APIClient', return_value=mock_client):
|
||||||
'docker_custodian.docker_gc.docker.APIClient',
|
|
||||||
return_value=mock_client):
|
|
||||||
|
|
||||||
with mock.patch(
|
with mock.patch('docker_custodian.docker_gc.get_args', autospec=True) as mock_get_args:
|
||||||
'docker_custodian.docker_gc.get_args',
|
|
||||||
autospec=True) as mock_get_args:
|
|
||||||
mock_get_args.return_value = mock.Mock(
|
mock_get_args.return_value = mock.Mock(
|
||||||
max_image_age=100,
|
max_image_age=100,
|
||||||
max_container_age=200,
|
max_container_age=200,
|
||||||
|
@ -10,9 +10,16 @@ default_section = THIRDPARTY
 known_first_party = dockertidy
 sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
 force_single_line = true
-line_length = 110
+line_length = 99
 skip_glob = **/env/*,**/docs/*

+[yapf]
+based_on_style = google
+column_limit = 99
+dedent_closing_brackets = true
+coalesce_brackets = true
+split_before_logical_operator = true
+
 [tool:pytest]
 filterwarnings =
     ignore::FutureWarning
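The new [yapf] block lines up with the flake8 and isort limits of 99 columns; dedent_closing_brackets and coalesce_brackets are what produce the exploded dict literals and the `[{ ... }]` style visible throughout the reformatted tests above. The same style can be reproduced through yapf's Python API, sketched here (normally the setup.cfg section shown in this hunk is picked up automatically):

    from yapf.yapflib.yapf_api import FormatCode

    source = "mock_client.containers.return_value = [{'Id': 'abcd'}, {'Id': 'abbb'}]\n"
    formatted, changed = FormatCode(
        source,
        style_config='{based_on_style: google, column_limit: 99, '
                     'dedent_closing_brackets: true, coalesce_brackets: true}')
    print(formatted)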
setup.py (4 changed lines)
@ -71,7 +71,7 @@ setup(
         "colorama==0.4.3",
         "docker==4.2.0",
         "docker-pycreds==0.4.0",
-        "environs==7.2.0",
+        "environs==7.3.0",
         "idna==2.9",
         "importlib-metadata==1.5.0; python_version < '3.8'",
         "ipaddress==1.0.23",
@ -90,7 +90,7 @@ setup(
         "six==1.14.0",
         "urllib3==1.25.8",
         "websocket-client==0.57.0",
-        "zipp==3.0.0",
+        "zipp==3.1.0",
     ],
     dependency_links=[],
     setup_requires=["setuptools_scm",],