fork: initial commit

This commit is contained in:
Robert Kaussow 2020-03-01 18:42:29 +01:00
parent cc0fdbbfd9
commit 5218ebd29c
40 changed files with 2408 additions and 539 deletions

343
.drone.jsonnet Normal file
View File

@ -0,0 +1,343 @@
// Step template: produces one Drone pipeline step for the given Python
// interpreter version. The step installs the test requirements plus the
// package itself and then exercises the installed CLI.
// NOTE(review): the step name ends in '-pytest' but only runs
// 'docker-tidy --help', not pytest — confirm this is intended.
local PythonVersion(pyversion='3.5') = {
// e.g. 'python35-pytest' (dots stripped from the version string).
name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
image: 'python:' + pyversion,
environment: {
PY_COLORS: 1,
},
commands: [
'pip install -r test-requirements.txt -qq',
'pip install -qq .',
// CLI smoke test of the freshly installed package.
'docker-tidy --help',
],
depends_on: [
'clone',
],
};
// Lint pipeline: runs flake8 against the package sources. Triggered on
// pushes to master, tags and pull requests; all other pipelines hang off
// this one via depends_on.
local PipelineLint = {
  kind: 'pipeline',
  name: 'lint',

  platform: {
    os: 'linux',
    arch: 'amd64',
  },

  steps: [
    {
      name: 'flake8',
      image: 'python:3.7',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'pip install -r test-requirements.txt -qq',
        'pip install -qq .',
        'flake8 ./dockertidy',
      ],
    },
  ],

  trigger: {
    ref: [
      'refs/heads/master',
      'refs/tags/**',
      'refs/pull/**',
    ],
  },
};
// Test pipeline: one smoke-test step per supported Python interpreter,
// generated from a version list. Runs after the lint pipeline.
local PipelineTest = {
  kind: 'pipeline',
  name: 'test',

  platform: {
    os: 'linux',
    arch: 'amd64',
  },

  // Comprehension keeps the version matrix in one place; evaluation
  // order matches the list order.
  steps: [
    PythonVersion(pyversion=version)
    for version in ['3.5', '3.6', '3.7', '3.8-rc']
  ],

  depends_on: [
    'lint',
  ],

  trigger: {
    ref: [
      'refs/heads/master',
      'refs/tags/**',
      'refs/pull/**',
    ],
  },
};
// Security pipeline: static security analysis of the code base with
// bandit. Runs only after the test pipeline has succeeded.
local PipelineSecurity = {
  kind: 'pipeline',
  name: 'security',

  platform: {
    os: 'linux',
    arch: 'amd64',
  },

  steps: [
    {
      name: 'bandit',
      image: 'python:3.7',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'pip install -r test-requirements.txt -qq',
        'pip install -qq .',
        // Scan the package sources, excluding the test suite.
        'bandit -r ./dockertidy -x ./dockertidy/tests',
      ],
    },
  ],

  depends_on: [
    'test',
  ],

  trigger: {
    ref: [
      'refs/heads/master',
      'refs/tags/**',
      'refs/pull/**',
    ],
  },
};
// Build pipeline: builds the sdist/wheel artifacts, writes a sha256
// checksum file and — on tag refs only — publishes a GitHub release and
// uploads the package to PyPI. Runs after the security pipeline.
local PipelineBuildPackage = {
kind: 'pipeline',
name: 'build-package',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'build',
image: 'python:3.7',
commands: [
'python setup.py sdist bdist_wheel',
],
},
{
// Checksum every artifact in dist/ so releases can be verified.
name: 'checksum',
image: 'alpine',
commands: [
'cd dist/ && sha256sum * > ../sha256sum.txt',
],
},
{
// GitHub release with artifacts + checksum file; tag builds only.
name: 'publish-github',
image: 'plugins/github-release',
settings: {
overwrite: true,
api_key: { from_secret: 'github_token' },
files: ['dist/*', 'sha256sum.txt'],
title: '${DRONE_TAG}',
note: 'CHANGELOG.md',
},
when: {
ref: ['refs/tags/**'],
},
},
{
// PyPI upload; skip_build reuses the artifacts from the build step.
name: 'publish-pypi',
image: 'plugins/pypi',
settings: {
username: { from_secret: 'pypi_username' },
password: { from_secret: 'pypi_password' },
repository: 'https://upload.pypi.org/legacy/',
skip_build: true,
},
when: {
ref: ['refs/tags/**'],
},
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/master', 'refs/tags/**', 'refs/pull/**'],
},
};
// Pipeline template: builds and publishes the Docker image for one CPU
// architecture. Pull requests get a dry-run build only; master/tag refs
// push the image with an auto-generated, arch-suffixed tag.
local PipelineBuildContainer(arch='amd64') = {
kind: 'pipeline',
name: 'build-container-' + arch,
platform: {
os: 'linux',
arch: arch,
},
steps: [
{
// Build the wheel that the Dockerfile installs into the image.
name: 'build',
image: 'python:3.7',
commands: [
'python setup.py bdist_wheel',
],
},
{
// PR builds: verify the image builds without pushing it.
name: 'dryrun',
image: 'plugins/docker:18-linux-' + arch,
settings: {
dry_run: true,
dockerfile: 'Dockerfile',
repo: 'xoxys/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
when: {
ref: ['refs/pull/**'],
},
},
{
// Push with auto tags (e.g. 'latest-amd64'); master/tag refs only.
name: 'publish',
image: 'plugins/docker:18-linux-' + arch,
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'Dockerfile',
repo: 'xoxys/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
when: {
ref: ['refs/heads/master', 'refs/tags/**'],
},
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/master', 'refs/tags/**', 'refs/pull/**'],
},
};
// Docs pipeline: builds the Hugo documentation site and deploys it over
// SSH/SCP. The freeze/publish/cleanup step order keeps the live site
// symlink pointing at a consistent copy while files are replaced.
// concurrency.limit=1 prevents two deployments racing on the symlink.
local PipelineDocs = {
kind: 'pipeline',
name: 'docs',
platform: {
os: 'linux',
arch: 'amd64',
},
concurrency: {
limit: 1,
},
steps: [
{
// Fetch the hugo-geekdoc theme into the docs tree.
name: 'assets',
image: 'byrnedo/alpine-curl',
commands: [
'mkdir -p docs/themes/hugo-geekdoc/',
'curl -L https://github.com/xoxys/hugo-geekdoc/releases/latest/download/hugo-geekdoc.tar.gz | tar -xz -C docs/themes/hugo-geekdoc/ --strip-components=1',
],
},
{
// Build the site; acts as a render test before deployment.
name: 'test',
image: 'klakegg/hugo:0.59.1-ext-alpine',
commands: [
'cd docs/ && hugo-official',
],
},
{
// Snapshot the current live site and point the symlink at the
// frozen copy while the new files are uploaded.
name: 'freeze',
image: 'appleboy/drone-ssh:1.5.5',
settings: {
host: { from_secret: 'ssh_host' },
key: { from_secret: 'ssh_key' },
script: [
'cp -R /var/www/virtual/geeklab/html/docker-tidy.geekdocs.de/ /var/www/virtual/geeklab/html/dockertidy_freeze/',
'ln -sfn /var/www/virtual/geeklab/html/dockertidy_freeze /var/www/virtual/geeklab/docker-tidy.geekdocs.de',
],
username: { from_secret: 'ssh_username' },
},
},
{
// Upload the freshly built site, replacing the target directory.
name: 'publish',
image: 'appleboy/drone-scp',
settings: {
host: { from_secret: 'ssh_host' },
key: { from_secret: 'ssh_key' },
rm: true,
source: 'docs/public/*',
strip_components: 2,
target: '/var/www/virtual/geeklab/html/docker-tidy.geekdocs.de/',
username: { from_secret: 'ssh_username' },
},
},
{
// Switch the symlink back to the updated site and drop the snapshot.
name: 'cleanup',
image: 'appleboy/drone-ssh:1.5.5',
settings: {
host: { from_secret: 'ssh_host' },
key: { from_secret: 'ssh_key' },
script: [
'ln -sfn /var/www/virtual/geeklab/html/docker-tidy.geekdocs.de /var/www/virtual/geeklab/docker-tidy.geekdocs.de',
'rm -rf /var/www/virtual/geeklab/html/dockertidy_freeze/',
],
username: { from_secret: 'ssh_username' },
},
},
],
depends_on: [
'build-package',
'build-container-amd64',
'build-container-arm64',
'build-container-arm',
],
trigger: {
ref: ['refs/heads/master', 'refs/tags/**'],
},
};
// Notification pipeline: pushes the multi-arch Docker manifest, syncs
// the README to Docker Hub and sends a Matrix build notification.
// Runs after docs, on master/tag refs, for successes AND failures.
local PipelineNotifications = {
  kind: 'pipeline',
  name: 'notifications',

  platform: {
    os: 'linux',
    arch: 'amd64',
  },

  steps: [
    {
      image: 'plugins/manifest',
      name: 'manifest',
      settings: {
        // Tolerate architecture images that were skipped upstream.
        ignore_missing: true,
        auto_tag: true,
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
        spec: 'manifest.tmpl',
      },
    },
    {
      name: 'readme',
      image: 'sheogorath/readme-to-dockerhub',
      environment: {
        DOCKERHUB_USERNAME: { from_secret: 'docker_username' },
        DOCKERHUB_PASSWORD: { from_secret: 'docker_password' },
        DOCKERHUB_REPO_PREFIX: 'xoxys',
        DOCKERHUB_REPO_NAME: '${DRONE_REPO_NAME}',
        README_PATH: 'README.md',
        // FIX: previous text ("Simple annotation based documentation for
        // your roles") was copy-pasted from another project and did not
        // describe docker-tidy.
        SHORT_DESCRIPTION: 'docker-tidy - Keep docker hosts tidy',
      },
    },
    {
      name: 'matrix',
      image: 'plugins/matrix',
      settings: {
        homeserver: { from_secret: 'matrix_homeserver' },
        roomid: { from_secret: 'matrix_roomid' },
        template: 'Status: **{{ build.status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.link }}) ({{ build.branch }}) by {{ build.author }}<br/> Message: {{ build.message }}',
        username: { from_secret: 'matrix_username' },
        password: { from_secret: 'matrix_password' },
      },
    },
  ],

  depends_on: [
    'docs',
  ],

  trigger: {
    ref: ['refs/heads/master', 'refs/tags/**'],
    // Notify on failed builds as well.
    status: ['success', 'failure'],
  },
};
// Top-level manifest: the file evaluates to this list of pipelines.
// Execution order is driven by each pipeline's depends_on entries:
// lint -> test -> security -> build-package / build-container-* ->
// docs -> notifications.
[
PipelineLint,
PipelineTest,
PipelineSecurity,
PipelineBuildPackage,
PipelineBuildContainer(arch='amd64'),
PipelineBuildContainer(arch='arm64'),
PipelineBuildContainer(arch='arm'),
PipelineDocs,
PipelineNotifications,
]

18
.flake8 Normal file
View File

@ -0,0 +1,18 @@
[flake8]
ignore = D103
max-line-length = 110
inline-quotes = double
exclude =
.git
.tox
__pycache__
build
dist
tests
*.pyc
*.egg-info
.cache
.eggs
env*
application-import-names = dockertidy
format = ${cyan}%(path)s:%(row)d:%(col)d${reset}: ${red_bold}%(code)s${reset} %(text)s

113
.gitignore vendored
View File

@ -1,9 +1,108 @@
*.py? # ---> Python
.*.swp # Byte-compiled / optimized / DLL files
.tox __pycache__/
dist *.py[cod]
build/ *$py.class
*.pyc
__pycache__
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
parts/
sdist/
var/
wheels/
*.egg-info/ *.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# dotenv
.env
# virtualenv
.venv
venv/
ENV/
env/
env*/
# Spyder project settings
.spyderproject
# Rope project settings
.ropeproject
# Ignore ide addons
.server-script
.on-save.json
.vscode
.pytest_cache
pip-wheel-metadata
# Hugo documentation
docs/themes/
docs/public/
resources/_gen/

View File

@ -1,21 +0,0 @@
repos:
- repo: git://github.com/pre-commit/pre-commit-hooks
rev: v2.2.1
hooks:
- id: check-added-large-files
- id: check-docstring-first
- id: check-merge-conflict
- id: check-yaml
- id: debug-statements
- id: end-of-file-fixer
exclude: CHANGELOG.md
- id: flake8
- id: name-tests-test
- id: requirements-txt-fixer
- id: trailing-whitespace
- repo: git://github.com/Yelp/detect-secrets
rev: v0.12.2
hooks:
- id: detect-secrets
args: ['--baseline', '.secrets.baseline']
exclude: tests/.*

View File

@ -1,22 +0,0 @@
{
"exclude": {
"files": "tests/.*",
"lines": null
},
"generated_at": "2019-04-24T14:36:38Z",
"plugins_used": [
{
"base64_limit": 4.5,
"name": "Base64HighEntropyString"
},
{
"hex_limit": 3,
"name": "HexHighEntropyString"
},
{
"name": "PrivateKeyDetector"
}
],
"results": {},
"version": "0.12.2"
}

View File

@ -1,23 +0,0 @@
language: python
matrix:
include:
- python: 2.7
env: TOXENV=py27
- python: 3.5
env: TOXENV=py35
- python: 3.6
env: TOXENV=py36
install:
- pip install tox
script:
- tox
deploy:
- provider: pypi
user: yelplabs
password:
secure: R3TcB5uz56Pu3qRAb0X1/uLQ/hCzNEi1MEAxDxNbwCfKbDZ1s/IxcjeQa4m5W8JgxBNEfa/ygbcV7UKe4wKHnuuXIve4XSd0Slde+cEY1awOFzgbeaxhnu8a1Z6lFXfOriq1fDKX3fGLvxKhoR0uonvRQO0Hx3oZWPMeT8XnoqkPTiAXkGdDhbnQbgFj5fxOq6Fd/InIkWnTSScCjHxaf4FJZISumGCAFF7PBWHOJhkYCVx/CoMK2h2Ch8rqbiVQUIYDiDrDeXkoXYXjCnoGZ1xjLqBS7TLkZgC0Ic8XZwcGdmQvUroOXnGRvs+P7J8clhO4hhEawoSAzcWcSeHBlm+45iW+s1wXD+yJ5BzZpXZHG5BlU8tPnpbY/MV8+2aYq/EPzcGbc6FR9dYyBw2Elja9pFBYzh6ZZqMuH47g12PHs95GakZ31SvqOmWG91KMKBOFDEnwrMd4Vwfn94wuMACf8y8oinO+Irvu2/FYyJ5+KIEjJwkDAEcSE4SJWCoFqcQaiSJizgJh85TIytJq39PJtHc3eax7+/uTcAqBnS+g9iGcsWSelzMJhPfUPch37jWPurDibwR6ui4S8+zpwB7LIGrzarcuqUXZmAaWrxNhCIasmcsmBbfq2YYHuV0DMRKRhhN+urRxkk8luMOQmUR7isb3YZ2b18HZGkNEEec=
on:
tags: true
condition: "$TOXENV == py27"
repo: Yelp/docker-custodian

View File

@ -1,9 +0,0 @@
FROM alpine:3.2
MAINTAINER Kyle Anderson <kwa@yelp.com>
RUN apk add -U python py-pip
ADD requirements.txt /code/requirements.txt
RUN pip install -r /code/requirements.txt
ADD docker_custodian/ /code/docker_custodian/
ADD setup.py /code/
RUN pip install --no-deps -e /code

View File

@ -187,7 +187,7 @@
same "printed page" as the copyright notice for easier same "printed page" as the copyright notice for easier
identification within third-party archives. identification within third-party archives.
Copyright [yyyy] [name of copyright owner] Copyright 2020 Robert Kaussow <mail@geeklabor.de>
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. you may not use this file except in compliance with the License.

View File

@ -1,26 +0,0 @@
.PHONY: all clean tag test
PACKAGE_VERSION = $(shell python setup.py --version)
DOCKER_REPO ?= ${USER}
BUILD_TAG ?= ${PACKAGE_VERSION}
all: test
clean:
git clean -fdx -- debian
rm -f ./dist
find . -iname '*.pyc' -delete
tag:
git tag v${PACKAGE_VERSION}
test:
tox
tests: test
.PHONY: build
build:
docker build -t ${DOCKER_REPO}/docker-custodian:${BUILD_TAG} .

48
Pipfile Normal file
View File

@ -0,0 +1,48 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
pipenv-setup = "*"
pydocstyle = "<4.0.0"
flake8 = "*"
flake8-colors = "*"
flake8-blind-except = "*"
flake8-builtins = "*"
flake8-docstrings = "<=3.0.0"
flake8-isort = "*"
flake8-logging-format = "*"
flake8-polyfill = "*"
flake8-quotes = "*"
pep8-naming = "*"
pytest = "*"
pytest-mock = "*"
pytest-cov = "*"
bandit = "*"
docker-tidy = {editable = true,path = "."}
[packages]
importlib-metadata = {version = "*",markers = "python_version<'3.8'"}
certifi = "*"
chardet = "*"
docker = "*"
docker-pycreds = "*"
idna = "*"
ipaddress = "*"
python-dateutil = "*"
pytimeparse = "*"
requests = "*"
appdirs = "*"
colorama = "*"
anyconfig = "*"
pathspec = "*"
python-json-logger = "*"
jsonschema = "*"
environs = "*"
nested-lookup = "*"
"ruamel.yaml" = "*"
websocket-client = "*"
[requires]
python_version = "3.7"

967
Pipfile.lock generated Normal file
View File

@ -0,0 +1,967 @@
{
"_meta": {
"hash": {
"sha256": "5435cb449b46e93e063eb55b6d7bd5d990e1c552d7648a35b4a5eef846914075"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.7"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"anyconfig": {
"hashes": [
"sha256:4e1674d184e5d9e56aad5321ee65612abaa7a05a03081ccf2ee452b2d557aeed"
],
"index": "pypi",
"version": "==0.9.10"
},
"appdirs": {
"hashes": [
"sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
"sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
],
"index": "pypi",
"version": "==1.4.3"
},
"attrs": {
"hashes": [
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"version": "==19.3.0"
},
"certifi": {
"hashes": [
"sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
"sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
],
"index": "pypi",
"version": "==2019.11.28"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"index": "pypi",
"version": "==3.0.4"
},
"colorama": {
"hashes": [
"sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff",
"sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"
],
"index": "pypi",
"version": "==0.4.3"
},
"docker": {
"hashes": [
"sha256:1c2ddb7a047b2599d1faec00889561316c674f7099427b9c51e8cb804114b553",
"sha256:ddae66620ab5f4bce769f64bcd7934f880c8abe6aa50986298db56735d0f722e"
],
"index": "pypi",
"version": "==4.2.0"
},
"docker-pycreds": {
"hashes": [
"sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4",
"sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49"
],
"index": "pypi",
"version": "==0.4.0"
},
"environs": {
"hashes": [
"sha256:2291ce502c9e61b8e208c8c9be4ac474e0f523c4dc23e0beb23118086e43b324",
"sha256:44700c562fb6f783640f90c2225d9a80d85d24833b4dd02d20b8ff1c83901e47"
],
"index": "pypi",
"version": "==7.2.0"
},
"idna": {
"hashes": [
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
"index": "pypi",
"version": "==2.9"
},
"importlib-metadata": {
"hashes": [
"sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302",
"sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b"
],
"index": "pypi",
"markers": "python_version < '3.8'",
"version": "==1.5.0"
},
"ipaddress": {
"hashes": [
"sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc",
"sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"
],
"index": "pypi",
"version": "==1.0.23"
},
"jsonschema": {
"hashes": [
"sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163",
"sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
],
"index": "pypi",
"version": "==3.2.0"
},
"marshmallow": {
"hashes": [
"sha256:3a94945a7461f2ab4df9576e51c97d66bee2c86155d3d3933fab752b31effab8",
"sha256:4b95c7735f93eb781dfdc4dded028108998cad759dda8dd9d4b5b4ac574cbf13"
],
"version": "==3.5.0"
},
"nested-lookup": {
"hashes": [
"sha256:23789e328bd1d0b3f9db93cf51b7103a978dd0d8a834770d2c19b365e934ab96"
],
"index": "pypi",
"version": "==0.2.21"
},
"pathspec": {
"hashes": [
"sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424",
"sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96"
],
"index": "pypi",
"version": "==0.7.0"
},
"pyrsistent": {
"hashes": [
"sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280"
],
"version": "==0.15.7"
},
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"index": "pypi",
"version": "==2.8.1"
},
"python-dotenv": {
"hashes": [
"sha256:81822227f771e0cab235a2939f0f265954ac4763cafd806d845801c863bf372f",
"sha256:92b3123fb2d58a284f76cc92bfe4ee6c502c32ded73e8b051c4f6afc8b6751ed"
],
"version": "==0.12.0"
},
"python-json-logger": {
"hashes": [
"sha256:b7a31162f2a01965a5efb94453ce69230ed208468b0bbc7fdfc56e6d8df2e281"
],
"index": "pypi",
"version": "==0.1.11"
},
"pytimeparse": {
"hashes": [
"sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd",
"sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"
],
"index": "pypi",
"version": "==1.1.8"
},
"requests": {
"hashes": [
"sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
"sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
],
"index": "pypi",
"version": "==2.23.0"
},
"ruamel.yaml": {
"hashes": [
"sha256:0962fd7999e064c4865f96fb1e23079075f4a2a14849bcdc5cdba53a24f9759b",
"sha256:099c644a778bf72ffa00524f78dd0b6476bca94a1da344130f4bf3381ce5b954"
],
"index": "pypi",
"version": "==0.16.10"
},
"ruamel.yaml.clib": {
"hashes": [
"sha256:1e77424825caba5553bbade750cec2277ef130647d685c2b38f68bc03453bac6",
"sha256:392b7c371312abf27fb549ec2d5e0092f7ef6e6c9f767bfb13e83cb903aca0fd",
"sha256:4d55386129291b96483edcb93b381470f7cd69f97585829b048a3d758d31210a",
"sha256:550168c02d8de52ee58c3d8a8193d5a8a9491a5e7b2462d27ac5bf63717574c9",
"sha256:57933a6986a3036257ad7bf283529e7c19c2810ff24c86f4a0cfeb49d2099919",
"sha256:615b0396a7fad02d1f9a0dcf9f01202bf9caefee6265198f252c865f4227fcc6",
"sha256:77556a7aa190be9a2bd83b7ee075d3df5f3c5016d395613671487e79b082d784",
"sha256:7aee724e1ff424757b5bd8f6c5bbdb033a570b2b4683b17ace4dbe61a99a657b",
"sha256:8073c8b92b06b572e4057b583c3d01674ceaf32167801fe545a087d7a1e8bf52",
"sha256:9c6d040d0396c28d3eaaa6cb20152cb3b2f15adf35a0304f4f40a3cf9f1d2448",
"sha256:a0ff786d2a7dbe55f9544b3f6ebbcc495d7e730df92a08434604f6f470b899c5",
"sha256:b1b7fcee6aedcdc7e62c3a73f238b3d080c7ba6650cd808bce8d7761ec484070",
"sha256:b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c",
"sha256:be018933c2f4ee7de55e7bd7d0d801b3dfb09d21dad0cce8a97995fd3e44be30",
"sha256:d0d3ac228c9bbab08134b4004d748cf9f8743504875b3603b3afbb97e3472947",
"sha256:d10e9dd744cf85c219bf747c75194b624cc7a94f0c80ead624b06bfa9f61d3bc",
"sha256:ea4362548ee0cbc266949d8a441238d9ad3600ca9910c3fe4e82ee3a50706973",
"sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad",
"sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e"
],
"markers": "platform_python_implementation == 'CPython' and python_version < '3.9'",
"version": "==0.2.0"
},
"six": {
"hashes": [
"sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
"sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
"version": "==1.14.0"
},
"urllib3": {
"hashes": [
"sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
"sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
],
"version": "==1.25.8"
},
"websocket-client": {
"hashes": [
"sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549",
"sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"
],
"index": "pypi",
"version": "==0.57.0"
},
"zipp": {
"hashes": [
"sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
"sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
],
"version": "==3.0.0"
}
},
"develop": {
"anyconfig": {
"hashes": [
"sha256:4e1674d184e5d9e56aad5321ee65612abaa7a05a03081ccf2ee452b2d557aeed"
],
"index": "pypi",
"version": "==0.9.10"
},
"appdirs": {
"hashes": [
"sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
"sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
],
"index": "pypi",
"version": "==1.4.3"
},
"attrs": {
"hashes": [
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"version": "==19.3.0"
},
"bandit": {
"hashes": [
"sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952",
"sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"
],
"index": "pypi",
"version": "==1.6.2"
},
"black": {
"hashes": [
"sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b",
"sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"
],
"markers": "python_version >= '3.6'",
"version": "==19.10b0"
},
"cached-property": {
"hashes": [
"sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f",
"sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
],
"version": "==1.5.1"
},
"cerberus": {
"hashes": [
"sha256:302e6694f206dd85cb63f13fd5025b31ab6d38c99c50c6d769f8fa0b0f299589"
],
"version": "==1.3.2"
},
"certifi": {
"hashes": [
"sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
"sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
],
"index": "pypi",
"version": "==2019.11.28"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"index": "pypi",
"version": "==3.0.4"
},
"click": {
"hashes": [
"sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
],
"version": "==7.0"
},
"colorama": {
"hashes": [
"sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff",
"sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"
],
"index": "pypi",
"version": "==0.4.3"
},
"coverage": {
"hashes": [
"sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3",
"sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c",
"sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0",
"sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477",
"sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a",
"sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf",
"sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691",
"sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73",
"sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987",
"sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894",
"sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e",
"sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef",
"sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf",
"sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68",
"sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8",
"sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954",
"sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2",
"sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40",
"sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc",
"sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc",
"sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e",
"sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d",
"sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f",
"sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc",
"sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301",
"sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea",
"sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb",
"sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af",
"sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52",
"sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37",
"sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0"
],
"version": "==5.0.3"
},
"distlib": {
"hashes": [
"sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21"
],
"version": "==0.3.0"
},
"docker": {
"hashes": [
"sha256:1c2ddb7a047b2599d1faec00889561316c674f7099427b9c51e8cb804114b553",
"sha256:ddae66620ab5f4bce769f64bcd7934f880c8abe6aa50986298db56735d0f722e"
],
"index": "pypi",
"version": "==4.2.0"
},
"docker-pycreds": {
"hashes": [
"sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4",
"sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49"
],
"index": "pypi",
"version": "==0.4.0"
},
"docker-tidy": {
"editable": true,
"path": "."
},
"entrypoints": {
"hashes": [
"sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
"sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
],
"version": "==0.3"
},
"environs": {
"hashes": [
"sha256:2291ce502c9e61b8e208c8c9be4ac474e0f523c4dc23e0beb23118086e43b324",
"sha256:44700c562fb6f783640f90c2225d9a80d85d24833b4dd02d20b8ff1c83901e47"
],
"index": "pypi",
"version": "==7.2.0"
},
"first": {
"hashes": [
"sha256:8d8e46e115ea8ac652c76123c0865e3ff18372aef6f03c22809ceefcea9dec86",
"sha256:ff285b08c55f8c97ce4ea7012743af2495c9f1291785f163722bd36f6af6d3bf"
],
"version": "==2.0.2"
},
"flake8": {
"hashes": [
"sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb",
"sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca"
],
"index": "pypi",
"version": "==3.7.9"
},
"flake8-blind-except": {
"hashes": [
"sha256:aca3356633825544cec51997260fe31a8f24a1a2795ce8e81696b9916745e599"
],
"index": "pypi",
"version": "==0.1.1"
},
"flake8-builtins": {
"hashes": [
"sha256:29bc0f7e68af481d088f5c96f8aeb02520abdfc900500484e3af969f42a38a5f",
"sha256:c44415fb19162ef3737056e700d5b99d48c3612a533943b4e16419a5d3de3a64"
],
"index": "pypi",
"version": "==1.4.2"
},
"flake8-colors": {
"hashes": [
"sha256:508fcf6efc15826f2146b42172ab41999555e07af43fcfb3e6a28ad596189560"
],
"index": "pypi",
"version": "==0.1.6"
},
"flake8-docstrings": {
"hashes": [
"sha256:3d5a31c7ec6b7367ea6506a87ec293b94a0a46c0bce2bb4975b7f1d09b6f3717",
"sha256:a256ba91bc52307bef1de59e2a009c3cf61c3d0952dbe035d6ff7208940c2edc"
],
"index": "pypi",
"version": "==1.5.0"
},
"flake8-isort": {
"hashes": [
"sha256:64454d1f154a303cfe23ee715aca37271d4f1d299b2f2663f45b73bff14e36a9",
"sha256:aa0c4d004e6be47e74f122f5b7f36554d0d78ad8bf99b497a460dedccaa7cce9"
],
"index": "pypi",
"version": "==2.8.0"
},
"flake8-logging-format": {
"hashes": [
"sha256:ca5f2b7fc31c3474a0aa77d227e022890f641a025f0ba664418797d979a779f8"
],
"index": "pypi",
"version": "==0.6.0"
},
"flake8-polyfill": {
"hashes": [
"sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9",
"sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"
],
"index": "pypi",
"version": "==1.0.2"
},
"flake8-quotes": {
"hashes": [
"sha256:11a15d30c92ca5f04c2791bd7019cf62b6f9d3053eb050d02a135557eb118bfc"
],
"index": "pypi",
"version": "==2.1.1"
},
"gitdb": {
"hashes": [
"sha256:284a6a4554f954d6e737cddcff946404393e030b76a282c6640df8efd6b3da5e",
"sha256:598e0096bb3175a0aab3a0b5aedaa18a9a25c6707e0eca0695ba1a0baf1b2150"
],
"version": "==4.0.2"
},
"gitpython": {
"hashes": [
"sha256:43da89427bdf18bf07f1164c6d415750693b4d50e28fc9b68de706245147b9dd",
"sha256:e426c3b587bd58c482f0b7fe6145ff4ac7ae6c82673fc656f489719abca6f4cb"
],
"version": "==3.1.0"
},
"idna": {
"hashes": [
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
"index": "pypi",
"version": "==2.9"
},
"importlib-metadata": {
"hashes": [
"sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302",
"sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b"
],
"index": "pypi",
"markers": "python_version < '3.8'",
"version": "==1.5.0"
},
"ipaddress": {
"hashes": [
"sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc",
"sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"
],
"index": "pypi",
"version": "==1.0.23"
},
"isort": {
"extras": [
"pyproject"
],
"hashes": [
"sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
"sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
],
"version": "==4.3.21"
},
"jsonschema": {
"hashes": [
"sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163",
"sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
],
"index": "pypi",
"version": "==3.2.0"
},
"marshmallow": {
"hashes": [
"sha256:3a94945a7461f2ab4df9576e51c97d66bee2c86155d3d3933fab752b31effab8",
"sha256:4b95c7735f93eb781dfdc4dded028108998cad759dda8dd9d4b5b4ac574cbf13"
],
"version": "==3.5.0"
},
"mccabe": {
"hashes": [
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
],
"version": "==0.6.1"
},
"more-itertools": {
"hashes": [
"sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c",
"sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"
],
"version": "==8.2.0"
},
"nested-lookup": {
"hashes": [
"sha256:23789e328bd1d0b3f9db93cf51b7103a978dd0d8a834770d2c19b365e934ab96"
],
"index": "pypi",
"version": "==0.2.21"
},
"orderedmultidict": {
"hashes": [
"sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad",
"sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3"
],
"version": "==1.0.1"
},
"packaging": {
"hashes": [
"sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
"sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
],
"version": "==19.2"
},
"pathspec": {
"hashes": [
"sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424",
"sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96"
],
"index": "pypi",
"version": "==0.7.0"
},
"pbr": {
"hashes": [
"sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b",
"sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488"
],
"version": "==5.4.4"
},
"pep517": {
"hashes": [
"sha256:5ce351f3be71d01bb094d63253854b6139931fcaba8e2f380c02102136c51e40",
"sha256:882e2eeeffe39ccd6be6122d98300df18d80950cb5f449766d64149c94c5614a"
],
"version": "==0.8.1"
},
"pep8-naming": {
"hashes": [
"sha256:45f330db8fcfb0fba57458c77385e288e7a3be1d01e8ea4268263ef677ceea5f",
"sha256:a33d38177056321a167decd6ba70b890856ba5025f0a8eca6a3eda607da93caf"
],
"index": "pypi",
"version": "==0.9.1"
},
"pip-shims": {
"hashes": [
"sha256:1cc3e2e4e5d5863edd4760d2032b180a6ef81719277fe95404df1bb0e58b7261",
"sha256:b5bb01c4394a2e0260bddb4cfdc7e6fdd9d6e61c8febd18c3594e2ea2596c190"
],
"version": "==0.5.0"
},
"pipenv-setup": {
"hashes": [
"sha256:18ce5474261bab22b9a3cd919d70909b578b57438d452ebb88dbe22ca70f2ef2",
"sha256:5b69f8a91dd922806577d4e0c84acda1ce274657aab800749f088b46fcfe76cb"
],
"index": "pypi",
"version": "==3.0.1"
},
"pipfile": {
"hashes": [
"sha256:f7d9f15de8b660986557eb3cc5391aa1a16207ac41bc378d03f414762d36c984"
],
"version": "==0.0.2"
},
"plette": {
"extras": [
"validation"
],
"hashes": [
"sha256:46402c03e36d6eadddad2a5125990e322dd74f98160c8f2dcd832b2291858a26",
"sha256:d6c9b96981b347bddd333910b753b6091a2c1eb2ef85bb373b4a67c9d91dca16"
],
"version": "==0.2.3"
},
"pluggy": {
"hashes": [
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
],
"version": "==0.13.1"
},
"py": {
"hashes": [
"sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa",
"sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"
],
"version": "==1.8.1"
},
"pycodestyle": {
"hashes": [
"sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
"sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
],
"version": "==2.5.0"
},
"pydocstyle": {
"hashes": [
"sha256:2258f9b0df68b97bf3a6c29003edc5238ff8879f1efb6f1999988d934e432bd8",
"sha256:5741c85e408f9e0ddf873611085e819b809fca90b619f5fd7f34bd4959da3dd4",
"sha256:ed79d4ec5e92655eccc21eb0c6cf512e69512b4a97d215ace46d17e4990f2039"
],
"index": "pypi",
"version": "==3.0.0"
},
"pyflakes": {
"hashes": [
"sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
"sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
],
"version": "==2.1.1"
},
"pyparsing": {
"hashes": [
"sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
"sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
],
"version": "==2.4.6"
},
"pyrsistent": {
"hashes": [
"sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280"
],
"version": "==0.15.7"
},
"pytest": {
"hashes": [
"sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d",
"sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6"
],
"index": "pypi",
"version": "==5.3.5"
},
"pytest-cov": {
"hashes": [
"sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b",
"sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"
],
"index": "pypi",
"version": "==2.8.1"
},
"pytest-mock": {
"hashes": [
"sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f",
"sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307"
],
"index": "pypi",
"version": "==2.0.0"
},
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"index": "pypi",
"version": "==2.8.1"
},
"python-dotenv": {
"hashes": [
"sha256:81822227f771e0cab235a2939f0f265954ac4763cafd806d845801c863bf372f",
"sha256:92b3123fb2d58a284f76cc92bfe4ee6c502c32ded73e8b051c4f6afc8b6751ed"
],
"version": "==0.12.0"
},
"python-json-logger": {
"hashes": [
"sha256:b7a31162f2a01965a5efb94453ce69230ed208468b0bbc7fdfc56e6d8df2e281"
],
"index": "pypi",
"version": "==0.1.11"
},
"pytimeparse": {
"hashes": [
"sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd",
"sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"
],
"index": "pypi",
"version": "==1.1.8"
},
"pyyaml": {
"hashes": [
"sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6",
"sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf",
"sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5",
"sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e",
"sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811",
"sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e",
"sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d",
"sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20",
"sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689",
"sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994",
"sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615"
],
"version": "==5.3"
},
"regex": {
"hashes": [
"sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431",
"sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242",
"sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1",
"sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d",
"sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045",
"sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b",
"sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400",
"sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa",
"sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0",
"sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69",
"sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74",
"sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb",
"sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26",
"sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5",
"sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2",
"sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce",
"sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab",
"sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e",
"sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70",
"sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc",
"sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0"
],
"version": "==2020.2.20"
},
"requests": {
"hashes": [
"sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
"sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
],
"index": "pypi",
"version": "==2.23.0"
},
"requirementslib": {
"hashes": [
"sha256:50731ac1052473e4c7df59a44a1f3aa20f32e687110bc05d73c3b4109eebc23d",
"sha256:8b594ab8b6280ee97cffd68fc766333345de150124d5b76061dd575c3a21fe5a"
],
"version": "==1.5.3"
},
"ruamel.yaml": {
"hashes": [
"sha256:0962fd7999e064c4865f96fb1e23079075f4a2a14849bcdc5cdba53a24f9759b",
"sha256:099c644a778bf72ffa00524f78dd0b6476bca94a1da344130f4bf3381ce5b954"
],
"index": "pypi",
"version": "==0.16.10"
},
"ruamel.yaml.clib": {
"hashes": [
"sha256:1e77424825caba5553bbade750cec2277ef130647d685c2b38f68bc03453bac6",
"sha256:392b7c371312abf27fb549ec2d5e0092f7ef6e6c9f767bfb13e83cb903aca0fd",
"sha256:4d55386129291b96483edcb93b381470f7cd69f97585829b048a3d758d31210a",
"sha256:550168c02d8de52ee58c3d8a8193d5a8a9491a5e7b2462d27ac5bf63717574c9",
"sha256:57933a6986a3036257ad7bf283529e7c19c2810ff24c86f4a0cfeb49d2099919",
"sha256:615b0396a7fad02d1f9a0dcf9f01202bf9caefee6265198f252c865f4227fcc6",
"sha256:77556a7aa190be9a2bd83b7ee075d3df5f3c5016d395613671487e79b082d784",
"sha256:7aee724e1ff424757b5bd8f6c5bbdb033a570b2b4683b17ace4dbe61a99a657b",
"sha256:8073c8b92b06b572e4057b583c3d01674ceaf32167801fe545a087d7a1e8bf52",
"sha256:9c6d040d0396c28d3eaaa6cb20152cb3b2f15adf35a0304f4f40a3cf9f1d2448",
"sha256:a0ff786d2a7dbe55f9544b3f6ebbcc495d7e730df92a08434604f6f470b899c5",
"sha256:b1b7fcee6aedcdc7e62c3a73f238b3d080c7ba6650cd808bce8d7761ec484070",
"sha256:b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c",
"sha256:be018933c2f4ee7de55e7bd7d0d801b3dfb09d21dad0cce8a97995fd3e44be30",
"sha256:d0d3ac228c9bbab08134b4004d748cf9f8743504875b3603b3afbb97e3472947",
"sha256:d10e9dd744cf85c219bf747c75194b624cc7a94f0c80ead624b06bfa9f61d3bc",
"sha256:ea4362548ee0cbc266949d8a441238d9ad3600ca9910c3fe4e82ee3a50706973",
"sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad",
"sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e"
],
"markers": "platform_python_implementation == 'CPython' and python_version < '3.9'",
"version": "==0.2.0"
},
"six": {
"hashes": [
"sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
"sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
"version": "==1.14.0"
},
"smmap": {
"hashes": [
"sha256:171484fe62793e3626c8b05dd752eb2ca01854b0c55a1efc0dc4210fccb65446",
"sha256:5fead614cf2de17ee0707a8c6a5f2aa5a2fc6c698c70993ba42f515485ffda78"
],
"version": "==3.0.1"
},
"snowballstemmer": {
"hashes": [
"sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0",
"sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"
],
"version": "==2.0.0"
},
"stevedore": {
"hashes": [
"sha256:18afaf1d623af5950cc0f7e75e70f917784c73b652a34a12d90b309451b5500b",
"sha256:a4e7dc759fb0f2e3e2f7d8ffe2358c19d45b9b8297f393ef1256858d82f69c9b"
],
"version": "==1.32.0"
},
"testfixtures": {
"hashes": [
"sha256:799144b3cbef7b072452d9c36cbd024fef415ab42924b96aad49dfd9c763de66",
"sha256:cdfc3d73cb6d3d4dc3c67af84d912e86bf117d30ae25f02fe823382ef99383d2"
],
"version": "==6.14.0"
},
"toml": {
"hashes": [
"sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
"sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
],
"version": "==0.10.0"
},
"tomlkit": {
"hashes": [
"sha256:4e1bd6c9197d984528f9ff0cc9db667c317d8881288db50db20eeeb0f6b0380b",
"sha256:f044eda25647882e5ef22b43a1688fb6ab12af2fc50e8456cdfc751c873101cf"
],
"version": "==0.5.11"
},
"typed-ast": {
"hashes": [
"sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355",
"sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919",
"sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa",
"sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652",
"sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75",
"sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01",
"sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d",
"sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1",
"sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907",
"sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c",
"sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3",
"sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b",
"sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614",
"sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb",
"sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b",
"sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41",
"sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6",
"sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34",
"sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe",
"sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4",
"sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"
],
"version": "==1.4.1"
},
"typing": {
"hashes": [
"sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23",
"sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36",
"sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"
],
"version": "==3.7.4.1"
},
"urllib3": {
"hashes": [
"sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
"sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
],
"version": "==1.25.8"
},
"vistir": {
"hashes": [
"sha256:33f8e905d40a77276b3d5310c8b57c1479a4e46930042b4894fcf7ed60ad76c4",
"sha256:e47afdec8baf35032a8d17116765f751ecd2f2146d47e5af457c5de1fe5a334e"
],
"version": "==0.5.0"
},
"wcwidth": {
"hashes": [
"sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603",
"sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"
],
"version": "==0.1.8"
},
"websocket-client": {
"hashes": [
"sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549",
"sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"
],
"index": "pypi",
"version": "==0.57.0"
},
"wheel": {
"hashes": [
"sha256:8788e9155fe14f54164c1b9eb0a319d98ef02c160725587ad60f14ddc57b6f96",
"sha256:df277cb51e61359aba502208d680f90c0493adec6f0e848af94948778aed386e"
],
"version": "==0.34.2"
},
"zipp": {
"hashes": [
"sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
"sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
],
"version": "==3.0.0"
}
}
}

21
README.md Normal file
View File

@ -0,0 +1,21 @@
# docker-tidy
[![Build Status](https://img.shields.io/drone/build/xoxys/docker-tidy?logo=drone)](https://cloud.drone.io/xoxys/docker-tidy)
[![Docker Hub](https://img.shields.io/badge/docker-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/xoxys/docker-tidy)
[![Python Version](https://img.shields.io/pypi/pyversions/docker-tidy.svg)](https://pypi.org/project/docker-tidy/)
[![PyPi Status](https://img.shields.io/pypi/status/docker-tidy.svg)](https://pypi.org/project/docker-tidy/)
[![PyPi Release](https://img.shields.io/pypi/v/docker-tidy.svg)](https://pypi.org/project/docker-tidy/)
[![Codecov](https://img.shields.io/codecov/c/github/xoxys/docker-tidy)](https://codecov.io/gh/xoxys/docker-tidy)
[![License: MIT](https://img.shields.io/github/license/xoxys/docker-tidy)](LICENSE)
This is a fork of [Yelp/docker-custodian](https://github.com/Yelp/docker-custodian). Keep docker hosts tidy.
You can find the full documentation at [https://docker-tidy.geekdocs.de](https://docker-tidy.geekdocs.de/).
## License
This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details.
## Maintainers and Contributors
[Robert Kaussow](https://github.com/xoxys)

View File

@ -1,132 +0,0 @@
Docker Custodian
================
.. image:: https://travis-ci.org/Yelp/docker-custodian.svg
:target: https://travis-ci.org/Yelp/docker-custodian
Keep docker hosts tidy.
.. contents::
:backlinks: none
Install
-------
There are three installation options
Container
~~~~~~~~~
.. code::
docker pull yelp/docker-custodian
docker run -ti \
-v /var/run/docker.sock:/var/run/docker.sock \
yelp/docker-custodian dcgc --help
Debian/Ubuntu package
~~~~~~~~~~~~~~~~~~~~~
First build the package (requires `dh-virtualenv`)
.. code:: sh
dpkg-buildpackage -us -uc
Then install it
.. code:: sh
dpkg -i ../docker-custodian_*.deb
Source
~~~~~~
.. code:: sh
pip install git+https://github.com/Yelp/docker-custodian.git#egg=docker_custodian
dcgc
----
Remove old docker containers and docker images.
``dcgc`` will remove stopped containers and unused images that are older than
"max age". Running containers, and images which are used by a container are
never removed.
Maximum age can be specified with any format supported by
`pytimeparse <https://github.com/wroberts/pytimeparse>`_.
Example:
.. code:: sh
dcgc --max-container-age 3days --max-image-age 30days
Prevent images from being removed
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``dcgc`` supports an image exclude list. If you have images that you'd like
to keep around forever you can use the exclude list to prevent them from
being removed.
::
--exclude-image
Never remove images with this tag. May be specified more than once.
--exclude-image-file
Path to a file which contains a list of images to exclude, one
image tag per line.
You can also use basic pattern matching to exclude images with generic tags.
.. code::
user/repositoryA:*
user/repositoryB:?.?
user/repositoryC-*:tag
Prevent containers and associated images from being removed
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``dcgc`` also supports a container exclude list based on labels. If there are
stopped containers that you'd like to keep, then you can check the labels to
prevent them from being removed.
::
--exclude-container-label
Never remove containers that have the label key=value. =value can be
omitted and in that case only the key is checked. May be specified
more than once.
You can also use basic pattern matching to exclude generic labels.
.. code::
foo*
com.docker.compose.project=test*
com.docker*=*bar*
dcstop
------
Stop containers that have been running for too long.
``dcstop`` will ``docker stop`` containers where the container name starts
with `--prefix` and it has been running for longer than `--max-run-time`.
Example:
.. code:: sh
dcstop --max-run-time 2days --prefix "projectprefix_"

7
bin/docker-tidy Executable file
View File

@ -0,0 +1,7 @@
#!/usr/bin/env python
"""Console entry point: delegate to the dockertidy package's main()."""
import sys

import dockertidy.__main__

# Propagate the CLI's return code as the process exit status.
sys.exit(dockertidy.__main__.main())

8
debian/.gitignore vendored
View File

@ -1,8 +0,0 @@
*
!.gitignore
!changelog
!compat
!control
!copyright
!rules
!docker-custodian.links

99
debian/changelog vendored
View File

@ -1,99 +0,0 @@
docker-custodian (0.7.3) lucid; urgency=medium
* Fix handling containers with null labels
-- Matthew Mead-Briggs <mmb@yelp.com> Thu, 25 Apr 2019 03:43:55 -0700
docker-custodian (0.7.2) lucid; urgency=medium
* Fix debian links and release 0.7.2
-- Kyle Anderson <kwa@yelp.com> Wed, 21 Mar 2018 15:48:42 -0700
docker-custodian (0.7.1) lucid; urgency=medium
* Release 0.7.1
-- Kyle Anderson <kwa@yelp.com> Wed, 21 Mar 2018 15:26:16 -0700
docker-custodian (0.7.0) lucid; urgency=low
* Delete volumes along with containers
-- Paul O'Connor <poc@yelp.com> Wed, 05 Oct 2016 00:58:10 -0700
docker-custodian (0.6.1) lucid; urgency=low
* New release for pypi
-- kwa <kwa@kwa-MacBookPro> Wed, 31 Aug 2016 09:49:37 -0700
docker-custodian (0.6.0) lucid; urgency=low
* Remove python 2.6 support
* Remove argparse
-- Daniel Hoherd <hoherd@yelp.com> Fri, 24 Jun 2016 13:55:49 -0700
docker-custodian (0.5.3) lucid; urgency=low
* Update docker-py
-- Alex Dudko <adudko@yelp.com> Mon, 4 Apr 2016 09:44:26 -0800
docker-custodian (0.5.2) lucid; urgency=low
* Fixed bug where never started containers that are not old were getting removed
-- Semir Patel <spatel@yelp.com> Tue, 15 Dec 2015 09:44:26 -0800
docker-custodian (0.5.0) lucid; urgency=low
* Add option to exclude images from removal by dcgc
-- Daniel Nephin <dnephin@yelp.com> Tue, 21 Jul 2015 11:14:38 -0700
docker-custodian (0.4.0) lucid; urgency=low
* Renamed to docker-custodian
* Changed defaults of dcgc to not remove anything
-- Daniel Nephin <dnephin@yelp.com> Mon, 29 Jun 2015 18:48:22 -0700
docker-custodian (0.3.3) lucid; urgency=low
* Bug fixes for removing images by Id and with multiple tags
-- Daniel Nephin <dnephin@yelp.com> Thu, 04 Jun 2015 13:24:14 -0700
docker-custodian (0.3.2) lucid; urgency=low
* docker-custodian should now remove image names before trying to remove
by id, so that images tagged with more than one name are removed
correctly
-- Daniel Nephin <dnephin@yelp.com> Tue, 02 Jun 2015 13:26:56 -0700
docker-custodian (0.3.1) lucid; urgency=low
* Fix broken commands
-- Daniel Nephin <dnephin@yelp.com> Mon, 09 Mar 2015 17:58:03 -0700
docker-custodian (0.3.0) lucid; urgency=low
* Change age and time options to support pytimeparse formats
-- Daniel Nephin <dnephin@yelp.com> Fri, 06 Mar 2015 13:30:36 -0800
docker-custodian (0.2.0) lucid; urgency=low
* Add docker-autostop
-- Daniel Nephin <dnephin@yelp.com> Wed, 28 Jan 2015 15:37:40 -0800
docker-custodian (0.1.0) lucid; urgency=low
* Initial release
-- Daniel Nephin <dnephin@yelp.com> Thu, 02 Oct 2014 11:13:43 -0700

1
debian/compat vendored
View File

@ -1 +0,0 @@
7

9
debian/control vendored
View File

@ -1,9 +0,0 @@
Source: docker-custodian
Maintainer: Daniel Nephin <dnephin@yelp.com>
Build-Depends:
dh-virtualenv,
Depends: python2.7
Package: docker-custodian
Architecture: any
Description: Remove old Docker containers and images that are no longer in use

View File

@ -1,2 +0,0 @@
opt/venvs/docker-custodian/bin/dcgc usr/bin/dcgc
opt/venvs/docker-custodian/bin/dcstop usr/bin/dcstop

21
debian/rules vendored
View File

@ -1,21 +0,0 @@
#!/usr/bin/make -f
# -*- makefile -*-
export DH_OPTIONS
%:
dh $@ --with python-virtualenv
override_dh_virtualenv:
dh_virtualenv --python python2.7
# do not call `make clean` as part of packaging
override_dh_auto_clean:
true
# do not call `make` as part of packaging
override_dh_auto_build:
true
override_dh_auto_test:
true

View File

@ -1,4 +0,0 @@
# -*- coding: utf8 -*-
__version_info__ = (0, 7, 3)
__version__ = '%d.%d.%d' % __version_info__

View File

@ -1 +0,0 @@
# -*- coding: utf8 -*-

95
dockertidy/Cli.py Normal file
View File

@ -0,0 +1,95 @@
#!/usr/bin/env python3
"""Entrypoint and CLI handler."""
import argparse
import logging
import os
import sys
import dockertidy.Exception
from dockertidy.Config import SingleConfig
from dockertidy.Utils import SingleLog
from dockertidy.Utils import timedelta_type
from importlib_metadata import version, PackageNotFoundError
class DockerTidy:
    """Main CLI object: parses command line arguments and loads the configuration.

    Instantiating the class performs the full CLI bootstrap: logger setup,
    argument parsing and config resolution (which may terminate the process
    via ``sysexit_with_message`` on invalid input).
    """

    def __init__(self):
        self.log = SingleLog()
        self.logger = self.log.logger
        self.args = self._cli_args()
        self.config = self._get_config()

    def _cli_args(self):
        """
        Use argparse for parsing CLI arguments.

        :return: args object (flat dict of parsed options)
        """
        # NOTE(review): the original description was copied verbatim from the
        # ansible-doctor project; replaced with a docker-tidy description.
        parser = argparse.ArgumentParser(
            description="Keep docker hosts tidy")
        parser.add_argument("-v", dest="logging.level", action="append_const", const=-1,
                            help="increase log level")
        parser.add_argument("-q", dest="logging.level", action="append_const",
                            const=1, help="decrease log level")
        # version(__name__) would look up the distribution "dockertidy.Cli",
        # which is never installed; use the published package name instead.
        parser.add_argument("--version", action="version",
                            version=version("docker-tidy"))

        subparsers = parser.add_subparsers(help="sub-command help")

        parser_gc = subparsers.add_parser(
            "gc", help="Run docker garbage collector.")
        parser_gc.add_argument(
            "--max-container-age",
            type=timedelta_type,
            help="Maximum age for a container. Containers older than this age "
                 "will be removed. Age can be specified in any pytimeparse "
                 "supported format.")
        parser_gc.add_argument(
            "--max-image-age",
            type=timedelta_type,
            help="Maximum age for an image. Images older than this age will be "
                 "removed. Age can be specified in any pytimeparse supported "
                 "format.")
        parser_gc.add_argument(
            "--dangling-volumes",
            action="store_true",
            help="Dangling volumes will be removed.")
        parser_gc.add_argument(
            "--dry-run", action="store_true",
            help="Only log actions, don't remove anything.")
        parser_gc.add_argument(
            "-t", "--timeout", type=int, default=60,
            help="HTTP timeout in seconds for making docker API calls.")
        parser_gc.add_argument(
            "--exclude-image",
            action="append",
            help="Never remove images with this tag.")
        parser_gc.add_argument(
            "--exclude-image-file",
            type=argparse.FileType("r"),
            help="Path to a file which contains a list of images to exclude, one "
                 "image tag per line.")
        parser_gc.add_argument(
            "--exclude-container-label",
            action="append", type=str, default=[],
            help="Never remove containers with this label key or label key=value")

        return parser.parse_args().__dict__

    def _get_config(self):
        """Create the singleton config from parsed args.

        Exits the process with an error message when the configuration is
        invalid or the log level cannot be applied.

        :return: SingleConfig instance
        """
        try:
            config = SingleConfig(args=self.args)
        except dockertidy.Exception.ConfigError as e:
            self.log.sysexit_with_message(e)

        try:
            self.log.set_level(config.config["logging"]["level"])
        except ValueError as e:
            self.log.sysexit_with_message(
                "Can not set log level.\n{}".format(str(e)))

        self.logger.info("Using config file {}".format(config.config_file))
        return config

316
dockertidy/Config.py Normal file
View File

@ -0,0 +1,316 @@
#!/usr/bin/env python3
"""Global settings definition."""
import logging
import os
import sys
import anyconfig
import environs
import jsonschema.exceptions
import ruamel.yaml
from appdirs import AppDirs
from jsonschema._utils import format_as_index
from pkg_resources import resource_filename
import dockertidy.Exception
from dockertidy.Utils import Singleton
config_dir = AppDirs("docker-tidy").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")
class Config():
    """
    Create an object with all necessary settings.

    Settings are loaded from multiple locations in defined order (last wins):
    - default settings defined by `self._get_defaults()`
    - yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
    - provided cli parameters
    """

    # NOTE(review): several entries (role_dir, role_name, template*, custom_header,
    # exclude_files) look like leftovers from the ansible-doctor fork origin —
    # verify which are actually consumed by docker-tidy before removing.
    SETTINGS = {
        "config_file": {
            "default": "",
            "env": "CONFIG_FILE",
            "type": environs.Env().str
        },
        "role_dir": {
            "default": "",
            "env": "ROLE_DIR",
            "type": environs.Env().str
        },
        "role_name": {
            "default": "",
            "env": "ROLE_NAME",
            "type": environs.Env().str
        },
        "dry_run": {
            "default": False,
            "env": "DRY_RUN",
            "file": True,
            "type": environs.Env().bool
        },
        "logging.level": {
            "default": "WARNING",
            "env": "LOG_LEVEL",
            "file": True,
            "type": environs.Env().str
        },
        "logging.json": {
            "default": False,
            "env": "LOG_JSON",
            "file": True,
            "type": environs.Env().bool
        },
        "output_dir": {
            "default": os.getcwd(),
            "env": "OUTPUT_DIR",
            "file": True,
            "type": environs.Env().str
        },
        "template_dir": {
            "default": os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates"),
            "env": "TEMPLATE_DIR",
            "file": True,
            "type": environs.Env().str
        },
        "template": {
            "default": "readme",
            "env": "TEMPLATE",
            "file": True,
            "type": environs.Env().str
        },
        "force_overwrite": {
            "default": False,
            "env": "FORCE_OVERWRITE",
            "file": True,
            "type": environs.Env().bool
        },
        "custom_header": {
            "default": "",
            "env": "CUSTOM_HEADER",
            "file": True,
            "type": environs.Env().str
        },
        "exclude_files": {
            "default": [],
            "env": "EXCLUDE_FILES",
            "file": True,
            "type": environs.Env().list
        },
    }

    # NOTE(review): annotation definitions are ansible-doctor residue as well;
    # kept untouched to preserve the public get_annotations_* interface.
    ANNOTATIONS = {
        "meta": {
            "name": "meta",
            "automatic": True,
            "subtypes": []
        },
        "todo": {
            "name": "todo",
            "automatic": True,
            "subtypes": []
        },
        "var": {
            "name": "var",
            "automatic": True,
            "subtypes": [
                "value",
                "example",
                "description"
            ]
        },
        "example": {
            "name": "example",
            "automatic": True,
            "subtypes": []
        },
        "tag": {
            "name": "tag",
            "automatic": True,
            "subtypes": []
        },
    }

    def __init__(self, args=None):
        """
        Initialize a new settings class.

        :param args: An optional dict of options, arguments and commands from the CLI.
        :returns: None
        """
        # `args=None` avoids the shared-mutable-default pitfall of `args={}`.
        self._args = args if args is not None else {}
        # Generated JSON schema; populated by _get_defaults(). The original
        # initialized an unused `_schema` attribute while the rest of the
        # class reads/writes `schema`.
        self.schema = None
        self.config_file = default_config_file
        self.role_dir = os.getcwd()
        self.config = None
        self._set_config()
        self.is_role = self._set_is_role() or False

    def _get_args(self, args):
        """Normalize flat dotted CLI keys into a nested dict and resolve log level."""
        # Drop options the user did not pass (argparse leaves them as None).
        cleaned = dict(filter(lambda item: item[1] is not None, args.items()))

        normalized = {}
        for key, value in cleaned.items():
            normalized = self._add_dict_branch(normalized, key.split("."), value)

        # Override correct log level from argparse: each -v/-q appended a
        # -1/+1 adjustment; fold them into a clamped level name.
        levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
        log_level = levels.index(self.SETTINGS["logging.level"]["default"])
        if normalized.get("logging"):
            for adjustment in normalized["logging"]["level"]:
                log_level = min(len(levels) - 1, max(log_level + adjustment, 0))
            normalized["logging"]["level"] = levels[log_level]

        return normalized

    def _get_defaults(self):
        """Build the nested defaults dict from SETTINGS and derive the schema."""
        normalized = {}
        for key, item in self.SETTINGS.items():
            normalized = self._add_dict_branch(normalized, key.split("."), item["default"])

        # compute role_name default
        normalized["role_name"] = os.path.basename(self.role_dir)

        self.schema = anyconfig.gen_schema(normalized)
        return normalized

    def _get_envs(self):
        """Collect settings from environment variables.

        :raises dockertidy.Exception.ConfigError: on unreadable/mistyped values
        """
        normalized = {}
        for key, item in self.SETTINGS.items():
            if item.get("env"):
                # NOTE(review): prefix still says ANSIBLE_DOCTOR_ — almost
                # certainly fork residue; confirm the intended prefix
                # (e.g. DOCKER_TIDY_) before changing documented behavior.
                prefix = "ANSIBLE_DOCTOR_"
                envname = prefix + item["env"]
                try:
                    value = item["type"](envname)
                    normalized = self._add_dict_branch(normalized, key.split("."), value)
                except environs.EnvError as e:
                    # An unset variable is fine; anything else is a real error.
                    if '"{}" not set'.format(envname) in str(e):
                        pass
                    else:
                        raise dockertidy.Exception.ConfigError("Unable to read environment variable", str(e))

        return normalized

    def _set_config(self):
        """Merge defaults, config files, env vars and CLI args into self.config."""
        args = self._get_args(self._args)
        envs = self._get_envs()
        defaults = self._get_defaults()

        # preset config file path (env first, CLI args win)
        if envs.get("config_file"):
            self.config_file = self._normalize_path(envs.get("config_file"))
        if envs.get("role_dir"):
            self.role_dir = self._normalize_path(envs.get("role_dir"))

        if args.get("config_file"):
            self.config_file = self._normalize_path(args.get("config_file"))
        if args.get("role_dir"):
            self.role_dir = self._normalize_path(args.get("role_dir"))

        source_files = []
        source_files.append(self.config_file)
        source_files.append(os.path.join(os.getcwd(), ".dockertidy"))
        source_files.append(os.path.join(os.getcwd(), ".dockertidy.yml"))
        source_files.append(os.path.join(os.getcwd(), ".dockertidy.yaml"))

        for config in source_files:
            if config and os.path.exists(config):
                with open(config, "r", encoding="utf8") as stream:
                    s = stream.read()
                    try:
                        file_dict = ruamel.yaml.safe_load(s)
                    except (ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError) as e:
                        message = "{} {}".format(e.context, e.problem)
                        raise dockertidy.Exception.ConfigError(
                            "Unable to read config file {}".format(config), message
                        )

                    if self._validate(file_dict):
                        anyconfig.merge(defaults, file_dict, ac_merge=anyconfig.MS_DICTS)
                        defaults["logging"]["level"] = defaults["logging"]["level"].upper()

        if self._validate(envs):
            anyconfig.merge(defaults, envs, ac_merge=anyconfig.MS_DICTS)
        if self._validate(args):
            anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)

        # Path-like settings are normalized to absolute paths.
        fix_files = ["output_dir", "template_dir", "custom_header"]
        for file in fix_files:
            if defaults[file]:
                defaults[file] = self._normalize_path(defaults[file])

        # config_file / role_dir are bootstrap-only and not part of the
        # final config dict.
        if "config_file" in defaults:
            defaults.pop("config_file")
        if "role_dir" in defaults:
            defaults.pop("role_dir")

        defaults["logging"]["level"] = defaults["logging"]["level"].upper()
        self.config = defaults

    def _normalize_path(self, path):
        """Expand user/env vars and return an absolute path."""
        if not os.path.isabs(path):
            base = os.path.join(os.getcwd(), path)
            return os.path.abspath(os.path.expanduser(os.path.expandvars(base)))
        else:
            return path

    def _set_is_role(self):
        """Return True when role_dir looks like an Ansible role (has tasks/)."""
        if os.path.isdir(os.path.join(self.role_dir, "tasks")):
            return True

    def _validate(self, config):
        """Validate a partial config dict against the generated schema.

        :raises dockertidy.Exception.ConfigError: with a readable schema error
        :return: True on success
        """
        try:
            anyconfig.validate(config, self.schema, ac_schema_safe=False)
        except jsonschema.exceptions.ValidationError as e:
            schema_error = "Failed validating '{validator}' in schema{schema}\n{message}".format(
                validator=e.validator,
                schema=format_as_index(list(e.relative_schema_path)[:-1]),
                message=e.message
            )
            raise dockertidy.Exception.ConfigError("Configuration error", schema_error)

        return True

    def _add_dict_branch(self, tree, vector, value):
        """Recursively set value at the nested key path described by vector."""
        key = vector[0]
        tree[key] = value \
            if len(vector) == 1 \
            else self._add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
        return tree

    def get_annotations_definition(self, automatic=True):
        """Return the annotation definitions, optionally only automatic ones."""
        annotations = {}
        if automatic:
            for k, item in self.ANNOTATIONS.items():
                if "automatic" in item.keys() and item["automatic"]:
                    annotations[k] = item
        return annotations

    def get_annotations_names(self, automatic=True):
        """Return the annotation names, optionally only automatic ones."""
        annotations = []
        if automatic:
            for k, item in self.ANNOTATIONS.items():
                if "automatic" in item.keys() and item["automatic"]:
                    annotations.append(k)
        return annotations

    def get_template(self):
        """
        Get the base dir for the template to use.

        :return: str abs path
        """
        template_dir = self.config.get("template_dir")
        template = self.config.get("template")
        return os.path.realpath(os.path.join(template_dir, template))
class SingleConfig(Config, metaclass=Singleton):
    """Process-wide singleton variant of Config (one shared instance)."""

    pass

16
dockertidy/Exception.py Normal file
View File

@ -0,0 +1,16 @@
#!/usr/bin/env python3
"""Custom exceptions."""
class TidyError(Exception):
    """Generic exception class for docker-tidy.

    :param msg: human readable error description
    :param original_exception: optional underlying exception or detail text,
        appended to the message on its own line when provided
    """

    def __init__(self, msg, original_exception=""):
        # Only append the detail when one was supplied; the previous version
        # unconditionally concatenated, leaving a trailing newline on plain
        # messages.
        if original_exception:
            message = "{}\n{}".format(msg, original_exception)
        else:
            message = msg
        super().__init__(message)
        self.original_exception = original_exception


class ConfigError(TidyError):
    """Errors related to config file handling."""

    pass

239
dockertidy/Utils.py Normal file
View File

@ -0,0 +1,239 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""
import datetime
import logging
import os
import pprint
import sys
from distutils.util import strtobool
import colorama
from dateutil import tz
from pythonjsonlogger import jsonlogger
from pytimeparse import timeparse
import dockertidy.Exception
CONSOLE_FORMAT = "{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "(asctime) (levelname) (message)"
def to_bool(string):
    """Convert a truthy/falsy token to a real bool.

    Accepts the same tokens as ``distutils.util.strtobool`` (y/yes/t/true/on/1
    and n/no/f/false/off/0, case-insensitive) but avoids the deprecated
    distutils module, which was removed in Python 3.12.

    :param string: value to convert; coerced to ``str`` first
    :raises ValueError: if the value is not a recognized boolean token
    :return: bool
    """
    value = str(string).lower()
    if value in ("y", "yes", "t", "true", "on", "1"):
        return True
    if value in ("n", "no", "f", "false", "off", "0"):
        return False
    raise ValueError("invalid truth value {!r}".format(value))
def timedelta_type(value):
    """Return the :class:`datetime.datetime` for a time in the past.

    Used as an argparse ``type=`` converter for age options.

    :param value: a string containing a time format supported by
        mod:`pytimeparse`, or None
    :return: timezone-aware UTC datetime that far in the past, or None when
        ``value`` is None
    """
    # NOTE(review): pytimeparse's timeparse() returns None for unparseable
    # strings, which would raise a TypeError inside timedelta() — confirm
    # callers only pass validated input or add explicit handling.
    if value is None:
        return None
    return _datetime_seconds_ago(timeparse.timeparse(value))
def _datetime_seconds_ago(seconds):
    """Return the UTC timestamp that lies ``seconds`` in the past."""
    delta = datetime.timedelta(seconds=seconds)
    return datetime.datetime.now(tz.tzutc()) - delta
def _should_do_markup():
    """Decide whether colored console output should be emitted.

    An explicit ``PY_COLORS`` environment variable always wins; otherwise
    color is only used on a real TTY that is not a dumb terminal.
    """
    forced = os.environ.get("PY_COLORS", None)
    if forced is not None:
        return to_bool(forced)
    if os.environ.get("TERM") == "dumb":
        return False
    return sys.stdout.isatty()
# Module-level side effect: initialize colorama once at import time and strip
# ANSI color codes whenever the environment does not support markup.
colorama.init(autoreset=True, strip=not _should_do_markup())
class Singleton(type):
    """Metaclass that caches a single instance per class.

    The first instantiation constructs the object; every later call
    returns the cached instance unchanged.
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
class LogFilter(object):
    """A custom log filter which excludes log messages above the logged level."""

    def __init__(self, level):
        """
        Initialize a new custom log filter.

        :param level: highest level this filter lets through
        :returns: None
        """
        self.__limit = level

    def filter(self, logRecord):  # noqa
        # Standard handlers already drop records *below* their level; this
        # filter additionally drops records *above* the limit, so each
        # handler serves exactly one severity band.
        # https://docs.python.org/3/library/logging.html#logrecord-attributes
        return logRecord.levelno <= self.__limit
class MultilineFormatter(logging.Formatter):
    """Logging Formatter to reset color after newline characters."""

    def format(self, record):  # noqa
        # Re-arm the color reset after every line break so continuation
        # lines of a multi-line message stay consistently styled.
        continuation = "\n{}... ".format(colorama.Style.RESET_ALL)
        record.msg = record.msg.replace("\n", continuation)
        return logging.Formatter.format(self, record)
class MultilineJsonFormatter(jsonlogger.JsonFormatter):
    """Logging Formatter to remove newline characters."""

    def format(self, record):  # noqa
        # JSON log output must stay one record per line; fold embedded
        # newlines into spaces before serializing.
        record.msg = record.msg.replace("\n", " ")
        return jsonlogger.JsonFormatter.format(self, record)
class Log:
    """Console logger that writes each severity band to its own handler.

    One StreamHandler per level is attached, each restricted by LogFilter
    to exactly one severity, so every level gets its own color and stream
    (stdout for warn/info, stderr for error/critical/debug).
    """

    def __init__(self, level=logging.WARN, name="dockertidy", json=False):
        """
        Initialize the named logger and attach per-level handlers.

        :param level: initial log level limit
        :param name: logger name
        :param json: emit JSON-formatted records instead of colored text
        """
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)
        self.logger.addHandler(self._get_error_handler(json=json))
        self.logger.addHandler(self._get_warn_handler(json=json))
        self.logger.addHandler(self._get_info_handler(json=json))
        self.logger.addHandler(self._get_critical_handler(json=json))
        self.logger.addHandler(self._get_debug_handler(json=json))
        self.logger.propagate = False

    def _build_handler(self, stream, level, color, json=False):
        """Create a stream handler that serves exactly one severity band.

        Fix: the per-level factories previously routed the format string
        through the identity helpers (self.error(), self.warn(), ...) and
        the debug handler even used self.critical(); the formatted string
        is now passed directly.
        """
        handler = logging.StreamHandler(stream)
        handler.setLevel(level)
        handler.addFilter(LogFilter(level))
        handler.setFormatter(MultilineFormatter(
            CONSOLE_FORMAT.format(color, colorama.Style.RESET_ALL)))
        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))
        return handler

    def _get_error_handler(self, json=False):
        return self._build_handler(sys.stderr, logging.ERROR, colorama.Fore.RED, json=json)

    def _get_warn_handler(self, json=False):
        return self._build_handler(sys.stdout, logging.WARN, colorama.Fore.YELLOW, json=json)

    def _get_info_handler(self, json=False):
        return self._build_handler(sys.stdout, logging.INFO, colorama.Fore.CYAN, json=json)

    def _get_critical_handler(self, json=False):
        return self._build_handler(sys.stderr, logging.CRITICAL, colorama.Fore.RED, json=json)

    def _get_debug_handler(self, json=False):
        return self._build_handler(sys.stderr, logging.DEBUG, colorama.Fore.BLUE, json=json)

    def set_level(self, s):
        """Change the logger's level limit."""
        self.logger.setLevel(s)

    def debug(self, msg):
        """Format debug messages and return string."""
        return msg

    def critical(self, msg):
        """Format critical messages and return string."""
        return msg

    def error(self, msg):
        """Format error messages and return string."""
        return msg

    def warn(self, msg):
        """Format warn messages and return string."""
        return msg

    def info(self, msg):
        """Format info messages and return string."""
        return msg

    def _color_text(self, color, msg):
        """
        Colorize strings.

        :param color: colorama color settings
        :param msg: string to colorize
        :returns: string
        """
        return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)

    def sysexit(self, code=1):
        """Exit the process with the given status code."""
        sys.exit(code)

    def sysexit_with_message(self, msg, code=1):
        """Log a critical message, then exit with the given status code."""
        self.logger.critical(str(msg))
        self.sysexit(code)
class SingleLog(Log, metaclass=Singleton):
    """Singleton variant of Log: one shared logger wrapper per process."""

    pass
class FileUtils:
    """Filesystem and console helper functions."""

    @staticmethod
    def create_path(path):
        """Create ``path`` (including missing parents); no-op when it exists."""
        os.makedirs(path, exist_ok=True)

    @staticmethod
    def query_yes_no(question, default=True):
        """Ask a yes/no question via input() and return their answer.

        "question" is a string that is presented to the user.
        "default" is the presumed answer if the user just hits <Enter>.
        The "answer" return value is one of "yes" or "no".

        :raises dockertidy.Exception.InputError: when input is interrupted
            or cannot be interpreted as yes/no
        """
        prompt = "[Y/n]" if default else "[N/y]"
        try:
            # input() is safe in python3
            answer = input("{} {} ".format(question, prompt)) or default  # nosec
            return to_bool(answer)
        except (KeyboardInterrupt, ValueError) as e:
            raise dockertidy.Exception.InputError("Error while reading input", e)

8
dockertidy/__init__.py Normal file
View File

@ -0,0 +1,8 @@
"""Default package."""
__author__ = "Robert Kaussow"
__project__ = "docker-tidy"
__license__ = "Apache-2.0"
__maintainer__ = "Robert Kaussow"
__email__ = "mail@geeklabor.de"
__url__ = "https://github.com/xoxys/docker-tidy"

12
dockertidy/__main__.py Normal file
View File

@ -0,0 +1,12 @@
#!/usr/bin/env python3
"""Main program."""
from dockertidy.Cli import DockerTidy
def main():
    """Program entry point (wired up as the docker-tidy console script)."""
    # NOTE(review): constructing DockerTidy appears to drive the whole CLI
    # run — confirm against dockertidy/Cli.py.
    DockerTidy()

if __name__ == "__main__":
    main()

View File

@ -1,8 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python3
""" """Stop long running docker iamges."""
Stop docker container that have been running longer than the max_run_time and
match some prefix.
"""
import argparse import argparse
import logging import logging
import sys import sys
@ -11,39 +9,36 @@ import dateutil.parser
import docker import docker
import docker.errors import docker.errors
import requests.exceptions import requests.exceptions
from docker_custodian.args import timedelta_type
from docker.utils import kwargs_from_env from docker.utils import kwargs_from_env
from docker_custodian.args import timedelta_type
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def stop_containers(client, max_run_time, matcher, dry_run): def stop_containers(client, max_run_time, matcher, dry_run):
for container_summary in client.containers(): for container_summary in client.containers():
container = client.inspect_container(container_summary['Id']) container = client.inspect_container(container_summary["Id"])
name = container['Name'].lstrip('/') name = container["Name"].lstrip("/")
if ( if (
matcher(name) and matcher(name) and has_been_running_since(container, max_run_time)
has_been_running_since(container, max_run_time)
): ):
log.info("Stopping container %s %s: running since %s" % ( log.info("Stopping container %s %s: running since %s" % (
container['Id'][:16], container["Id"][:16],
name, name,
container['State']['StartedAt'])) container["State"]["StartedAt"]))
if not dry_run: if not dry_run:
stop_container(client, container['Id']) stop_container(client, container["Id"])
def stop_container(client, id): def stop_container(client, cid):
try: try:
client.stop(id) client.stop(cid)
except requests.exceptions.Timeout as e: except requests.exceptions.Timeout as e:
log.warn("Failed to stop container %s: %s" % (id, e)) log.warn("Failed to stop container %s: %s" % (cid, e))
except docker.errors.APIError as ae: except docker.errors.APIError as ae:
log.warn("Error stopping %s: %s" % (id, ae)) log.warn("Error stopping %s: %s" % (cid, ae))
def build_container_matcher(prefixes): def build_container_matcher(prefixes):
@ -53,7 +48,7 @@ def build_container_matcher(prefixes):
def has_been_running_since(container, min_time): def has_been_running_since(container, min_time):
started_at = container.get('State', {}).get('StartedAt') started_at = container.get("State", {}).get("StartedAt")
if not started_at: if not started_at:
return False return False
@ -67,7 +62,7 @@ def main():
stream=sys.stdout) stream=sys.stdout)
opts = get_opts() opts = get_opts()
client = docker.APIClient(version='auto', client = docker.APIClient(version="auto",
timeout=opts.timeout, timeout=opts.timeout,
**kwargs_from_env()) **kwargs_from_env())
@ -78,22 +73,22 @@ def main():
def get_opts(args=None): def get_opts(args=None):
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
'--max-run-time', "--max-run-time",
type=timedelta_type, type=timedelta_type,
help="Maximum time a container is allows to run. Time may " help="Maximum time a container is allows to run. Time may "
"be specified in any pytimeparse supported format." "be specified in any pytimeparse supported format."
) )
parser.add_argument( parser.add_argument(
'--prefix', action="append", default=[], "--prefix", action="append", default=[],
help="Only stop containers which match one of the " help="Only stop containers which match one of the "
"prefix." "prefix."
) )
parser.add_argument( parser.add_argument(
'--dry-run', action="store_true", "--dry-run", action="store_true",
help="Only log actions, don't stop anything." help="Only log actions, don't stop anything."
) )
parser.add_argument( parser.add_argument(
'-t', '--timeout', type=int, default=60, "-t", "--timeout", type=int, default=60,
help="HTTP timeout in seconds for making docker API calls." help="HTTP timeout in seconds for making docker API calls."
) )
opts = parser.parse_args(args=args) opts = parser.parse_args(args=args)

View File

@ -1,21 +1,18 @@
#!/usr/bin/env python #!/usr/bin/env python3
""" """Remove unused docker containers and images."""
Remove old docker containers and images that are no longer in use.
"""
import argparse import argparse
import fnmatch import fnmatch
import logging import logging
import sys import sys
from collections import namedtuple
import dateutil.parser import dateutil.parser
import docker import docker
import docker.errors import docker.errors
import requests.exceptions import requests.exceptions
from collections import namedtuple
from docker_custodian.args import timedelta_type
from docker.utils import kwargs_from_env from docker.utils import kwargs_from_env
from docker_custodian.args import timedelta_type
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -23,7 +20,7 @@ log = logging.getLogger(__name__)
# This seems to be something docker uses for a null/zero date # This seems to be something docker uses for a null/zero date
YEAR_ZERO = "0001-01-01T00:00:00Z" YEAR_ZERO = "0001-01-01T00:00:00Z"
ExcludeLabel = namedtuple('ExcludeLabel', ['key', 'value']) ExcludeLabel = namedtuple("ExcludeLabel", ["key", "value"])
def cleanup_containers( def cleanup_containers(
@ -40,7 +37,7 @@ def cleanup_containers(
for container_summary in reversed(list(filtered_containers)): for container_summary in reversed(list(filtered_containers)):
container = api_call( container = api_call(
client.inspect_container, client.inspect_container,
container=container_summary['Id'], container=container_summary["Id"],
) )
if not container or not should_remove_container( if not container or not should_remove_container(
container, container,
@ -49,14 +46,14 @@ def cleanup_containers(
continue continue
log.info("Removing container %s %s %s" % ( log.info("Removing container %s %s %s" % (
container['Id'][:16], container["Id"][:16],
container.get('Name', '').lstrip('/'), container.get("Name", "").lstrip("/"),
container['State']['FinishedAt'])) container["State"]["FinishedAt"]))
if not dry_run: if not dry_run:
api_call( api_call(
client.remove_container, client.remove_container,
container=container['Id'], container=container["Id"],
v=True, v=True,
) )
@ -76,22 +73,22 @@ def filter_excluded_containers(containers, exclude_container_labels):
def should_exclude_container_with_labels(container, exclude_container_labels): def should_exclude_container_with_labels(container, exclude_container_labels):
if container['Labels']: if container["Labels"]:
for exclude_label in exclude_container_labels: for exclude_label in exclude_container_labels:
if exclude_label.value: if exclude_label.value:
matching_keys = fnmatch.filter( matching_keys = fnmatch.filter(
container['Labels'].keys(), container["Labels"].keys(),
exclude_label.key, exclude_label.key,
) )
label_values_to_check = [ label_values_to_check = [
container['Labels'][matching_key] container["Labels"][matching_key]
for matching_key in matching_keys for matching_key in matching_keys
] ]
if fnmatch.filter(label_values_to_check, exclude_label.value): if fnmatch.filter(label_values_to_check, exclude_label.value):
return True return True
else: else:
if fnmatch.filter( if fnmatch.filter(
container['Labels'].keys(), container["Labels"].keys(),
exclude_label.key exclude_label.key
): ):
return True return True
@ -99,20 +96,20 @@ def should_exclude_container_with_labels(container, exclude_container_labels):
def should_remove_container(container, min_date): def should_remove_container(container, min_date):
state = container.get('State', {}) state = container.get("State", {})
if state.get('Running'): if state.get("Running"):
return False return False
if state.get('Ghost'): if state.get("Ghost"):
return True return True
# Container was created, but never started # Container was created, but never started
if state.get('FinishedAt') == YEAR_ZERO: if state.get("FinishedAt") == YEAR_ZERO:
created_date = dateutil.parser.parse(container['Created']) created_date = dateutil.parser.parse(container["Created"])
return created_date < min_date return created_date < min_date
finished_date = dateutil.parser.parse(state['FinishedAt']) finished_date = dateutil.parser.parse(state["FinishedAt"])
return finished_date < min_date return finished_date < min_date
@ -132,22 +129,22 @@ def get_all_images(client):
def get_dangling_volumes(client): def get_dangling_volumes(client):
log.info("Getting dangling volumes") log.info("Getting dangling volumes")
volumes = client.volumes({'dangling': True})['Volumes'] or [] volumes = client.volumes({"dangling": True})["Volumes"] or []
log.info("Found %s dangling volumes", len(volumes)) log.info("Found %s dangling volumes", len(volumes))
return volumes return volumes
def cleanup_images(client, max_image_age, dry_run, exclude_set): def cleanup_images(client, max_image_age, dry_run, exclude_set):
# re-fetch container list so that we don't include removed containers # re-fetch container list so that we don't include removed containers
containers = get_all_containers(client) containers = get_all_containers(client)
images = get_all_images(client) images = get_all_images(client)
if docker.utils.compare_version('1.21', client._version) < 0: if docker.utils.compare_version("1.21", client._version) < 0:
image_tags_in_use = {container['Image'] for container in containers} image_tags_in_use = {container["Image"] for container in containers}
images = filter_images_in_use(images, image_tags_in_use) images = filter_images_in_use(images, image_tags_in_use)
else: else:
# ImageID field was added in 1.21 # ImageID field was added in 1.21
image_ids_in_use = {container['ImageID'] for container in containers} image_ids_in_use = {container["ImageID"] for container in containers}
images = filter_images_in_use_by_id(images, image_ids_in_use) images = filter_images_in_use_by_id(images, image_ids_in_use)
images = filter_excluded_images(images, exclude_set) images = filter_excluded_images(images, exclude_set)
@ -157,7 +154,7 @@ def cleanup_images(client, max_image_age, dry_run, exclude_set):
def filter_excluded_images(images, exclude_set): def filter_excluded_images(images, exclude_set):
def include_image(image_summary): def include_image(image_summary):
image_tags = image_summary.get('RepoTags') image_tags = image_summary.get("RepoTags")
if no_image_tags(image_tags): if no_image_tags(image_tags):
return True return True
for exclude_pattern in exclude_set: for exclude_pattern in exclude_set:
@ -170,10 +167,10 @@ def filter_excluded_images(images, exclude_set):
def filter_images_in_use(images, image_tags_in_use): def filter_images_in_use(images, image_tags_in_use):
def get_tag_set(image_summary): def get_tag_set(image_summary):
image_tags = image_summary.get('RepoTags') image_tags = image_summary.get("RepoTags")
if no_image_tags(image_tags): if no_image_tags(image_tags):
# The repr of the image Id used by client.containers() # The repr of the image Id used by client.containers()
return set(['%s:latest' % image_summary['Id'][:12]]) return set(["%s:latest" % image_summary["Id"][:12]])
return set(image_tags) return set(image_tags)
def image_not_in_use(image_summary): def image_not_in_use(image_summary):
@ -184,21 +181,21 @@ def filter_images_in_use(images, image_tags_in_use):
def filter_images_in_use_by_id(images, image_ids_in_use): def filter_images_in_use_by_id(images, image_ids_in_use):
def image_not_in_use(image_summary): def image_not_in_use(image_summary):
return image_summary['Id'] not in image_ids_in_use return image_summary["Id"] not in image_ids_in_use
return filter(image_not_in_use, images) return filter(image_not_in_use, images)
def is_image_old(image, min_date): def is_image_old(image, min_date):
return dateutil.parser.parse(image['Created']) < min_date return dateutil.parser.parse(image["Created"]) < min_date
def no_image_tags(image_tags): def no_image_tags(image_tags):
return not image_tags or image_tags == ['<none>:<none>'] return not image_tags or image_tags == ["<none>:<none>"]
def remove_image(client, image_summary, min_date, dry_run): def remove_image(client, image_summary, min_date, dry_run):
image = api_call(client.inspect_image, image=image_summary['Id']) image = api_call(client.inspect_image, image=image_summary["Id"])
if not image or not is_image_old(image, min_date): if not image or not is_image_old(image, min_date):
return return
@ -206,13 +203,13 @@ def remove_image(client, image_summary, min_date, dry_run):
if dry_run: if dry_run:
return return
image_tags = image_summary.get('RepoTags') image_tags = image_summary.get("RepoTags")
# If there are no tags, remove the id # If there are no tags, remove the id
if no_image_tags(image_tags): if no_image_tags(image_tags):
api_call(client.remove_image, image=image_summary['Id']) api_call(client.remove_image, image=image_summary["Id"])
return return
# Remove any repository tags so we don't hit 409 Conflict # Remove any repository tags so we don't hit 409 Conflict
for image_tag in image_tags: for image_tag in image_tags:
api_call(client.remove_image, image=image_tag) api_call(client.remove_image, image=image_tag)
@ -221,18 +218,18 @@ def remove_volume(client, volume, dry_run):
if not volume: if not volume:
return return
log.info("Removing volume %s" % volume['Name']) log.info("Removing volume %s" % volume["Name"])
if dry_run: if dry_run:
return return
api_call(client.remove_volume, name=volume['Name']) api_call(client.remove_volume, name=volume["Name"])
def cleanup_volumes(client, dry_run): def cleanup_volumes(client, dry_run):
dangling_volumes = get_dangling_volumes(client) dangling_volumes = get_dangling_volumes(client)
for volume in reversed(dangling_volumes): for volume in reversed(dangling_volumes):
log.info("Removing dangling volume %s", volume['Name']) log.info("Removing dangling volume %s", volume["Name"])
remove_volume(client, volume, dry_run) remove_volume(client, volume, dry_run)
@ -240,31 +237,31 @@ def api_call(func, **kwargs):
try: try:
return func(**kwargs) return func(**kwargs)
except requests.exceptions.Timeout as e: except requests.exceptions.Timeout as e:
params = ','.join('%s=%s' % item for item in kwargs.items()) params = ",".join("%s=%s" % item for item in kwargs.items())
log.warn("Failed to call %s %s %s" % (func.__name__, params, e)) log.warn("Failed to call %s %s %s" % (func.__name__, params, e))
except docker.errors.APIError as ae: except docker.errors.APIError as ae:
params = ','.join('%s=%s' % item for item in kwargs.items()) params = ",".join("%s=%s" % item for item in kwargs.items())
log.warn("Error calling %s %s %s" % (func.__name__, params, ae)) log.warn("Error calling %s %s %s" % (func.__name__, params, ae))
def format_image(image, image_summary): def format_image(image, image_summary):
def get_tags(): def get_tags():
tags = image_summary.get('RepoTags') tags = image_summary.get("RepoTags")
if not tags or tags == ['<none>:<none>']: if not tags or tags == ["<none>:<none>"]:
return '' return ""
return ', '.join(tags) return ", ".join(tags)
return "%s %s" % (image['Id'][:16], get_tags()) return "%s %s" % (image["Id"][:16], get_tags())
def build_exclude_set(image_tags, exclude_file): def build_exclude_set(image_tags, exclude_file):
exclude_set = set(image_tags or []) exclude_set = set(image_tags or [])
def is_image_tag(line): def is_image_tag(line):
return line and not line.startswith('#') return line and not line.startswith("#")
if exclude_file: if exclude_file:
lines = [line.strip() for line in exclude_file.read().split('\n')] lines = [line.strip() for line in exclude_file.read().split("\n")]
exclude_set.update(filter(is_image_tag, lines)) exclude_set.update(filter(is_image_tag, lines))
return exclude_set return exclude_set
@ -272,7 +269,7 @@ def build_exclude_set(image_tags, exclude_file):
def format_exclude_labels(exclude_label_args): def format_exclude_labels(exclude_label_args):
exclude_labels = [] exclude_labels = []
for exclude_label_arg in exclude_label_args: for exclude_label_arg in exclude_label_args:
split_exclude_label = exclude_label_arg.split('=', 1) split_exclude_label = exclude_label_arg.split("=", 1)
exclude_label_key = split_exclude_label[0] exclude_label_key = split_exclude_label[0]
if len(split_exclude_label) == 2: if len(split_exclude_label) == 2:
exclude_label_value = split_exclude_label[1] exclude_label_value = split_exclude_label[1]
@ -294,7 +291,7 @@ def main():
stream=sys.stdout) stream=sys.stdout)
args = get_args() args = get_args()
client = docker.APIClient(version='auto', client = docker.APIClient(version="auto",
timeout=args.timeout, timeout=args.timeout,
**kwargs_from_env()) **kwargs_from_env())
@ -323,39 +320,39 @@ def main():
def get_args(args=None): def get_args(args=None):
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
'--max-container-age', "--max-container-age",
type=timedelta_type, type=timedelta_type,
help="Maximum age for a container. Containers older than this age " help="Maximum age for a container. Containers older than this age "
"will be removed. Age can be specified in any pytimeparse " "will be removed. Age can be specified in any pytimeparse "
"supported format.") "supported format.")
parser.add_argument( parser.add_argument(
'--max-image-age', "--max-image-age",
type=timedelta_type, type=timedelta_type,
help="Maxium age for an image. Images older than this age will be " help="Maxium age for an image. Images older than this age will be "
"removed. Age can be specified in any pytimeparse supported " "removed. Age can be specified in any pytimeparse supported "
"format.") "format.")
parser.add_argument( parser.add_argument(
'--dangling-volumes', "--dangling-volumes",
action="store_true", action="store_true",
help="Dangling volumes will be removed.") help="Dangling volumes will be removed.")
parser.add_argument( parser.add_argument(
'--dry-run', action="store_true", "--dry-run", action="store_true",
help="Only log actions, don't remove anything.") help="Only log actions, don't remove anything.")
parser.add_argument( parser.add_argument(
'-t', '--timeout', type=int, default=60, "-t", "--timeout", type=int, default=60,
help="HTTP timeout in seconds for making docker API calls.") help="HTTP timeout in seconds for making docker API calls.")
parser.add_argument( parser.add_argument(
'--exclude-image', "--exclude-image",
action='append', action="append",
help="Never remove images with this tag.") help="Never remove images with this tag.")
parser.add_argument( parser.add_argument(
'--exclude-image-file', "--exclude-image-file",
type=argparse.FileType('r'), type=argparse.FileType("r"),
help="Path to a file which contains a list of images to exclude, one " help="Path to a file which contains a list of images to exclude, one "
"image tag per line.") "image tag per line.")
parser.add_argument( parser.add_argument(
'--exclude-container-label', "--exclude-container-label",
action='append', type=str, default=[], action="append", type=str, default=[],
help="Never remove containers with this label key or label key=value") help="Never remove containers with this label key or label key=value")
return parser.parse_args(args=args) return parser.parse_args(args=args)

View File

@ -1,12 +1,14 @@
import datetime import datetime
from dateutil import tz
from docker_custodian import args
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock import mock
from dateutil import tz
from docker_custodian import args
def test_datetime_seconds_ago(now): def test_datetime_seconds_ago(now):

View File

@ -1,12 +1,13 @@
import datetime import datetime
from dateutil import tz
import docker import docker
import pytest
from dateutil import tz
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock import mock
import pytest
@pytest.fixture @pytest.fixture

View File

@ -3,14 +3,12 @@ try:
except ImportError: except ImportError:
import mock import mock
from docker_custodian.docker_autostop import ( from docker_custodian.docker_autostop import build_container_matcher
build_container_matcher, from docker_custodian.docker_autostop import get_opts
get_opts, from docker_custodian.docker_autostop import has_been_running_since
has_been_running_since, from docker_custodian.docker_autostop import main
main, from docker_custodian.docker_autostop import stop_container
stop_container, from docker_custodian.docker_autostop import stop_containers
stop_containers,
)
def test_stop_containers(mock_client, container, now): def test_stop_containers(mock_client, container, now):

View File

@ -1,14 +1,15 @@
from six import StringIO
import textwrap import textwrap
import docker.errors import docker.errors
import requests.exceptions
from docker_custodian import docker_gc
from io import StringIO
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock import mock
import requests.exceptions
from docker_custodian import docker_gc
class TestShouldRemoveContainer(object): class TestShouldRemoveContainer(object):

View File

@ -1,14 +0,0 @@
backports.ssl-match-hostname==3.5.0.1
certifi==2018.1.18
chardet==3.0.4
docker==3.1.0
docker-pycreds==0.2.2
future==0.16.0
idna==2.6
ipaddress==1.0.19
python-dateutil==2.6.1
pytimeparse==1.1.7
requests==2.20.0
six==1.11.0
urllib3==1.24.2
websocket-client==0.47.0

20
setup.cfg Normal file
View File

@ -0,0 +1,20 @@
[metadata]
description-file = README.md
license_file = LICENSE
[bdist_wheel]
universal = 1
[isort]
default_section = THIRDPARTY
known_first_party = dockertidy
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
force_single_line = true
line_length = 110
skip_glob = **/env/*,**/docs/*
[tool:pytest]
filterwarnings =
ignore::FutureWarning
ignore:.*collections.*:DeprecationWarning
ignore:.*pep8.*:FutureWarning

116
setup.py
View File

@ -1,27 +1,99 @@
# -*- coding: utf-8 -*- #!/usr/bin/env python
from setuptools import setup, find_packages """Setup script for the package."""
from docker_custodian.__about__ import __version__
import io
import os
import re
from setuptools import find_packages
from setuptools import setup
PACKAGE_NAME = "dockertidy"
def get_property(prop, project):
current_dir = os.path.dirname(os.path.realpath(__file__))
result = re.search(
r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop),
open(os.path.join(current_dir, project, "__init__.py")).read(),
)
return result.group(1)
def get_readme(filename="README.md"):
this = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(this, filename), encoding="utf-8") as f:
long_description = f.read()
return long_description
setup( setup(
name='docker_custodian', name=get_property("__project__", PACKAGE_NAME),
version=__version__, use_scm_version={
provides=['docker_custodian'], "version_scheme": "python-simplified-semver",
author='Daniel Nephin', "local_scheme": "no-local-version",
author_email='dnephin@yelp.com', "fallback_version": "unknown",
description='Keep docker hosts tidy.',
packages=find_packages(exclude=['tests*']),
include_package_data=True,
install_requires=[
'python-dateutil',
'docker',
'pytimeparse',
],
license="Apache License 2.0",
entry_points={
'console_scripts': [
'dcstop = docker_custodian.docker_autostop:main',
'dcgc = docker_custodian.docker_gc:main',
],
}, },
description="Keep docker hosts tidy",
keywords="docker gc prune garbage",
author=get_property("__author__", PACKAGE_NAME),
author_email=get_property("__email__", PACKAGE_NAME),
url="https://github.com/xoxys/docker-tidy",
license=get_property("__url__", PACKAGE_NAME),
long_description=get_readme(),
long_description_content_type="text/markdown",
packages=find_packages(exclude=["*.tests", "tests", "tests.*"]),
include_package_data=True,
zip_safe=False,
python_requires=">=3.5",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Systems Administration",
"Topic :: Utilities",
"Topic :: Software Development",
"Topic :: Software Development :: Documentation",
],
install_requires=[
"anyconfig==0.9.10",
"appdirs==1.4.3",
"attrs==19.3.0",
"certifi==2019.11.28",
"chardet==3.0.4",
"colorama==0.4.3",
"docker==4.2.0",
"docker-pycreds==0.4.0",
"environs==7.2.0",
"idna==2.9",
"importlib-metadata==1.5.0; python_version < '3.8'",
"ipaddress==1.0.23",
"jsonschema==3.2.0",
"marshmallow==3.5.0",
"nested-lookup==0.2.21",
"pathspec==0.7.0",
"pyrsistent==0.15.7",
"python-dateutil==2.8.1",
"python-dotenv==0.12.0",
"python-json-logger==0.1.11",
"pytimeparse==1.1.8",
"requests==2.23.0",
"ruamel.yaml==0.16.10",
"ruamel.yaml.clib==0.2.0; platform_python_implementation == 'CPython' and python_version < '3.9'",
"six==1.14.0",
"urllib3==1.25.8",
"websocket-client==0.57.0",
"zipp==3.0.0",
],
dependency_links=[],
setup_requires=["setuptools_scm",],
entry_points={"console_scripts": ["docker-tidy = dockertidy.__main__:main"]},
test_suite="tests",
) )

14
tox.ini
View File

@ -1,14 +0,0 @@
[tox]
envlist = py27,py35,py36
[testenv]
deps =
-rrequirements.txt
mock
pre-commit
pytest
commands =
py.test {posargs:tests}
pre-commit autoupdate
pre-commit install -f --install-hooks
pre-commit run --all-files