Move package to src/ to properly separate what goes into docker image

Einar Forselv
2019-12-05 10:26:46 +01:00
parent 0af9f2e8ee
commit 130be30268
19 changed files with 2 additions and 2 deletions

src/.dockerignore Normal file

@@ -0,0 +1,19 @@
.venv/
.vscode/
extras/
restic_cache/
restic_data/
tests/
.gitignore
*.env
*.log
docker-compose.yaml
*.ini
*.egg-info
__pycache__
.DS_Store
.git
.pytest_cache
.dockerignore
build/
docs/

src/Dockerfile Normal file

@@ -0,0 +1,11 @@
FROM restic/restic:0.9.6
RUN apk update && apk add python3 dcron mariadb-client postgresql-client
ADD . /restic-compose-backup
WORKDIR /restic-compose-backup
RUN pip3 install -U pip setuptools && pip3 install -e .
ENV XDG_CACHE_HOME=/cache
ENTRYPOINT []
CMD ["./entrypoint.sh"]
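With the Dockerfile and .dockerignore now living under src/, the intent is that only the package directory is sent to the Docker daemon as build context. A rough build sketch (the image tag is illustrative, not part of this commit):

docker build -t restic-compose-backup src/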

src/crontab Normal file

@@ -0,0 +1 @@
0 2 * * * source /env.sh && rcb backup > /proc/1/fd/1

src/entrypoint.sh Executable file

@@ -0,0 +1,8 @@
#!/bin/sh
# Dump all env vars so we can source them in cron jobs
printenv | sed 's/^\(.*\)$/export \1/g' > /env.sh
# start cron in the foreground
crontab crontab
crond -f
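For illustration, the sed expression above simply prefixes each line of printenv output with 'export', so a container variable such as RESTIC_REPOSITORY (the value below is made up) ends up as a sourceable line in /env.sh:

RESTIC_REPOSITORY=/restic_data            (printenv output)
export RESTIC_REPOSITORY=/restic_data     (line written to /env.sh)

That is what lets the crontab entry run 'source /env.sh && rcb backup' with the environment the container was started with, since cron itself starts jobs with an almost empty environment.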

src/restic_compose_backup/alerts/__init__.py Normal file

@@ -0,0 +1,44 @@
import logging
from restic_compose_backup.alerts.smtp import SMTPAlert
from restic_compose_backup.alerts.discord import DiscordWebhookAlert
from restic_compose_backup.config import Config
logger = logging.getLogger(__name__)
ALERT_INFO = 'INFO'
ALERT_ERROR = 'ERROR'
ALERT_TYPES = [ALERT_INFO, ALERT_ERROR]
BACKENDS = [SMTPAlert, DiscordWebhookAlert]
def send(subject: str = None, body: str = None, alert_type: str = 'INFO'):
"""Send alert to all configured backends"""
alert_classes = configured_alert_types()
for instance in alert_classes:
logger.info('Configured: %s', instance.name)
try:
instance.send(
subject=f'[{alert_type}] {subject}',
body=body,
)
except Exception as ex:
logger.error("Exception raised when sending alert [%s]: %s", instance.name, ex)
logger.exception(ex)
if len(alert_classes) == 0:
logger.info("No alerts configured")
def configured_alert_types():
"""Returns a list of configured alert class instances"""
logger.debug('Getting alert backends')
entries = []
for cls in BACKENDS:
instance = cls.create_from_env()
logger.debug("Alert backend '%s' configured: %s", cls.name, instance is not None)
if instance:
entries.append(instance)
return entries

src/restic_compose_backup/alerts/base.py Normal file

@@ -0,0 +1,14 @@
class BaseAlert:
name = None
@classmethod
def create_from_env(cls):
return None
@property
def properly_configured(self) -> bool:
return False
def send(self, subject: str = None, body: str = None, alert_type: str = None):
pass

src/restic_compose_backup/alerts/discord.py Normal file

@@ -0,0 +1,46 @@
import os
import logging
from urllib.parse import urlparse
import requests
from restic_compose_backup.alerts.base import BaseAlert
logger = logging.getLogger(__name__)
class DiscordWebhookAlert(BaseAlert):
name = 'discord_webhook'
success_codes = [200]
def __init__(self, webhook_url):
self.url = webhook_url
@classmethod
def create_from_env(cls):
instance = cls(os.environ.get('DISCORD_WEBHOOK'))
if instance.properly_configured:
return instance
return None
@property
def properly_configured(self) -> bool:
return isinstance(self.url, str) and self.url.startswith("https://")
def send(self, subject: str = None, body: str = None, alert_type: str = None):
"""Send basic webhook request. Max embed size is 6000"""
logger.info("Triggering discord webhook")
data = {
'embeds': [
{
'title': subject,
'description': body[:5000],
},
]
}
response = requests.post(self.url, params={'wait': True}, json=data)
if response.status_code not in self.success_codes:
logger.error("Discord webhook failed: %s: %s", response.status_code, response.content)
else:
logger.info('Discord webhook successful')

src/restic_compose_backup/alerts/smtp.py Normal file

@@ -0,0 +1,56 @@
import os
import smtplib
import logging
from email.mime.text import MIMEText
from restic_compose_backup.alerts.base import BaseAlert
logger = logging.getLogger(__name__)
class SMTPAlert(BaseAlert):
name = 'smtp'
def __init__(self, host, port, user, password, to):
self.host = host
self.port = port
self.user = user
self.password = password
self.to = to
@classmethod
def create_from_env(cls):
instance = cls(
os.environ.get('EMAIL_HOST'),
os.environ.get('EMAIL_PORT'),
os.environ.get('EMAIL_HOST_USER'),
os.environ.get('EMAIL_HOST_PASSWORD'),
(os.environ.get('EMAIL_SEND_TO') or "").split(','),
)
if instance.properly_configured:
return instance
return None
@property
def properly_configured(self) -> bool:
return bool(self.host and self.port and self.user and self.password and any(self.to))
def send(self, subject: str = None, body: str = None, alert_type: str = 'INFO'):
# send_mail("Hello world!")
msg = MIMEText(body)
msg['Subject'] = f"[{alert_type}] {subject}"
msg['From'] = self.user
msg['To'] = ', '.join(self.to)
server = None
try:
logger.info("Connecting to %s port %s", self.host, self.port)
server = smtplib.SMTP_SSL(self.host, self.port)
server.ehlo()
server.login(self.user, self.password)
server.sendmail(self.user, self.to, msg.as_string())
logger.info('Email sent')
except Exception as ex:
logger.exception(ex)
finally:
if server:
server.close()

src/restic_compose_backup/backup_runner.py Normal file

@@ -0,0 +1,59 @@
import logging
import os
import docker
from restic_compose_backup.config import Config
logger = logging.getLogger(__name__)
def run(image: str = None, command: str = None, volumes: dict = None,
environment: dict = None, labels: dict = None, source_container_id: str = None):
logger.info("Starting backup container")
config = Config()
client = docker.DockerClient(base_url=config.docker_base_url)
container = client.containers.run(
image,
command,
labels=labels,
# auto_remove=True, # We remove the container further down
detach=True,
environment=environment,
volumes=volumes,
network_mode=f'container:{source_container_id}', # Reuse original container's network stack.
working_dir=os.getcwd(),
tty=True,
)
logger.info("Backup process container: %s", container.name)
log_generator = container.logs(stdout=True, stderr=True, stream=True, follow=True)
def readlines(stream):
"""Read stream line by line"""
while True:
line = ""
while True:
try:
line += next(stream).decode()
if line.endswith('\n'):
break
except StopIteration:
break
if line:
yield line.rstrip()
else:
break
with open('backup.log', 'w') as fd:
for line in readlines(log_generator):
fd.write(line)
fd.write('\n')
logger.info(line)
container.reload()
logger.debug("Container ExitCode %s", container.attrs['State']['ExitCode'])
container.stop()
container.remove()
return container.attrs['State']['ExitCode']

src/restic_compose_backup/cli.py Normal file

@@ -0,0 +1,220 @@
import argparse
import pprint
import logging
from restic_compose_backup import (
alerts,
backup_runner,
log,
restic,
)
from restic_compose_backup.config import Config
from restic_compose_backup.containers import RunningContainers
logger = logging.getLogger(__name__)
def main():
"""CLI entrypoint"""
args = parse_args()
config = Config()
log.setup(level=args.log_level or config.log_level)
containers = RunningContainers()
# Ensure the log level is passed on to the spawned backup process container if overridden
if args.log_level:
containers.this_container.set_config_env('LOG_LEVEL', args.log_level)
if args.action == 'status':
status(config, containers)
elif args.action == 'snapshots':
snapshots(config, containers)
elif args.action == 'backup':
backup(config, containers)
elif args.action == 'start-backup-process':
start_backup_process(config, containers)
elif args.action == 'cleanup':
cleanup(config, containers)
elif args.action == 'alert':
alert(config, containers)
def status(config, containers):
"""Outputs the backup config for the compose setup"""
logger.info("Status for compose project '%s'", containers.project_name)
logger.info("Backup currently running?: %s", containers.backup_process_running)
logger.info("%s Detected Config %s", "-" * 25, "-" * 25)
backup_containers = containers.containers_for_backup()
for container in backup_containers:
logger.info('service: %s', container.service_name)
if container.volume_backup_enabled:
for mount in container.filter_mounts():
logger.info(' - volume: %s', mount.source)
if container.database_backup_enabled:
instance = container.instance
ping = instance.ping()
logger.info(' - %s (is_ready=%s)', instance.container_type, ping == 0)
if ping != 0:
logger.error("Database '%s' in service %s cannot be reached", instance.container_type, container.service_name)
if len(backup_containers) == 0:
logger.info("No containers in the project have the 'restic-compose-backup.enabled' label")
logger.info("-" * 67)
def backup(config, containers):
"""Request a backup to start"""
# Make sure we don't spawn multiple backup processes
if containers.backup_process_running:
raise ValueError("Backup process already running")
logger.info("Initializing repository (may fail if already initialized)")
# TODO: Errors when repo already exists
restic.init_repo(config.repository)
# Map all volumes from the backup container into the backup process container
volumes = containers.this_container.volumes
# Map volumes from other containers we are backing up
mounts = containers.generate_backup_mounts('/volumes')
volumes.update(mounts)
try:
result = backup_runner.run(
image=containers.this_container.image,
command='restic-compose-backup start-backup-process',
volumes=volumes,
environment=containers.this_container.environment,
source_container_id=containers.this_container.id,
labels={
"restic-compose-backup.backup_process": 'True',
"com.docker.compose.project": containers.project_name,
},
)
except Exception as ex:
logger.exception(ex)
alerts.send(
subject="Exception during backup",
body=str(ex),
alert_type='ERROR',
)
return
logger.info('Backup container exit code: %s', result)
# Alert the user if something went wrong
if result != 0:
alerts.send(
subject="Backup process exited with non-zero code",
body=open('backup.log').read(),
alert_type='ERROR',
)
def start_backup_process(config, containers):
"""The actual backup process running inside the spawned container"""
if (not containers.backup_process_container
or containers.this_container != containers.backup_process_container):
logger.error(
"Cannot run backup process in this container. Use backup command instead. "
"This will spawn a new container with the necessary mounts."
)
return
status(config, containers)
errors = False
# Back up volumes
try:
logger.info('Backing up volumes')
vol_result = restic.backup_files(config.repository, source='/volumes')
logger.debug('Volume backup exit code: %s', vol_result)
if vol_result != 0:
logger.error('Backup command exited with non-zero code: %s', vol_result)
errors = True
except Exception as ex:
logger.exception(ex)
errors = True
# back up databases
for container in containers.containers_for_backup():
if container.database_backup_enabled:
try:
instance = container.instance
logger.info('Backing up %s in service %s', instance.container_type, instance.service_name)
result = instance.backup()
logger.debug('Exit code: %s', result)
if result != 0:
logger.error('Backup command exited with non-zero code: %s', result)
errors = True
except Exception as ex:
logger.exception(ex)
errors = True
if errors:
exit(1)
# Only run cleanup if backup was successful
result = cleanup(config, containers)
logger.debug('cleanup exit code: %s', result)
if result != 0:
exit(1)
def cleanup(config, containers):
"""Run forget / prune to minimize storage space"""
logger.info('Forget outdated snapshots')
forget_result = restic.forget(
config.repository,
config.keep_daily,
config.keep_weekly,
config.keep_monthly,
config.keep_yearly,
)
logger.info('Prune stale data freeing storage space')
prune_result = restic.prune(config.repository)
return 0 if (forget_result == 0 and prune_result == 0) else 1
def snapshots(config, containers):
"""Display restic snapshots"""
stdout, stderr = restic.snapshots(config.repository, last=True)
for line in stdout.decode().split('\n'):
print(line)
def alert(config, containers):
"""Test alerts"""
logger.info("Testing alerts")
alerts.send(
subject="{}: Test Alert".format(containers.project_name),
body="Test message",
)
def parse_args():
parser = argparse.ArgumentParser(prog='restic_compose_backup')
parser.add_argument(
'action',
choices=['status', 'snapshots', 'backup', 'start-backup-process', 'alert', 'cleanup'],
)
parser.add_argument(
'--log-level',
default=None,
choices=list(log.LOG_LEVELS.keys()),
help="Log level"
)
return parser.parse_args()
if __name__ == '__main__':
main()

src/restic_compose_backup/commands.py Normal file

@@ -0,0 +1,73 @@
import logging
from typing import List, Tuple
from subprocess import Popen, PIPE
logger = logging.getLogger(__name__)
def test():
return run(['ls', '/volumes'])
def ping_mysql(host, port, username) -> int:
"""Check if the mysql is up and can be reached"""
return run([
'mysqladmin',
'ping',
'--host',
host,
'--port',
port,
'--user',
username,
])
def ping_mariadb(host, port, username) -> int:
"""Check if the mariadb is up and can be reached"""
return run([
'mysqladmin',
'ping',
'--host',
host,
'--port',
port,
'--user',
username,
])
def ping_postgres(host, port, username, password) -> int:
"""Check if postgres can be reached"""
return run([
"pg_isready",
f"--host={host}",
f"--port={port}",
f"--username={username}",
])
def run(cmd: List[str]) -> int:
"""Run a command with parameters"""
logger.debug('cmd: %s', ' '.join(cmd))
child = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdoutdata, stderrdata = child.communicate()
if stdoutdata:
logger.debug(stdoutdata.decode().strip())
logger.debug('-' * 28)
if stderrdata:
logger.error('%s STDERR %s', '-' * 10, '-' * 10)
logger.error(stderrdata.decode().strip())
logger.error('-' * 28)
logger.debug("returncode %s", child.returncode)
return child.returncode
def run_capture_std(cmd: List[str]) -> Tuple[str, str]:
"""Run a command with parameters and return stdout, stderr"""
logger.debug('cmd: %s', ' '.join(cmd))
child = Popen(cmd, stdout=PIPE, stderr=PIPE)
return child.communicate()

src/restic_compose_backup/config.py Normal file

@@ -0,0 +1,29 @@
import os
class Config:
"""Bag for config values"""
def __init__(self, check=True):
# Mandatory values
self.repository = os.environ.get('RESTIC_REPOSITORY')
self.password = os.environ.get('RESTIC_PASSWORD')
self.docker_base_url = os.environ.get('DOCKER_BASE_URL') or "unix://tmp/docker.sock"
# Log
self.log_level = os.environ.get('LOG_LEVEL')
# forget / keep
self.keep_daily = os.environ.get('KEEP_DAILY') or "7"
self.keep_weekly = os.environ.get('KEEP_WEEKLY') or "4"
self.keep_monthly = os.environ.get('KEEP_MONTHLY') or "12"
self.keep_yearly = os.environ.get('KEEP_YEARLY') or "3"
if check:
self.check()
def check(self):
if not self.repository:
raise ValueError("RESTIC_REPOSITORY env var not set")
if not self.password:
raise ValueError("RESTIC_PASSWORD env var not set")
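For reference, a minimal environment for this config might look like the sketch below; the values are purely illustrative, only the first two are required by check(), and the rest fall back to the defaults set in __init__:

RESTIC_REPOSITORY=/restic_data
RESTIC_PASSWORD=change-me
LOG_LEVEL=info
KEEP_DAILY=7
DOCKER_BASE_URL=unix://tmp/docker.sock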

src/restic_compose_backup/containers.py Normal file

@@ -0,0 +1,365 @@
import os
from pathlib import Path
from typing import List
from restic_compose_backup import utils
VOLUME_TYPE_BIND = "bind"
VOLUME_TYPE_VOLUME = "volume"
class Container:
"""Represents a docker container"""
container_type = None
def __init__(self, data: dict):
self._data = data
self._state = data.get('State')
self._config = data.get('Config')
self._mounts = [Mount(mnt, container=self) for mnt in data.get('Mounts')]
if not self._state:
raise ValueError('Container meta missing State')
if self._config is None:
raise ValueError('Container meta missing Config')
self._labels = self._config.get('Labels')
if self._labels is None:
raise ValueError('Container meta missing Config->Labels')
self._include = self._parse_pattern(self.get_label('restic-compose-backup.volumes.include'))
self._exclude = self._parse_pattern(self.get_label('restic-compose-backup.volumes.exclude'))
@property
def instance(self) -> 'Container':
"""Container: Get a service specific subclass instance"""
# TODO: Do this smarter in the future (simple registry)
if self.database_backup_enabled:
from restic_compose_backup import containers_db
if self.mariadb_backup_enabled:
return containers_db.MariadbContainer(self._data)
if self.mysql_backup_enabled:
return containers_db.MysqlContainer(self._data)
if self.postgresql_backup_enabled:
return containers_db.PostgresContainer(self._data)
else:
return self
@property
def id(self) -> str:
"""str: The id of the container"""
return self._data.get('Id')
@property
def hostname(self) -> str:
"""12 character hostname based on id"""
return self.id[:12]
@property
def image(self) -> str:
"""Image name"""
return self.get_config('Image')
@property
def environment(self) -> list:
"""All configured env vars for the container as a list"""
return self.get_config('Env')
def get_config_env(self, name) -> str:
"""Get a config environment variable by name"""
# convert to dict and fetch env var by name
data = {i[0:i.find('=')]: i[i.find('=')+1:] for i in self.environment}
return data.get(name)
def set_config_env(self, name, value):
"""Set an environment variable"""
env = self.environment
new_value = f'{name}={value}'
for i, entry in enumerate(env):
if f'{name}=' in entry:
env[i] = new_value
break
else:
env.append(new_value)
@property
def volumes(self) -> dict:
"""
Return volumes for the container in the following format:
{'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'},}
"""
volumes = {}
for mount in self._mounts:
volumes[mount.source] = {
'bind': mount.destination,
'mode': 'rw',
}
return volumes
@property
def backup_enabled(self) -> bool:
"""Is backup enabled for this container?"""
return any([
self.volume_backup_enabled,
self.database_backup_enabled,
])
@property
def volume_backup_enabled(self) -> bool:
return utils.is_true(self.get_label('restic-compose-backup.volumes'))
@property
def database_backup_enabled(self) -> bool:
"""bool: Is database backup enabled in any shape or form?"""
return any([
self.mysql_backup_enabled,
self.mariadb_backup_enabled,
self.postgresql_backup_enabled,
])
@property
def mysql_backup_enabled(self) -> bool:
return utils.is_true(self.get_label('restic-compose-backup.mysql'))
@property
def mariadb_backup_enabled(self) -> bool:
return utils.is_true(self.get_label('restic-compose-backup.mariadb'))
@property
def postgresql_backup_enabled(self) -> bool:
return utils.is_true(self.get_label('restic-compose-backup.postgres'))
@property
def is_backup_process_container(self) -> bool:
"""Is this container the running backup process?"""
return self.get_label('restic-compose-backup.backup_process') == 'True'
@property
def is_running(self) -> bool:
"""Is the container running?"""
return self._state.get('Running', False)
@property
def name(self) -> str:
"""Container name"""
return self._data['Name'].replace('/', '')
@property
def service_name(self) -> str:
"""Name of the container/service"""
return self.get_label('com.docker.compose.service', default='')
@property
def project_name(self) -> str:
"""Name of the compose setup"""
return self.get_label('com.docker.compose.project', default='')
@property
def is_oneoff(self) -> bool:
"""Was this container started with run command?"""
return self.get_label('com.docker.compose.oneoff', default='False') == 'True'
def get_config(self, name, default=None):
"""Get value from config dict"""
return self._config.get(name, default)
def get_label(self, name, default=None):
"""Get a label by name"""
return self._labels.get(name, default)
def filter_mounts(self):
"""Get all mounts for this container matching include/exclude filters"""
filtered = []
if not self.volume_backup_enabled:
return filtered
if self._include:
for mount in self._mounts:
for pattern in self._include:
if pattern in mount.source:
break
else:
continue
filtered.append(mount)
elif self._exclude:
for mount in self._mounts:
for pattern in self._exclude:
if pattern in mount.source:
break
else:
filtered.append(mount)
else:
return self._mounts
return filtered
def volumes_for_backup(self, source_prefix='/volumes', mode='ro'):
"""Get volumes configured for backup"""
mounts = self.filter_mounts()
volumes = {}
for mount in mounts:
volumes[mount.source] = {
'bind': str(Path(source_prefix) / self.service_name / Path(utils.strip_root(mount.destination))),
'mode': mode,
}
return volumes
def get_credentials(self) -> dict:
"""dict: get credentials for the service"""
raise NotImplementedError("Base container class doesn't implement this")
def ping(self) -> int:
"""Check the availability of the service. Returns 0 when the service is reachable"""
raise NotImplementedError("Base container class doesn't implement this")
def backup(self):
"""Back up this service"""
raise NotImplementedError("Base container class doesn't implement this")
def dump_command(self) -> list:
"""list: create a dump command restic can use to receive data through stdin"""
raise NotImplementedError("Base container class doesn't implement this")
def _parse_pattern(self, value: str) -> List[str]:
"""list: Safely parse include/exclude pattern from user"""
if not value:
return None
if type(value) is not str:
return None
value = value.strip()
if len(value) == 0:
return None
return value.split(',')
def __eq__(self, other):
"""Compare container by id"""
if other is None:
return False
if not isinstance(other, Container):
return False
return self.id == other.id
def __repr__(self):
return str(self)
def __str__(self):
return "<Container {}>".format(self.name)
class Mount:
"""Represents a volume mount (volume or bind)"""
def __init__(self, data, container=None):
self._data = data
self._container = container
@property
def container(self) -> Container:
"""The container this mount belongs to"""
return self._container
@property
def type(self) -> str:
"""bind/volume"""
return self._data.get('Type')
@property
def name(self) -> str:
"""Name of the mount"""
return self._data.get('Name')
@property
def source(self) -> str:
"""Source of the mount. Volume name or path"""
return self._data.get('Source')
@property
def destination(self) -> str:
"""Destination path for the volume mount in the container"""
return self._data.get('Destination')
def __repr__(self) -> str:
return str(self)
def __str__(self) -> str:
return str(self._data)
def __hash__(self):
"""Uniqueness for a volume"""
if self.type == VOLUME_TYPE_VOLUME:
return hash(self.name)
elif self.type == VOLUME_TYPE_BIND:
return hash(self.source)
else:
raise ValueError("Unknown volume type: {}".format(self.type))
class RunningContainers:
def __init__(self):
all_containers = utils.list_containers()
self.containers = []
self.this_container = None
self.backup_process_container = None
# Find the container we are running in.
# If we don't have this information we cannot continue
for container_data in all_containers:
if container_data.get('Id').startswith(os.environ['HOSTNAME']):
self.this_container = Container(container_data)
if not self.this_container:
raise ValueError("Cannot find metadata for backup container")
# Gather all containers in the current compose setup
for container_data in all_containers:
container = Container(container_data)
# Detect running backup process container
if container.is_backup_process_container:
self.backup_process_container = container
# Detect containers belonging to the current compose setup
if (container.project_name == self.this_container.project_name
and not container.is_oneoff):
if container.id != self.this_container.id:
self.containers.append(container)
@property
def project_name(self) -> str:
"""str: Name of the compose project"""
return self.this_container.project_name
@property
def backup_process_running(self) -> bool:
"""Is the backup process container running?"""
return self.backup_process_container is not None
def containers_for_backup(self):
"""Obtain all containers with backup enabled"""
return [container for container in self.containers if container.backup_enabled]
def generate_backup_mounts(self, dest_prefix='/volumes') -> dict:
"""Generate mounts for backup for the entire compose setup"""
mounts = {}
for container in self.containers_for_backup():
if container.volume_backup_enabled:
mounts.update(container.volumes_for_backup(source_prefix=dest_prefix, mode='ro'))
return mounts
def get_service(self, name) -> Container:
for container in self.containers:
if container.service_name == name:
return container
return None

src/restic_compose_backup/containers_db.py Normal file

@@ -0,0 +1,146 @@
from restic_compose_backup.containers import Container
from restic_compose_backup.config import Config
from restic_compose_backup import (
commands,
restic,
)
from restic_compose_backup import utils
class MariadbContainer(Container):
container_type = 'mariadb'
def get_credentials(self) -> dict:
"""dict: get credentials for the service"""
return {
'host': self.hostname,
'username': self.get_config_env('MYSQL_USER'),
'password': self.get_config_env('MYSQL_PASSWORD'),
'port': "3306",
}
def ping(self) -> int:
"""Check the availability of the service"""
creds = self.get_credentials()
with utils.environment('MYSQL_PWD', creds['password']):
return commands.ping_mariadb(
creds['host'],
creds['port'],
creds['username'],
)
def dump_command(self) -> list:
"""list: create a dump command restic can use to receive data through stdin"""
creds = self.get_credentials()
return [
"mysqldump",
f"--host={creds['host']}",
f"--port={creds['port']}",
f"--user={creds['username']}",
"--all-databases",
]
def backup(self):
config = Config()
creds = self.get_credentials()
with utils.environment('MYSQL_PWD', creds['password']):
return restic.backup_from_stdin(
config.repository,
f'/databases/{self.service_name}/all_databases.sql',
self.dump_command(),
)
class MysqlContainer(Container):
container_type = 'mysql'
def get_credentials(self) -> dict:
"""dict: get credentials for the service"""
return {
'host': self.hostname,
'username': self.get_config_env('MYSQL_USER'),
'password': self.get_config_env('MYSQL_PASSWORD'),
'port': "3306",
}
def ping(self) -> int:
"""Check the availability of the service"""
creds = self.get_credentials()
with utils.environment('MYSQL_PWD', creds['password']):
return commands.ping_mysql(
creds['host'],
creds['port'],
creds['username'],
)
def dump_command(self) -> list:
"""list: create a dump command restic can use to receive data through stdin"""
creds = self.get_credentials()
return [
"mysqldump",
f"--host={creds['host']}",
f"--port={creds['port']}",
f"--user={creds['username']}",
"--all-databases",
]
def backup(self):
config = Config()
creds = self.get_credentials()
with utils.environment('MYSQL_PWD', creds['password']):
return restic.backup_from_stdin(
config.repository,
f'/databases/{self.service_name}/all_databases.sql',
self.dump_command(),
)
class PostgresContainer(Container):
container_type = 'postgres'
def get_credentials(self) -> dict:
"""dict: get credentials for the service"""
return {
'host': self.hostname,
'username': self.get_config_env('POSTGRES_USER'),
'password': self.get_config_env('POSTGRES_PASSWORD'),
'port': "5432",
'database': self.get_config_env('POSTGRES_DB'),
}
def ping(self) -> int:
"""Check the availability of the service"""
creds = self.get_credentials()
return commands.ping_postgres(
creds['host'],
creds['port'],
creds['username'],
creds['password'],
)
def dump_command(self) -> list:
"""list: create a dump command restic can use to receive data through stdin"""
# NOTE: Backs up a single database from POSTGRES_DB env var
creds = self.get_credentials()
return [
"pg_dump",
f"--host={creds['host']}",
f"--port={creds['port']}",
f"--username={creds['username']}",
creds['database'],
]
def backup(self):
config = Config()
creds = self.get_credentials()
with utils.environment('PGPASSWORD', creds['password']):
return restic.backup_from_stdin(
config.repository,
f"/databases/{self.service_name}/{creds['database']}.sql",
self.dump_command(),
)

src/restic_compose_backup/log.py Normal file

@@ -0,0 +1,27 @@
import logging
import os
import sys
logger = logging.getLogger('restic_compose_backup')
HOSTNAME = os.environ['HOSTNAME']
DEFAULT_LOG_LEVEL = logging.INFO
LOG_LEVELS = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
}
def setup(level: str = 'warning'):
"""Set up logging"""
level = level or ""
level = LOG_LEVELS.get(level.lower(), DEFAULT_LOG_LEVEL)
logger.setLevel(level)
ch = logging.StreamHandler(stream=sys.stdout)
ch.setLevel(level)
# ch.setFormatter(logging.Formatter(f'%(asctime)s - {HOSTNAME} - %(name)s - %(levelname)s - %(message)s'))
# ch.setFormatter(logging.Formatter(f'%(asctime)s - {HOSTNAME} - %(levelname)s - %(message)s'))
ch.setFormatter(logging.Formatter(f'%(asctime)s - %(levelname)s: %(message)s'))
logger.addHandler(ch)

src/restic_compose_backup/restic.py Normal file

@@ -0,0 +1,102 @@
"""
Restic commands
"""
import logging
from typing import List, Tuple
from subprocess import Popen, PIPE
from restic_compose_backup import commands
logger = logging.getLogger(__name__)
def init_repo(repository: str):
"""
Attempt to initialize the repository.
Running this on an already initialized repository will simply fail.
"""
return commands.run(restic(repository, [
"init",
]))
def backup_files(repository: str, source='/volumes'):
return commands.run(restic(repository, [
"--verbose",
"backup",
source,
]))
def backup_from_stdin(repository: str, filename: str, source_command: List[str]):
"""
Backs up from stdin running the source_command passed in.
It will appear in restic with the filename (including path) passed in.
"""
dest_command = restic(repository, [
'backup',
'--stdin',
'--stdin-filename',
filename,
])
# pipe source command into dest command
# NOTE: Using the default buffer size: io.DEFAULT_BUFFER_SIZE = 8192
# We might want to tweak that to speed up large dumps.
# Actual tests must be done.
source_process = Popen(source_command, stdout=PIPE)
dest_process = Popen(dest_command, stdin=source_process.stdout, stdout=PIPE, stderr=PIPE)
stdout, stderr = dest_process.communicate()
if stdout:
for line in stdout.decode().split('\n'):
logger.debug(line)
if stderr:
for line in stderr.decode().split('\n'):
logger.error(line)
# Ensure both processes exited with code 0
source_exit, dest_exit = source_process.wait(), dest_process.returncode
return 0 if (source_exit == 0 and dest_exit == 0) else 1
def snapshots(repository: str, last=True) -> Tuple[str, str]:
args = ["snapshots"]
if last:
args.append('--last')
return commands.run_capture_std(restic(repository, args))
def forget(repository: str, daily: str, weekly: str, monthly: str, yearly: str):
return commands.run(restic(repository, [
'forget',
'--keep-daily',
daily,
'--keep-weekly',
weekly,
'--keep-monthly',
monthly,
'--keep-yearly',
yearly,
]))
def prune(repository: str):
return commands.run(restic(repository, [
'prune',
]))
def check(repository: str):
return commands.run(restic(repository, [
"check",
]))
def restic(repository: str, args: List[str]):
"""Generate restic command"""
return [
"restic",
"-r",
repository,
] + args

src/restic_compose_backup/utils.py Normal file

@@ -0,0 +1,54 @@
import os
from contextlib import contextmanager
import docker
from restic_compose_backup.config import Config
TRUE_VALUES = ['1', 'true', 'True', True, 1]
def list_containers():
"""
List all containers.
Returns:
List of raw container json data from the api
"""
config = Config()
client = docker.DockerClient(base_url=config.docker_base_url)
all_containers = client.containers.list()
client.close()
return [c.attrs for c in all_containers]
def is_true(value):
"""
Evaluate the truthfulness of a boolean value in container labels
"""
return value in TRUE_VALUES
def strip_root(path):
"""
Removes the root slash in a path.
Example: /srv/data becomes srv/data
"""
path = path.strip()
if path.startswith('/'):
return path[1:]
return path
@contextmanager
def environment(name, value):
"""Temporarily set an environment variable"""
old_val = os.environ.get(name)
os.environ[name] = value
try:
yield
finally:
if old_val is None:
del os.environ[name]
else:
os.environ[name] = old_val

src/setup.py Normal file

@@ -0,0 +1,17 @@
from setuptools import setup, find_namespace_packages
setup(
name="restic-compose-backup",
url="https://github.com/ZettaIO/restic-compose-backup",
version="0.3.0",
author="Einar Forselv",
author_email="eforselv@gmail.com",
packages=find_namespace_packages(include=['restic_compose_backup', 'restic_compose_backup.*']),
install_requires=[
'docker==4.1.*',
],
entry_points={'console_scripts': [
'restic-compose-backup = restic_compose_backup.cli:main',
'rcb = restic_compose_backup.cli:main',
]},
)
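Both console scripts point at the same restic_compose_backup.cli:main, so after the image's pip3 install -e . either name works; for example (assuming the environment described in config.py is set):

rcb status
rcb backup

rcb is the short alias used by the crontab entry, while the long restic-compose-backup form is what backup() in cli.py passes as the command for the spawned backup-process container.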