2 changes: 1 addition & 1 deletion README.md
@@ -133,7 +133,7 @@ Contributions are welcome regardless of experience level.

## Python environment

Use [`uv`](https://docs.astral.sh/uv/) within the `src/` directory to manage your development environment.
Use [`uv`](https://docs.astral.sh/uv/) within the repo root directory to manage your development environment.

```bash
git clone https://github.com/lawndoc/stack-back.git
10 changes: 10 additions & 0 deletions docs/guide/configuration.rst
@@ -257,6 +257,16 @@ docker volumes. Often host binds are only used
for mapping in configuration. This saves the user
from manually excluding these bind volumes.

INCLUDE_ALL_COMPOSE_PROJECTS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

If defined, all compose projects found on the Docker host are available
for backup. By default, only the compose project the backup container
itself is running in is available for backup.

This is useful when you do not want to run a separate backup container
for each compose project.

SWARM_MODE
~~~~~~~~~~

11 changes: 9 additions & 2 deletions src/entrypoint.sh
@@ -6,6 +6,13 @@ rcb dump-env > /.env
# Write crontab
rcb crontab > crontab

# start cron in the foreground
# Start cron in the background and capture its PID
crontab crontab
crond -f
crond -f &
CRON_PID=$!

# Trap termination signals and kill the cron process
trap 'kill $CRON_PID; exit 0' TERM INT

# Wait for cron and handle signals
wait $CRON_PID
12 changes: 8 additions & 4 deletions src/restic_compose_backup/backup_runner.py
@@ -12,24 +12,28 @@ def run(
volumes: dict = None,
environment: dict = None,
labels: dict = None,
source_container_id: str = None,
network_names: set[str] = set(),
):
logger.info("Starting backup container")
client = utils.docker_client()

container = client.containers.run(
container = client.containers.create(
image,
command,
labels=labels,
# auto_remove=True, # We remove the container further down
detach=True,
environment=environment + ["BACKUP_PROCESS_CONTAINER=true"],
volumes=volumes,
network_mode=f"container:{source_container_id}", # Reuse original container's network stack.
working_dir=os.getcwd(),
tty=True,
)

for network_name in network_names:
network = client.networks.get(network_name)
network.connect(container)

container.start()

logger.info("Backup process container: %s", container.name)
log_generator = container.logs(stdout=True, stderr=True, stream=True, follow=True)

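For context only (this is not part of the diff): the docker SDK calls the new code relies on follow a create → connect → start pattern. A minimal sketch, with made-up image, command, and network names:

```python
import docker

client = docker.from_env()

# Create the container without starting it, so networks can be attached first.
container = client.containers.create(
    "alpine:latest",   # placeholder image
    "echo hello",      # placeholder command
    tty=True,
)

# Attach every network the containers being backed up live on, then start.
for network_name in ("project-a_default", "project-b_default"):  # hypothetical names
    network = client.networks.get(network_name)
    network.connect(container)

container.start()

# Stream the container's output until it exits, then remove it.
for line in container.logs(stdout=True, stderr=True, stream=True, follow=True):
    print(line.decode().rstrip())
container.remove()
```

Starting only after all networks are connected means the backup process container can reach services in every compose project from the moment it runs.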
8 changes: 6 additions & 2 deletions src/restic_compose_backup/cli.py
@@ -81,6 +81,10 @@ def status(config, containers):
"Exclude bind mounts from backups?: %s",
utils.is_true(config.exclude_bind_mounts),
)
logger.debug(
"Include all compose projects?: %s",
utils.is_true(config.include_all_compose_projects),
)
logger.debug(
f"Use cache for integrity check?: {utils.is_true(config.check_with_cache)}"
)
@@ -138,7 +142,7 @@ def status(config, containers):
logger.info("-" * 67)


def backup(config, containers):
def backup(config, containers: RunningContainers):
"""Request a backup to start"""
# Make sure we don't spawn multiple backup processes
if containers.backup_process_running:
@@ -169,7 +173,7 @@ def backup(config, containers):
command="rcb start-backup-process",
volumes=volumes,
environment=containers.this_container.environment,
source_container_id=containers.this_container.id,
network_names=containers.networks_for_backup(),
labels={
containers.backup_process_label: "True",
"com.docker.compose.project": containers.project_name,
3 changes: 3 additions & 0 deletions src/restic_compose_backup/config.py
@@ -29,6 +29,9 @@ def __init__(self, check=True):
self.swarm_mode = os.environ.get("SWARM_MODE") or False
self.include_project_name = os.environ.get("INCLUDE_PROJECT_NAME") or False
self.exclude_bind_mounts = os.environ.get("EXCLUDE_BIND_MOUNTS") or False
self.include_all_compose_projects = (
os.environ.get("INCLUDE_ALL_COMPOSE_PROJECTS") or False
)
self.include_all_volumes = os.environ.get("INCLUDE_ALL_VOLUMES") or False
if self.include_all_volumes:
logger.warning(
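As an aside (not part of the diff), the `os.environ.get(...) or False` pattern stores the raw string from the environment; the actual true/false decision is made later (in `cli.py` above via `utils.is_true`). A small, self-contained illustration — the accepted truthy values are an assumption here:

```python
import os

# Simulate the environment variable being set to the literal string "false".
os.environ["INCLUDE_ALL_COMPOSE_PROJECTS"] = "false"

# The config pattern keeps the raw value (or False when the variable is unset).
flag = os.environ.get("INCLUDE_ALL_COMPOSE_PROJECTS") or False
print(repr(flag))  # 'false' -- a non-empty string, so truthy on its own

# A later helper (such as utils.is_true) interprets the string; the exact
# accepted values below are illustrative, not the project's implementation.
print(flag.lower() in ("true", "1", "yes"))  # False
```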
48 changes: 29 additions & 19 deletions src/restic_compose_backup/containers.py
@@ -1,6 +1,6 @@
import os
import logging
from pathlib import Path
import socket
from typing import List

from restic_compose_backup import enums, utils
@@ -35,6 +35,10 @@ def __init__(self, data: dict):
self._include = self._parse_pattern(self.get_label(enums.LABEL_VOLUMES_INCLUDE))
self._exclude = self._parse_pattern(self.get_label(enums.LABEL_VOLUMES_EXCLUDE))

network_settings: dict = data.get("NetworkSettings", {})
networks: dict = network_settings.get("Networks", {})
self._network_details: dict = list(networks.values())[0]

@property
def instance(self) -> "Container":
"""Container: Get a service specific subclass instance"""
@@ -57,9 +61,14 @@ def id(self) -> str:
return self._data.get("Id")

@property
def hostname(self) -> str:
"""Hostname of the container"""
return self.get_config("Hostname", default=self.id[0:12])
def network_name(self) -> str:
"""str: Name of the first network the container is connected to"""
return self._network_details.get("NetworkID", "")

@property
def ip_address(self) -> str:
"""str: IP address the container has on its first network"""
return self._network_details.get("IPAddress", "")

@property
def image(self) -> str:
@@ -407,13 +416,13 @@ def __init__(self):
# Find the container we are running in.
# If we don't have this information we cannot continue
for container_data in all_containers:
if container_data.get("Id").startswith(os.environ["HOSTNAME"]):
if container_data.get("Id").startswith(socket.gethostname()):
self.this_container = Container(container_data)

if not self.this_container:
raise ValueError("Cannot find metadata for backup container")

# Gather all running containers in the current compose setup
# Gather relevant containers
for container_data in all_containers:
container = Container(container_data)

@@ -429,25 +438,22 @@ def __init__(self):
if not container.is_running:
continue

# Outside swarm mode, filter by compose project unless all projects are included
if (
not config.swarm_mode
and not config.include_all_compose_projects
and container.project_name != self.this_container.project_name
):
continue

# Gather stop during backup containers
if container.stop_during_backup:
if config.swarm_mode:
self.stop_during_backup_containers.append(container)
else:
if container.project_name == self.this_container.project_name:
self.stop_during_backup_containers.append(container)
self.stop_during_backup_containers.append(container)

# Detect running backup process container
if container.is_backup_process_container:
self.backup_process_container = container

# --- Determine what containers should be evaluated

# If not swarm mode we need to filter in compose project
if not config.swarm_mode:
if container.project_name != self.this_container.project_name:
continue

# Containers started manually are not included
if container.is_oneoff:
continue
@@ -473,10 +479,14 @@ def backup_process_running(self) -> bool:
"""Is the backup process container running?"""
return self.backup_process_container is not None

def containers_for_backup(self):
def containers_for_backup(self) -> list[Container]:
"""Obtain all containers with backup enabled"""
return [container for container in self.containers if container.backup_enabled]

def networks_for_backup(self) -> set[str]:
"""Obtain all networks needed for backup"""
return {container.network_name for container in self.containers_for_backup()}

def generate_backup_mounts(self, dest_prefix="/volumes") -> dict:
"""Generate mounts for backup for the entire compose setup"""
mounts = {}
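For reference (not part of the diff), the new `network_name`, `ip_address`, and `networks_for_backup` helpers read the `NetworkSettings.Networks` block of `docker inspect` output; the values below are made up to show the shape being parsed:

```python
# Approximate shape of one container's inspect data (values are illustrative).
inspect_data = {
    "Id": "0123456789abcdef",
    "NetworkSettings": {
        "Networks": {
            "myproject_default": {        # keyed by network name
                "NetworkID": "f1e2d3c4b5a6",
                "IPAddress": "172.18.0.5",
            }
        }
    },
}

networks = inspect_data["NetworkSettings"]["Networks"]
first_network = list(networks.values())[0]  # details of the first attached network
print(first_network["NetworkID"])           # -> "f1e2d3c4b5a6"
print(first_network["IPAddress"])           # -> "172.18.0.5"

# Collecting these IDs into a set (as networks_for_backup does) deduplicates
# networks shared by several containers.
network_ids = {first_network["NetworkID"]}
```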
6 changes: 3 additions & 3 deletions src/restic_compose_backup/containers_db.py
@@ -21,7 +21,7 @@ def get_credentials(self) -> dict:
username = self.get_config_env("MARIADB_USER")
password = self.get_config_env("MARIADB_PASSWORD")
return {
"host": self.hostname,
"host": self.ip_address,
"username": username,
"password": password,
"port": "3306",
@@ -91,7 +91,7 @@ def get_credentials(self) -> dict:
username = self.get_config_env("MYSQL_USER")
password = self.get_config_env("MYSQL_PASSWORD")
return {
"host": self.hostname,
"host": self.ip_address,
"username": username,
"password": password,
"port": "3306",
@@ -155,7 +155,7 @@ class PostgresContainer(Container):
def get_credentials(self) -> dict:
"""dict: get credentials for the service"""
return {
"host": self.hostname,
"host": self.ip_address,
"username": self.get_config_env("POSTGRES_USER"),
"password": self.get_config_env("POSTGRES_PASSWORD"),
"port": "5432",
4 changes: 0 additions & 4 deletions src/restic_compose_backup/log.py
@@ -1,9 +1,7 @@
import logging
import os
import sys

logger = logging.getLogger("restic_compose_backup")
HOSTNAME = os.environ["HOSTNAME"]

DEFAULT_LOG_LEVEL = logging.INFO
LOG_LEVELS = {
@@ -22,7 +20,5 @@ def setup(level: str = "warning"):

ch = logging.StreamHandler(stream=sys.stdout)
ch.setLevel(level)
# ch.setFormatter(logging.Formatter('%(asctime)s - {HOSTNAME} - %(name)s - %(levelname)s - %(message)s'))
# ch.setFormatter(logging.Formatter('%(asctime)s - {HOSTNAME} - %(levelname)s - %(message)s'))
ch.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s: %(message)s"))
logger.addHandler(ch)
6 changes: 6 additions & 0 deletions src/tests/fixtures.py
@@ -53,6 +53,12 @@ def wrapper(*args, **kwargs):
"Status": "running",
"Running": True,
},
"NetworkSettings": {
"Networks": {
"NetworkID": "network-id",
"IPAddress": "10.0.0.1",
}
},
}
for container in containers
]
20 changes: 13 additions & 7 deletions src/tests/tests.py
@@ -1,4 +1,3 @@
import json
import os
import unittest
from unittest import mock
@@ -16,16 +15,21 @@
class BaseTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Set up basic environment variables"""
# os.environ['RESTIC_REPOSITORY'] = "test"
# os.environ['RESTIC_PASSWORD'] = "password"
cls.backup_hash = fixtures.generate_sha256()

cls.hostname_patcher = mock.patch(
"socket.gethostname", return_value=cls.backup_hash[:8]
)
cls.hostname_patcher.start()

@classmethod
def tearDownClass(cls):
cls.hostname_patcher.stop()

def createContainers(self):
backup_hash = fixtures.generate_sha256()
os.environ["HOSTNAME"] = backup_hash[:8]
return [
{
"id": backup_hash,
"id": self.backup_hash,
"service": "backup",
}
]
@@ -377,10 +381,12 @@ def test_stop_container_during_backup_database(self):
class IncludeAllVolumesTests(BaseTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
config.config.auto_backup_all = "true"

@classmethod
def tearDownClass(cls):
super().tearDownClass()
config.config = config.Config()

def test_all_volumes(self):
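A side note (not part of the diff): the class-level patcher introduced above is why the subclasses now call `super().setUpClass()` / `super().tearDownClass()`. A minimal standalone sketch of the same pattern:

```python
import socket
import unittest
from unittest import mock


class ExampleTestCase(unittest.TestCase):
    """Patch socket.gethostname for every test in the class."""

    @classmethod
    def setUpClass(cls):
        cls.hostname_patcher = mock.patch(
            "socket.gethostname", return_value="abc12345"
        )
        cls.hostname_patcher.start()

    @classmethod
    def tearDownClass(cls):
        # Without this, the patch would leak into other test classes.
        cls.hostname_patcher.stop()

    def test_hostname_is_patched(self):
        self.assertEqual(socket.gethostname(), "abc12345")
```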
1 change: 1 addition & 0 deletions stack-back.env.template
@@ -5,6 +5,7 @@
# SWARM_MODE=
INCLUDE_PROJECT_NAME=false
EXCLUDE_BIND_MOUNTS=false
INCLUDE_ALL_COMPOSE_PROJECTS=false
AUTO_BACKUP_ALL=true

RESTIC_REPOSITORY=/restic_backups