Yes I do. I cooked up a small Python script that runs at the end of every daily backup:
import json
import os
import subprocess

# Output directory
OUTPUT_DIR = "/data/dockerimages"
os.makedirs(OUTPUT_DIR, exist_ok=True)

# Grab all the Docker images; `docker images --format json` prints one JSON object per line
imagenes = subprocess.run(
    ["docker", "images", "--format", "json"],
    stdout=subprocess.PIPE,
    stderr=subprocess.DEVNULL,
).stdout.decode().splitlines()

for imagen in imagenes:
    if not imagen:
        continue
    datos = json.loads(imagen)
    # ID of the image to save
    imageid = datos["ID"]
    # Compose the output name like this:
    # ghcr.io-immich-app-immich-machine-learning:release:2026-01-28:3c42f025fb7c.tar
    outputname = f"{datos['Repository']}:{datos['Tag']}:{datos['CreatedAt'].split(' ')[0]}:{imageid}.tar".replace("/", "-")
    # If the file already exists, just skip it
    if not os.path.isfile(f"{OUTPUT_DIR}/{outputname}"):
        print(f"Saving {outputname}...")
        subprocess.run(["docker", "save", imageid, "-o", f"{OUTPUT_DIR}/{outputname}"])
    else:
        print(f"Already exists: {outputname}")

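The restore path is just `docker load`, which is the counterpart of `docker save`. A minimal sketch of what that could look like; the loop over the tarball directory is my own illustration, not part of the backup script above:

import os
import subprocess

# Same directory the backup script writes to
OUTPUT_DIR = "/data/dockerimages"

# Hypothetical restore helper: load every saved tarball back into Docker.
# `docker load -i <file>` restores the image with the same ID it was saved under.
for name in sorted(os.listdir(OUTPUT_DIR)):
    if not name.endswith(".tar"):
        continue
    print(f"Loading {name}...")
    subprocess.run(["docker", "load", "-i", os.path.join(OUTPUT_DIR, name)])
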
Glad I bought drives in December. As long as they don't break, they should easily hold me into 2030.