Automatic deployment of webpages using git, hugo, and cron

My old workflow for updating this site was to run hugo locally, upload the output to the webhost, and then fix up permissions and move everything into the right place. The following script does all of this automatically:

#! /usr/bin/env python3
import pathlib
import tempfile
import os
from subprocess import check_call
import stat
import shutil

PUBLISH_DIR = pathlib.Path("your_web_root")


def apply_to_tree(path: pathlib.Path, function):
    # Apply function to every file and directory below path,
    # depth first; path itself is not touched.
    for item in path.iterdir():
        if item.is_dir():
            apply_to_tree(item, function)
        function(item)


if __name__ == "__main__":
    tempdir = tempfile.TemporaryDirectory()
    print(tempdir.name)
    os.chdir(tempdir.name)

    # In my case repourl points to the local directory where Gitea
    # stores the repositories
    check_call(["git", "clone", "repourl"])
    os.chdir("name-of-repo")
    # hugo writes the generated site into the public/ subdirectory
    check_call(["hugo"])

    def change_permissions(p: pathlib.Path):
        # Leave directories alone; make every generated file readable
        # (and only readable) by everyone.
        if p.is_dir():
            return
        os.chmod(p, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)

    apply_to_tree(
        pathlib.Path("public"),
        change_permissions,
    )
    # Hand the generated files over to the web server user.
    apply_to_tree(
        pathlib.Path("public"), lambda p: shutil.chown(p, "www-data", "www-data")
    )
    # Swap the freshly built site into place, keeping the old version
    # around as a backup until the swap has succeeded.
    backup_dir = PUBLISH_DIR.with_suffix(".backup")
    shutil.move(PUBLISH_DIR, backup_dir)
    try:
        shutil.move("public", PUBLISH_DIR)
    except Exception:
        # The swap failed: put the old site back and let the error propagate.
        shutil.move(backup_dir, PUBLISH_DIR)
        raise
    else:
        shutil.rmtree(backup_dir)
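
The script never removes the clone itself; it relies on the TemporaryDirectory object being cleaned up when the interpreter exits. If you prefer that cleanup to be explicit, the same main block can be wrapped in a context manager. A minimal sketch, with repourl and name-of-repo as placeholders like above:

#! /usr/bin/env python3
import os
import tempfile
from subprocess import check_call

if __name__ == "__main__":
    with tempfile.TemporaryDirectory() as tempdir:
        os.chdir(tempdir)
        check_call(["git", "clone", "repourl"])
        os.chdir("name-of-repo")
        check_call(["hugo"])
        # ... fix permissions, chown, and swap the directories as above ...
    # Leaving the with-block deletes the clone, even if a step above raised.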

This is run by root from a cron job every night at 02:30:

30 2 * * * /usr/sbin/deploy_me.py
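
Cron mails anything the job prints to the owner of the crontab, provided a local mail setup exists. If a log file is more convenient, the output can be redirected in the entry itself; /var/log/deploy_me.log is just a path picked for this example:

30 2 * * * /usr/sbin/deploy_me.py >> /var/log/deploy_me.log 2>&1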
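
Since the job rebuilds and redeploys every night whether or not anything was pushed, an optional refinement (not part of the script above) is to remember the last deployed commit and skip the work when the repository has not moved. A rough sketch, assuming a writable state file at the made-up location /var/lib/deploy_me.head:

import pathlib
from subprocess import check_output

STATE_FILE = pathlib.Path("/var/lib/deploy_me.head")  # hypothetical location


def remote_head(url: str) -> str:
    # git ls-remote reports the commit that HEAD points at without cloning.
    return check_output(["git", "ls-remote", url, "HEAD"], text=True).split()[0]


def should_rebuild(url: str) -> bool:
    head = remote_head(url)
    return not (STATE_FILE.exists() and STATE_FILE.read_text().strip() == head)


def record_deployed(url: str) -> None:
    # Call this only after the new site has actually been moved into place.
    STATE_FILE.write_text(remote_head(url))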