~mclehman/ryobackup

14f67c443da4480f2ad351b26d6c849a0bcf13e7 — 0xFORDCOMMA 3 years ago master
Initial commit.
2 files changed, 102 insertions(+), 0 deletions(-)

A META6.json
A bin/ryobackup
A  => META6.json +18 -0
@@ 1,18 @@
{
  "api": 1,
  "authors": [
    "0xford"
  ],
  "description": "",
  "license": "https://opensource.org/licenses/GPL-3.0",
  "name": "ryobackup",
  "perl": "6.d",
  "production": false,
  "provides": {

  },
  "scripts": {
    "test": "zef test ."
  },
  "version": "2020.01.17-break"
}

A  => bin/ryobackup +84 -0
@@ 1,84 @@
#!/usr/bin/env raku

use Wrapper::Utils;
use Wrapper::Borg;
use Kludges::YAML;

# Load the YAML backup configuration.
# Defaults to ~/.backuprc; pass an explicit path to override.
sub load-config($file = %*ENV<HOME> ~ '/.backuprc') {
    load-yaml-file($file)
}


# With no explicit targets, fall through to the main candidate using
# every repo name listed in the configuration.
multi MAIN(:%config = load-config()) {
    my @all-targets = %config<repos>.map({ $_<name> });
    samewith |@all-targets, :%config;
}

# Back up the named targets in parallel, then run the configured sync
# targets. Exits 1 if any requested target is not a configured repo.
multi MAIN(*@targets, :%config = load-config()) {
    my %sync-config = %config<sync>;
    my @repos = |%config<repos>;

    # Every requested target must match a configured repo name.
    unless all(@targets) (elem) @repos>><name> {
        note "Unknown target, aborting backup.";
        exit 1;
    }

    # Invoking these processes freely in parallel isn't safe when they require
    # certain env vars to be set; this lock ensures processes start with the
    # expected env vars.
    my $env-lock = Lock.new;

    # The admittedly ugly double await allows each target to in turn execute
    # async processes, such as running all hooks in parallel.
    # TODO: See about refactoring this.
    await await @repos.grep({ $_<name> ~~ any(@targets) }).map({ start { repo-backup $_, $env-lock } });

    # Sync phase. Snapshot the environment once so each target's <env>
    # overrides are composed against a clean base — previously overrides were
    # merged into the live %*ENV and leaked into every subsequent sync target.
    # TODO: Currently sync feels like an unnecessary special case. Adding
    #     support for phases as a higher level abstraction over targets in the
    #     configuration format could address this but then requires adding
    #     type tags for backup vs. syncing.
    my %base-env = %*ENV;
    await do for |%sync-config<targets> {
        note "[$_<process>]\t" ~ "Beginning sync.";
        my $sync-proc = realize-proc($_);
        if $_<env>.defined {
            %*ENV = %(|%base-env, |$_<env>);
        }
        else {
            # Restore the pristine environment for targets with no overrides.
            %*ENV = %base-env;
        }
        $sync-proc.start;
    }
}

# Run every condition process concurrently and succeed only when all of
# them exit 0. Returns an `all` junction that collapses truthy iff every
# exit code was zero.
sub conditions-ok(@conditions) {
    my @started = @conditions.map({ realize-proc($_).start });
    my @results = await @started;
    return all(@results.map({ .exitcode == 0 }));
}

# Perform the backup for a single repo described by %config.
# Checks preconditions, runs pre-hooks, then starts the borg process under
# $env-lock so its env-var splice can't race with other backups.
# Returns a Promise that completes when the backup process finishes
# (or a pre-kept Promise if a precondition failed and the backup was skipped).
sub repo-backup(%config, $env-lock) {
    note "[%config<name>]\t" ~ "Beginning backup.";

    # Preconditions are optional; when present, all must exit 0 or we skip.
    if %config<preconditions>.defined {
        note "[%config<name>]\t" ~ "Checking preconditions.";
        unless conditions-ok %config<preconditions> {
            # Return dummy successful promise signifying completion if preconditions fail.
            note "[%config<name>]\t" ~ "Precondition failed, skipping backup.";
            return Promise.kept();
        }
    }

    # Pre-hooks (if any) are started concurrently and all awaited before
    # the backup itself begins.
    if %config<hooks><pre>.defined {
        note "[%config<name>]\t" ~ "Running pre-hooks.";
        await do for |%config<hooks><pre> {
            realize-proc($_).start;
        }
    }

    # Build the borg process; each target entry is expanded via
    # realize-path-hash and flattened into borg-create's argument list.
    my $backup-proc = borg-create(repo => %config<repo-path>,
                                  compression => %config<compression>,
                                  |%config<targets>.values.map({ |realize-path-hash($_)}));

    # Route the process's merged output to the configured log (or discard it).
    log-merged($backup-proc, %config<log> // '/dev/null');

    # Prevent another thread from clobbering our environment updates
    my $backup-promise;
    $env-lock.protect({
        # Splice desired env vars into our current environment and reset before releasing lock.
        # NOTE(review): if .start throws, %*ENV is left modified — consider a
        # LEAVE phaser to guarantee restoration; confirm whether .start can throw here.
        my %original-env = %*ENV.clone;
        %*ENV = %(|%*ENV, |(%config<env> // {}));
        $backup-promise = $backup-proc.start;
        %*ENV = %original-env;
    });
    return $backup-promise;
}