diff --git a/.github/workflows/molecule_tests.yml b/.github/workflows/molecule_tests.yml index c1faf3f707..72face7df1 100644 --- a/.github/workflows/molecule_tests.yml +++ b/.github/workflows/molecule_tests.yml @@ -30,6 +30,7 @@ jobs: - figgy_filewatcher_worker - figgy_pubsub_worker - freetds + - golang - gitlab # - geoserver - hr_share @@ -56,6 +57,7 @@ jobs: # - pas - passenger - php + - plakar - postfix - postgresql # - pulfalight diff --git a/devbox.json b/devbox.json index 2029779295..4a8857539d 100644 --- a/devbox.json +++ b/devbox.json @@ -6,17 +6,19 @@ "python311Packages.virtualenv", "ruby_3_2", "nodejs_20", - "awscli2", "lastpass-cli", - "git" + "git", + "google-cloud-sdk@latest", + "awscli@latest", + "awscli2@latest" ], "env": { - "PYTHON_VERSION": "3.11.1", - "RUBY_VERSION": "3.2.0", - "NODE_VERSION": "20", - "LPASS_AGENT_TIMEOUT": "32400", - "PATH": "$DEVBOX_PROJECT_ROOT/.venv/bin:$PATH", - "VIRTUAL_ENV": "$DEVBOX_PROJECT_ROOT/.venv", + "PYTHON_VERSION": "3.11.1", + "RUBY_VERSION": "3.2.0", + "NODE_VERSION": "20", + "LPASS_AGENT_TIMEOUT": "32400", + "PATH": "$DEVBOX_PROJECT_ROOT/.venv/bin:$PATH", + "VIRTUAL_ENV": "$DEVBOX_PROJECT_ROOT/.venv", "ANSIBLE_VAULT_PASSWORD_FILE": "$DEVBOX_PROJECT_ROOT/bin/lastpass-ansible", "ANSIBLE_VAULT_IDENTITY_LIST": "pul@$DEVBOX_PROJECT_ROOT/bin/lastpass-ansible,princeton@$DEVBOX_PROJECT_ROOT/bin/lastpass-ansible,ansible@$DEVBOX_PROJECT_ROOT/bin/lastpass-ansible,default@$DEVBOX_PROJECT_ROOT/bin/lastpass-ansible" }, diff --git a/devbox.lock b/devbox.lock index 6056c3909d..bb2343823c 100644 --- a/devbox.lock +++ b/devbox.lock @@ -1,17 +1,131 @@ { "lockfile_version": "1", "packages": { - "awscli2": { - "resolved": "github:NixOS/nixpkgs/32f313e49e42f715491e1ea7b306a87c16fe0388?narHash=sha256-nNaeJjo861wFR0tjHDyCnHs1rbRtrMgxAKMoig9Sj%2Fw%3D#awscli2", - "source": "nixpkg", + "awscli2@latest": { + "last_modified": "2025-11-30T18:29:45Z", + "resolved": "github:NixOS/nixpkgs/23258e03aaa49b3a68597e3e50eb0cbce7e42e9d#awscli2", + "source": "devbox-search", + "version": "2.31.39", "systems": { "aarch64-darwin": { "outputs": [ { - "path": "/nix/store/jg3mq99j92671mkxsv2y595mhz1idm0z-awscli2-2.28.1", + "name": "out", + "path": "/nix/store/h4b0scch07rfyy3jr5lv3cmj7myhn1nl-awscli2-2.31.39", "default": true + }, + { + "name": "dist", + "path": "/nix/store/nhnmx2dgfd8rd8pnyv396qzsq4gf0933-awscli2-2.31.39-dist" } - ] + ], + "store_path": "/nix/store/h4b0scch07rfyy3jr5lv3cmj7myhn1nl-awscli2-2.31.39" + }, + "aarch64-linux": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/bxbryfgqh3srva8c7snqr97iir5qrjka-awscli2-2.31.39", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/gc65qq3jlkk17ajk4yscz5j1kws6bcmb-awscli2-2.31.39-dist" + } + ], + "store_path": "/nix/store/bxbryfgqh3srva8c7snqr97iir5qrjka-awscli2-2.31.39" + }, + "x86_64-darwin": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/lpzh5s8cgp0z8a5l8dscmx2v0hcr3adm-awscli2-2.31.39", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/dxaq2vpl8pykf25bydc433mq521iha4q-awscli2-2.31.39-dist" + } + ], + "store_path": "/nix/store/lpzh5s8cgp0z8a5l8dscmx2v0hcr3adm-awscli2-2.31.39" + }, + "x86_64-linux": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/g0i13qaim2j6amiz22qzxinxyvgs0pb3-awscli2-2.31.39", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/b2l8xsgwsw7sg0jwwbsz53rpnv1sc1ns-awscli2-2.31.39-dist" + } + ], + "store_path": "/nix/store/g0i13qaim2j6amiz22qzxinxyvgs0pb3-awscli2-2.31.39" + } + } + }, + "awscli@latest": { + "last_modified": 
"2025-11-23T21:50:36Z", + "resolved": "github:NixOS/nixpkgs/ee09932cedcef15aaf476f9343d1dea2cb77e261#awscli", + "source": "devbox-search", + "version": "1.42.18", + "systems": { + "aarch64-darwin": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/2clwsf4dvcdh3p7jzfhpa744j467bh87-awscli-1.42.18", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/y89wxym3smjws52ddngi8ml283aifv0n-awscli-1.42.18-dist" + } + ], + "store_path": "/nix/store/2clwsf4dvcdh3p7jzfhpa744j467bh87-awscli-1.42.18" + }, + "aarch64-linux": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/8g9bjs1b1pzfy6v53m4580p3bw6w7rcb-awscli-1.42.18", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/j7841jck3pspvc6xvy8bg3avggv55d3s-awscli-1.42.18-dist" + } + ], + "store_path": "/nix/store/8g9bjs1b1pzfy6v53m4580p3bw6w7rcb-awscli-1.42.18" + }, + "x86_64-darwin": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/5vkjqj9fbkay8l7jjkhwnflkbh9qk44l-awscli-1.42.18", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/06d27cjqr6p9j7lsxql85y75182xrwnx-awscli-1.42.18-dist" + } + ], + "store_path": "/nix/store/5vkjqj9fbkay8l7jjkhwnflkbh9qk44l-awscli-1.42.18" + }, + "x86_64-linux": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/kv1428ahrpdrgidq8g2g9bj2488n2p1k-awscli-1.42.18", + "default": true + }, + { + "name": "dist", + "path": "/nix/store/9jrlm8iz2pcgkwp8lxr2ifd9rla0sb3y-awscli-1.42.18-dist" + } + ], + "store_path": "/nix/store/kv1428ahrpdrgidq8g2g9bj2488n2p1k-awscli-1.42.18" } } }, @@ -33,6 +147,54 @@ "last_modified": "2025-08-29T03:42:44Z", "resolved": "github:NixOS/nixpkgs/c73522789a3c7552b1122773d6eaa34e1491cc1c?lastModified=1756438964&narHash=sha256-yo473URkISSmBZeIE1o6Mf94VRSn5qFVFS9phb7l6eg%3D" }, + "google-cloud-sdk@latest": { + "last_modified": "2025-12-03T20:43:00Z", + "resolved": "github:NixOS/nixpkgs/ebc94f855ef25347c314258c10393a92794e7ab9#google-cloud-sdk", + "source": "devbox-search", + "version": "548.0.0", + "systems": { + "aarch64-darwin": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/47dysafhb5mm3hxkwx6hyxasv05nhxjb-google-cloud-sdk-548.0.0", + "default": true + } + ], + "store_path": "/nix/store/47dysafhb5mm3hxkwx6hyxasv05nhxjb-google-cloud-sdk-548.0.0" + }, + "aarch64-linux": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/mri3dcpp8v1a2rdj3899cwywic9x3qz6-google-cloud-sdk-548.0.0", + "default": true + } + ], + "store_path": "/nix/store/mri3dcpp8v1a2rdj3899cwywic9x3qz6-google-cloud-sdk-548.0.0" + }, + "x86_64-darwin": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/an736haac0wqqxyg20wwnwf0qywqax0y-google-cloud-sdk-548.0.0", + "default": true + } + ], + "store_path": "/nix/store/an736haac0wqqxyg20wwnwf0qywqax0y-google-cloud-sdk-548.0.0" + }, + "x86_64-linux": { + "outputs": [ + { + "name": "out", + "path": "/nix/store/w69yfd51aagvba8lnq8ab556bypmywlq-google-cloud-sdk-548.0.0", + "default": true + } + ], + "store_path": "/nix/store/w69yfd51aagvba8lnq8ab556bypmywlq-google-cloud-sdk-548.0.0" + } + } + }, "lastpass-cli": { "resolved": "github:NixOS/nixpkgs/32f313e49e42f715491e1ea7b306a87c16fe0388?narHash=sha256-nNaeJjo861wFR0tjHDyCnHs1rbRtrMgxAKMoig9Sj%2Fw%3D#lastpass-cli", "source": "nixpkg", diff --git a/group_vars/nfsserver/production.yml b/group_vars/nfsserver/production.yml index f9b2d6e869..61cf40c98e 100644 --- a/group_vars/nfsserver/production.yml +++ b/group_vars/nfsserver/production.yml @@ -1,4 +1,14 @@ --- +# plakar backups +plakar_store_name: "nfs_prod_aws" +plakar_repo_passphrase: "{{ 
vault_plakar_repo_passphrase }}" +plakar_repo_bucket: "pul-nfs-backup" +plakar_repo_access_key: "{{ vault_plakar_aws_access_key }}" +plakar_repo_secret_key: "{{ vault_plakar_aws_secret_key }}" +plakar_snapshot_root: "/var/nfs" +plakar_configure_backup: true +plakar_run_initial_backup: false +plakar_scheduler_enabled: true # servers bibdata_prod1: "128.112.201.179" bibdata_prod2: "128.112.203.79" diff --git a/group_vars/nfsserver/qa.yml b/group_vars/nfsserver/qa.yml index 8906e9e813..5bead35dc4 100644 --- a/group_vars/nfsserver/qa.yml +++ b/group_vars/nfsserver/qa.yml @@ -1,4 +1,13 @@ --- +# plakar backups +plakar_store_name: "nfs_qa_aws" +plakar_repo_passphrase: "{{ vault_plakar_repo_passphrase }}" +plakar_repo_bucket: "pul-nfs-backup" +plakar_repo_access_key: "{{ vault_plakar_aws_access_key }}" +plakar_repo_secret_key: "{{ vault_plakar_aws_secret_key }}" +plakar_snapshot_root: "/var/nfs" +plakar_configure_backup: true +plakar_run_initial_backup: false # servers bibdata_qa1: "172.20.80.89" bibdata_qa2: "172.20.80.97" diff --git a/group_vars/nfsserver/staging.yml b/group_vars/nfsserver/staging.yml index 9093961657..19b40fbdef 100644 --- a/group_vars/nfsserver/staging.yml +++ b/group_vars/nfsserver/staging.yml @@ -1,4 +1,13 @@ --- +# plakar backups +plakar_store_name: "nfs_aws" +plakar_repo_passphrase: "{{ vault_plakar_repo_passphrase }}" +plakar_repo_bucket: "pul-nfs-backup" +plakar_repo_access_key: "{{ vault_plakar_aws_access_key }}" +plakar_repo_secret_key: "{{ vault_plakar_aws_secret_key }}" +plakar_snapshot_root: "/var/nfs" +plakar_configure_backup: true +plakar_run_initial_backup: false # servers bibdata_staging1: "172.20.80.66" bibdata_staging2: "172.20.80.64" diff --git a/group_vars/nfsserver/vault.yml b/group_vars/nfsserver/vault.yml new file mode 100644 index 0000000000..1ba842ae5d --- /dev/null +++ b/group_vars/nfsserver/vault.yml @@ -0,0 +1,15 @@ +$ANSIBLE_VAULT;1.2;AES256;pul +62653165653039313136353938343030633139646338326364353037623566656561353866376630 +3262616261373131616231623861363431343234333833360a393932643732613533626238633635 +37343864643837616365626465383664303930393930303132613839393137353566316264303539 +3361663762623137640a346635666136623236643235613730353036343265393835323062343063 +39343665666132626230313065626461303630313762373539313739613563346161373163663832 +61343463623532376564303233393763653233336231386131616165613934326366353131666537 +33383136343466313164346331663964386335383732373431663732313735363961343431656139 +30343139666364303563633162383038633338353637653566323266643765663965333831373637 +64376130656165366137333437643363356634396631653635393732386665303431653534363131 +30613638666635643038613132373132386633666561366533633832643234303661313833383138 +39653935666234626535356664363739396236623631326138653366613633366130353133633163 +35386266616537373436633135343562383365313463303961363238643234353935656561356332 +63643137633833326633623163626662663537616334373436643963633666383033363738613038 +6463333863383335303262323035633039613433613766643037 diff --git a/playbooks/nfsserver.yml b/playbooks/nfsserver.yml index 13b9ec7ee8..f522a09bc1 100644 --- a/playbooks/nfsserver.yml +++ b/playbooks/nfsserver.yml @@ -10,7 +10,9 @@ vars_files: - ../group_vars/nfsserver/{{ runtime_env | default('staging') }}.yml - ../group_vars/nfsserver/common.yml + - ../group_vars/nfsserver/vault.yml roles: + - role: roles/plakar - role: roles/nfsserver post_tasks: diff --git a/roles/golang/README.md b/roles/golang/README.md new file mode 100644 index 0000000000..3edb0e25dc --- 
/dev/null
+++ b/roles/golang/README.md
@@ -0,0 +1,92 @@
+# golang
+
+This role installs a specific version of the Go toolchain from the official
+`go.dev` tarballs into `/usr/local/go`, and ensures `go` is available on
+`$PATH` via `/usr/local/bin/go`.
+
+---
+
+## What it does
+
+On each run, the role:
+
+1. Figures out the correct architecture string (`amd64` / `arm64`) when
+   `golang_arch` is left empty; otherwise it uses the configured value.
+2. Checks if `{{ golang_install_dir }}/bin/go` exists and what version it is.
+3. If the version doesn’t match `golang_version`:
+   - Downloads `https://go.dev/dl/go{{ golang_version }}.linux-{{ golang_arch }}.tar.gz`
+     into `{{ golang_download_dir }}`.
+   - Removes any existing installation at `{{ golang_install_dir }}`.
+   - Extracts the new Go tree into `/usr/local`.
+4. Ensures a symlink `/usr/local/bin/go` → `{{ golang_install_dir }}/bin/go`
+   exists so `go` is on the PATH for non-interactive commands.
+
+The role is idempotent: if the requested version is already installed, no
+downloads or changes occur.
+
+> Note: This role assumes a typical Linux layout where `/usr/local` is
+> writable by `root` and is intended to be run with `become: true`.
+
+---
+
+## Default variables
+
+Defined in `roles/golang/defaults/main.yml`:
+
+```yaml
+# Go version to install (from go.dev)
+golang_version: "1.25.5"
+
+# Architecture string for the Go tarball. Defaults to amd64; set it to an
+# empty string to auto-detect from ansible_architecture, or override it
+# explicitly.
+golang_arch: "amd64"
+
+# Where to cache downloaded tarballs
+golang_download_dir: "/usr/local/src"
+
+# Where Go will be installed
+golang_install_dir: "/usr/local/go"
+```
+
+You can override these in group/host vars as needed, for example to pin a
+different version:
+
+```yaml
+golang_version: "1.23.3"
+```
+
+## Example usage
+
+Simple playbook:
+
+```yaml
+- name: Install modern Go from go.dev
+  hosts: my_build_hosts
+  become: true
+
+  roles:
+    - role: golang
+```
+
+With overrides:
+
+```yaml
+- name: Install Go 1.23.3 on AMD64
+  hosts: my_build_hosts
+  become: true
+
+  vars:
+    golang_version: "1.23.3"
+    golang_download_dir: "/var/cache/go-downloads"
+
+  roles:
+    - role: golang
+```
+
+After the role runs, you should see something like:
+
+```bash
+$ go version
+go1.23.3 linux/amd64
+```
+
+and the binaries under `/usr/local/go/bin`.
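+
+## Verifying the download (optional)
+
+The download task passes an optional `golang_tarball_checksum` variable
+through to the `checksum` argument of `ansible.builtin.get_url` (it is
+omitted by default). A sketch of pinning it in group/host vars; the hash
+below is a placeholder, use the one published for your version on
+<https://go.dev/dl/>:
+
+```yaml
+golang_version: "1.25.5"
+golang_tarball_checksum: "sha256:0123456789abcdef..."  # placeholder hash
+```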
diff --git a/roles/golang/defaults/main.yml b/roles/golang/defaults/main.yml
new file mode 100644
index 0000000000..90c3f0c03b
--- /dev/null
+++ b/roles/golang/defaults/main.yml
@@ -0,0 +1,12 @@
+---
+# defaults file for roles/golang
+golang_version: "1.25.5"
+
+# Architecture string for the Go tarball. Defaults to amd64; set it to an
+# empty string to auto-detect from ansible_architecture, or override it
+# explicitly (e.g. for local molecule builds).
+golang_arch: "amd64"
+
+# Where to cache downloaded tarballs
+golang_download_dir: "/usr/local/src"
+
+# Where Go will be installed
+golang_install_dir: "/usr/local/go"
diff --git a/roles/golang/handlers/main.yml b/roles/golang/handlers/main.yml
new file mode 100644
index 0000000000..ab405ed241
--- /dev/null
+++ b/roles/golang/handlers/main.yml
@@ -0,0 +1,2 @@
+---
+# handlers file for roles/golang
diff --git a/roles/golang/meta/main.yml b/roles/golang/meta/main.yml
new file mode 100644
index 0000000000..2185c24753
--- /dev/null
+++ b/roles/golang/meta/main.yml
@@ -0,0 +1,18 @@
+---
+galaxy_info:
+  role_name: golang
+  company: Princeton University Library
+  description: Golang Role
+  author: pulibrary
+
+  license: MIT
+
+  min_ansible_version: 2.9
+
+  platforms:
+    - name: Ubuntu
+      versions:
+        - jammy
+dependencies:
+  - role: "common"
diff --git a/roles/golang/molecule/default/ansible.cfg b/roles/golang/molecule/default/ansible.cfg
new file mode 100644
index 0000000000..15849adec5
--- /dev/null
+++ b/roles/golang/molecule/default/ansible.cfg
@@ -0,0 +1,2 @@
+[defaults]
+remote_tmp = /tmp/ansible
diff --git a/roles/golang/molecule/default/converge.yml b/roles/golang/molecule/default/converge.yml
new file mode 100644
index 0000000000..4ea706f721
--- /dev/null
+++ b/roles/golang/molecule/default/converge.yml
@@ -0,0 +1,11 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    - running_on_server: false
+  become: true
+  tasks:
+    # Change this when copying to another role
+    - name: "Include golang"
+      ansible.builtin.include_role:
+        name: golang
diff --git a/roles/golang/molecule/default/molecule.yml b/roles/golang/molecule/default/molecule.yml
new file mode 100644
index 0000000000..f4654c2a1c
--- /dev/null
+++ b/roles/golang/molecule/default/molecule.yml
@@ -0,0 +1,38 @@
+---
+scenario:
+  name: default
+  test_sequence:
+    - dependency
+    - syntax
+    - create
+    - prepare
+    - converge
+    - verify
+    - destroy
+driver:
+  name: docker
+lint: |
+  set -e
+  yamllint .
+ ansible-lint +platforms: + - name: instance + image: "ghcr.io/pulibrary/vm-builds/ubuntu-22.04" + command: "" # image above already has the /sbin/init + tmpfs: + - /run + - /run/lock + - /tmp + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:rw + cgroupns_mode: host + privileged: true + pre_build_image: true +provisioner: + name: ansible + config_options: + defaults: + remote_tmp: /tmp + log: true +verifier: + name: ansible diff --git a/roles/golang/molecule/default/verify.yml b/roles/golang/molecule/default/verify.yml new file mode 100644 index 0000000000..98801b6543 --- /dev/null +++ b/roles/golang/molecule/default/verify.yml @@ -0,0 +1,42 @@ +--- +- name: Verify + hosts: all + gather_facts: false + vars: + - expected_golang_version: "1.25.5" + tasks: + - name: Check if Go binary exists at install location + ansible.builtin.stat: + path: /usr/local/go/bin/go + register: go_binary + + - name: Assert Go binary is present and executable + ansible.builtin.assert: + that: + - go_binary.stat.exists + - go_binary.stat.executable + fail_msg: "Go binary not found or not executable at /usr/local/go/bin/go" + + - name: Check if the symlink exists in /usr/local/bin + ansible.builtin.stat: + path: /usr/local/bin/go + register: go_symlink + + - name: Assert symlink is valid and points to the correct source + ansible.builtin.assert: + that: + - go_symlink.stat.exists + - go_symlink.stat.islnk + - go_symlink.stat.lnk_source == '/usr/local/go/bin/go' + fail_msg: "/usr/local/bin/go is missing or does not point to /usr/local/go/bin/go" + + - name: Get installed Go version + ansible.builtin.command: /usr/local/bin/go version + register: go_version_output + changed_when: false + + - name: Assert installed version matches expectation + ansible.builtin.assert: + that: + - "'go' ~ expected_golang_version in go_version_output.stdout" + fail_msg: "Expected Go version {{ expected_golang_version }}, but got: {{ go_version_output.stdout }}" diff --git a/roles/golang/tasks/main.yml b/roles/golang/tasks/main.yml new file mode 100644 index 0000000000..73b021e779 --- /dev/null +++ b/roles/golang/tasks/main.yml @@ -0,0 +1,58 @@ +--- +# roles/golang +- name: Golang | Ensure download directory exists + ansible.builtin.file: + path: "{{ golang_download_dir }}" + state: directory + mode: "0755" + +- name: Golang | Determine architecture string (if not overridden) + ansible.builtin.set_fact: + golang_arch: >- + {{ 'amd64' if ansible_architecture in ['x86_64', 'amd64'] else + 'arm64' if ansible_architecture in ['aarch64', 'arm64'] else + golang_arch }} + when: golang_arch is not defined or golang_arch | length == 0 + +- name: Golang | Check existing Go installation + ansible.builtin.command: "{{ golang_install_dir }}/bin/go version" + register: golang_current_version_cmd + ignore_errors: true + changed_when: false + +- name: Golang | Parse current Go version + ansible.builtin.set_fact: + golang_current_version: >- + {{ (golang_current_version_cmd.stdout.split()[2] | default('')) | regex_replace('^go', '') }} + when: golang_current_version_cmd.rc == 0 + +- name: Golang | Decide if install/upgrade is needed + ansible.builtin.set_fact: + golang_install_needed: "{{ golang_current_version is not defined or golang_current_version != golang_version }}" + +- name: Golang | Download Go tarball for {{ golang_version }} + ansible.builtin.get_url: + url: "https://go.dev/dl/go{{ golang_version }}.linux-{{ golang_arch }}.tar.gz" + dest: "{{ golang_download_dir }}/go{{ golang_version }}.linux-{{ golang_arch }}.tar.gz" + mode: "0644" + checksum: 
"{{ golang_tarball_checksum | default(omit) }}" + when: golang_install_needed + +- name: Golang | Remove existing installation at {{ golang_install_dir }} + ansible.builtin.file: + path: "{{ golang_install_dir }}" + state: absent + when: golang_install_needed + +- name: Golang | Extract Go {{ golang_version }} + ansible.builtin.unarchive: + src: "{{ golang_download_dir }}/go{{ golang_version }}.linux-{{ golang_arch }}.tar.gz" + dest: "/usr/local" + remote_src: true + when: golang_install_needed + +- name: Golang | Ensure /usr/local/bin/go symlink exists + ansible.builtin.file: + src: "{{ golang_install_dir }}/bin/go" + dest: "/usr/local/bin/go" + state: link diff --git a/roles/golang/tests/inventory b/roles/golang/tests/inventory new file mode 100644 index 0000000000..878877b077 --- /dev/null +++ b/roles/golang/tests/inventory @@ -0,0 +1,2 @@ +localhost + diff --git a/roles/golang/tests/test.yml b/roles/golang/tests/test.yml new file mode 100644 index 0000000000..126e0d1825 --- /dev/null +++ b/roles/golang/tests/test.yml @@ -0,0 +1,5 @@ +--- +- hosts: localhost + remote_user: root + roles: + - roles/golang diff --git a/roles/golang/vars/main.yml b/roles/golang/vars/main.yml new file mode 100644 index 0000000000..5cf27a0769 --- /dev/null +++ b/roles/golang/vars/main.yml @@ -0,0 +1,2 @@ +--- +# vars file for roles/golang diff --git a/roles/plakar/README.md b/roles/plakar/README.md new file mode 100644 index 0000000000..dea4d1e073 --- /dev/null +++ b/roles/plakar/README.md @@ -0,0 +1,375 @@ +# Plakar + +This role installs [plakar](https://plakar.io), builds the S3 integration plugin, and configures a backup repository stored in **AWS S3**. + +It is designed so that: + +- Servers never use personal GitHub or personal plakar.io accounts. (which is what the project documentation focuses on) +- S3 credentials and repository passphrase live in **Ansible Vault**. +- Each project (NFS, app servers, etc.) can define its own: + - S3 bucket + - IAM user / access keys + - store name (`plakar_store_name`) + - snapshot root path (`plakar_snapshot_root`) + +--- + +## AWS Setup (one-time per bucket / project) + +These steps are written for a single project (e.g. NFS backups). +Other projects can repeat the same pattern with a different bucket and IAM user. + +### Step 1: Create the S3 bucket + +1. Log in to the **AWS Management Console**. +2. Go to **S3** (search for “S3” in the top search bar). +3. Click **Create bucket**. +4. Set: + - **Bucket name**: e.g. `pul-nfs-backup` (must be globally unique). + - **AWS Region**: `us-east-1` +5. Under **Block Public Access**: + - Ensure **“Block all public access”** is **checked** (enabled). +6. Click **Create bucket** at the bottom. + +### Step 2: Create a dedicated IAM user + +Never use the AWS root account or a human user for automated backups. + +1. Go to **IAM**. +2. Click **Users** in the left sidebar. +3. Click **Create user**. +4. Set: + - **User name**: `plakar-nfs-backup-agent` (or similar, for example `plakar-postgresql-backup-agent`). +5. Click **Next** to go to permissions. + +### Step 3: Grant permissions + +Add the user to the `plakar-backup` Group with the `S3Full` Permissions + +### Step 4: Generate access keys + +1. After creating the user, click on the user name (e.g. `plakar-backup-agent`). +2. Go to the **Security credentials** tab. +3. Scroll to **Access keys** and click **Create access key**. +4. Choose **Command Line Interface (CLI)** (or “Other”). +5. Confirm the warning (checkbox) and click **Next**. +6. Click **Create access key**. 
+
+You’ll now see:
+
+- **Access key ID**
+- **Secret access key**
+
+### Step 5: Save the keys in Ansible Vault
+
+1. Copy both the **Access key ID** and **Secret access key**.
+2. In your Ansible repo, open the appropriate Vault file (e.g. `group_vars/nfsserver/vault.yml`) and add:
+
+   ```yaml
+   vault_plakar_aws_access_key: "AKIA...."
+   vault_plakar_aws_secret_key: "super-secret-key-here"
+   vault_plakar_repo_passphrase: "super-long-random-passphrase"
+   ```
+
+3. Save and re-encrypt the file if needed:
+
+   ```bash
+   ansible-vault edit group_vars/nfsserver/vault.yml
+   ```
+
+> You will not be able to see the Secret Access Key in AWS again after leaving that page.
+> Treat it like a password.
+
+## Variables
+
+The role ships with sensible defaults in `roles/plakar/defaults/main.yml`, but these are the key variables you typically override per project / environment:
+
+```yaml
+# Which Unix user runs plakar (and owns ~/.config/.plakar)
+plakar_user: "pulsys"
+
+# Name of the AWS-backed store as seen by plakar
+plakar_store_name: "nfs_aws"
+
+# AWS S3 configuration
+plakar_repo_bucket: "pul-nfs-backup"  # your bucket
+plakar_repo_region: "us-east-1"
+
+# From Ansible Vault
+plakar_repo_access_key: "{{ vault_plakar_aws_access_key }}"
+plakar_repo_secret_key: "{{ vault_plakar_aws_secret_key }}"
+plakar_repo_passphrase: "{{ vault_plakar_repo_passphrase }}"
+
+# What to back up
+plakar_snapshot_root: "/var/nfs"
+plakar_snapshot_excludes:
+  - "*.tmp"
+  - "cache/"
+
+# Control whether the role creates the repo + runs an initial backup
+plakar_configure_backup: true
+plakar_run_initial_backup: true
+```
+
+The role will:
+
+1. Install plakar from the upstream apt repository.
+
+2. Build and install the S3 plugin if it’s not already installed.
+
+3. Configure an AWS store:
+
+   ```bash
+   plakar store add nfs_aws \
+     location=s3://s3.us-east-1.amazonaws.com/pul-nfs-backup \
+     access_key=... secret_access_key=...
+   ```
+
+4. Initialize the repository in that store:
+
+   ```bash
+   plakar at @nfs_aws create
+   ```
+
+5. Run the initial backup (if `plakar_run_initial_backup: true`):
+
+   ```bash
+   plakar at @nfs_aws backup /var/nfs
+   ```
+
+### Example: using this role for NFS backups
+
+Playbook (`playbooks/nfsserver.yml`):
+
+```yaml
+- name: configure nfsserver connection
+  hosts: lib_fs_{{ runtime_env | default('staging') }}
+  remote_user: pulsys
+  become: true
+
+  vars_files:
+    - ../group_vars/nfsserver/{{ runtime_env | default('staging') }}.yml
+    - ../group_vars/nfsserver/common.yml
+    - ../group_vars/nfsserver/vault.yml
+
+  roles:
+    - role: roles/plakar
+    - role: roles/nfsserver
+```
+
+Group vars (`group_vars/nfsserver/staging.yml`):
+
+```yaml
+# plakar backups
+plakar_store_name: "nfs_aws"
+plakar_repo_passphrase: "{{ vault_plakar_repo_passphrase }}"
+plakar_repo_bucket: "pul-nfs-backup"
+plakar_repo_access_key: "{{ vault_plakar_aws_access_key }}"
+plakar_repo_secret_key: "{{ vault_plakar_aws_secret_key }}"
+plakar_snapshot_root: "/var/nfs"
+plakar_configure_backup: true
+plakar_run_initial_backup: true
+```
+
+After running the playbook, you can verify from the host:
+
+```bash
+sudo -u pulsys plakar store show nfs_aws
+sudo -u pulsys plakar at @nfs_aws ls
+```
+
+And from AWS:
+
+```bash
+aws s3 ls s3://pul-nfs-backup
+```
+
+You should see plakar’s packfiles stored in the bucket.
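+
+A note on passphrase prompts: the role’s own tasks run non-interactively by
+passing the passphrase in the `PLAKAR_PASSPHRASE` environment variable, and
+you can do the same when poking at the repository by hand. A sketch, assuming
+your sudo policy allows preserving the variable:
+
+```bash
+export PLAKAR_PASSPHRASE='<value of vault_plakar_repo_passphrase>'
+sudo -E -u pulsys plakar at @nfs_aws ls
+```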
+
+#### Reusing the role for other projects
+
+For a different project (e.g. backups of `/var/lib/postgresql`):
+
+1. Create a new S3 bucket + IAM user as above (or reuse an existing one).
+
+2. Add new vaulted secrets for that project’s access key / secret.
+
+3. Create a new `group_vars/postgresql/<env>.yml` (e.g. `staging.yml`) that sets:
+
+   ```yaml
+   plakar_store_name: "postgresql_aws"
+   plakar_repo_bucket: "pul-postgresql-backup"
+   plakar_repo_access_key: "{{ vault_plakar_postgresql_aws_access_key }}"
+   plakar_repo_secret_key: "{{ vault_plakar_postgresql_aws_secret_key }}"
+   plakar_repo_passphrase: "{{ vault_plakar_postgresql_repo_passphrase }}"
+   plakar_snapshot_root: "/var/lib/postgresql"
+   plakar_configure_backup: true
+   plakar_run_initial_backup: true
+   ```
+
+4. Include the `plakar` role in that project’s playbook.
+
+## Restore / Disaster Recovery
+
+This section covers how to restore data from plakar when the repository is stored in AWS S3 via the `@` pattern used by this role (e.g. `@nfs_aws`).
+
+There are two common scenarios:
+
+1. **Restore on the same host** (repo + store already configured by Ansible).
+2. **Restore on a new host** (fresh machine, but S3 bucket still exists).
+
+In both cases you’ll use `plakar restore` against the AWS-backed store. The syntax is:
+
+```bash
+plakar at @<store> restore -to <destination> <snapshot_id>[:<path>]
+```
+
+as documented in the plakar restore / quickstart guides.
+
+> **Passphrase reminder:** The Kloset store is encrypted. When prompted, use the passphrase stored in Ansible Vault as `vault_plakar_repo_passphrase`.
+
+### 1. Restoring on the same host
+
+Assumptions:
+
+- The host is already managed by Ansible with this role.
+
+- The store name is `nfs_aws` (or whatever you set in `plakar_store_name`).
+
+- The S3 bucket and repo already exist.
+
+#### 1.1 List available snapshots
+
+As the `plakar_user` (usually `pulsys`):
+
+```bash
+sudo -u pulsys plakar at @nfs_aws ls
+```
+
+You’ll see a list of snapshot IDs (e.g. `9abc3294...`).
+
+If plakar asks for a **repository passphrase**, use the value from:
+
+```yaml
+vault_plakar_repo_passphrase: "..."
+```
+
+in your project’s Vault file.
+
+#### 1.2 Restore an entire snapshot to a temporary location
+
+Pick a snapshot ID (e.g. `9abc3294`) and restore it under `/mnt/restore`:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws restore -to /mnt/restore 9abc3294
+```
+
+This will recreate the snapshot’s directory tree under `/mnt/restore`.
+
+#### 1.3 Restore only a specific path
+
+You can restore just a subdirectory (e.g. `/var/nfs/bibdata`) from that snapshot:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws restore -to /mnt/restore 9abc3294:/var/nfs/bibdata
+```
+
+This is useful if you only need to recover one exported share.
+
+#### 1.4 Copy restored data back into place
+
+Once you’ve inspected the restored files under `/mnt/restore`, you can copy them back into the live filesystem. For example:
+
+```bash
+# Example only – adjust paths/flags to your policy
+sudo rsync -aHAX --delete /mnt/restore/var/nfs/ /var/nfs/
+```
+
+### 2. Restoring on a new host
+
+Assumptions:
+
+- The original host was lost, but the S3 bucket is intact.
+
+- You still have the Ansible repo and Vault file with:
+
+  - `vault_plakar_aws_access_key`
+
+  - `vault_plakar_aws_secret_key`
+
+  - `vault_plakar_repo_passphrase`
+
+#### 2.1 Rebuild the host and run Ansible
+
+Provision a new VM / server, then:
+
+1. Attach storage and create the mount point (e.g. `/var/nfs`).
+
+2. Run the same Ansible playbook you use normally, with the same inventory / vars:
+
+```bash
+ansible-playbook playbooks/nfsserver.yml
+```
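+
+If you target a specific environment, pass `runtime_env` explicitly, the same
+way the playbook’s `vars_files` expect it. A sketch (inventory flags per your
+usual workflow):
+
+```bash
+ansible-playbook playbooks/nfsserver.yml -e runtime_env=production
+```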
+
+This will:
+
+- Install plakar and the S3 plugin.
+
+- Re-create the `nfs_aws` store pointing at your existing S3 bucket.
+
+- Not overwrite data in `/var/nfs` unless you also run the backup step.
+
+#### 2.2 Verify connectivity to the store
+
+On the new host:
+
+```bash
+sudo -u pulsys plakar store show nfs_aws
+sudo -u pulsys plakar at @nfs_aws ls
+```
+
+You should see the existing snapshots from S3.
+
+#### 2.3 Restore data to the new host
+
+Restore all data to a staging location first:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws restore -to /mnt/restore 9abc3294
+```
+
+Inspect the restored tree, then copy it into place:
+
+```bash
+sudo rsync -aHAX --delete /mnt/restore/var/nfs/ /var/nfs/
+```
+
+When you’re satisfied, clean up the staging copy under `/mnt/restore`.
+
+### 3. Quick reference
+
+List snapshots:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws ls
+```
+
+Inspect a snapshot in the UI:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws ui
+```
+
+Then tunnel to the localhost port it reports (e.g. with SSH port forwarding) to open the UI in your browser.
+
+Restore everything from a snapshot:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws restore -to /mnt/restore SNAPSHOT_ID
+```
+
+Restore a single directory:
+
+```bash
+sudo -u pulsys plakar at @nfs_aws restore -to /mnt/restore SNAPSHOT_ID:/var/nfs/bibdata
+```
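+
+### 4. Scheduled backups
+
+When `plakar_scheduler_enabled` is true, the role renders a scheduler task
+file and starts the plakar scheduler against it (see
+`roles/plakar/tasks/configure_backup.yml`). The knobs, with their defaults:
+
+```yaml
+plakar_scheduler_enabled: false
+plakar_scheduler_config_path: "/etc/plakar/scheduler.yaml"
+plakar_scheduler_interval: "24h"
+```
+
+Production (`group_vars/nfsserver/production.yml`) turns this on so
+`/var/nfs` is backed up on a 24h interval.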
+
+Remember: when prompted for the passphrase, use `vault_plakar_repo_passphrase` from Ansible Vault.
diff --git a/roles/plakar/defaults/main.yml b/roles/plakar/defaults/main.yml
new file mode 100644
index 0000000000..750e21f2bb
--- /dev/null
+++ b/roles/plakar/defaults/main.yml
@@ -0,0 +1,46 @@
+---
+# defaults file for roles/plakar
+plakar_user: "pulsys"
+plakar_apt_repo_url: "https://packages.plakar.io/deb"
+plakar_apt_repo_suite: "stable"
+plakar_apt_repo_component: "main"
+plakar_apt_key_url: "https://packages.plakar.io/keys/plakar.gpg"
+plakar_apt_key_path: "/usr/share/keyrings/plakar.gpg"
+
+# Directory used to build the S3 plugin (ptar ends up here)
+plakar_s3_build_dir: "/var/lib/plakar/plugins-build"
+
+# S3 plugin version & arch for the expected ptar filename.
+# Adjust arch or version as needed if upstream changes.
+plakar_s3_plugin_version: "1.0.7"
+plakar_s3_plugin_arch: "linux_amd64"
+
+# Derived plugin filename (override if needed)
+plakar_s3_plugin_filename: "s3_v{{ plakar_s3_plugin_version }}_{{ plakar_s3_plugin_arch }}.ptar"
+
+# Whether to include configure_backup.yml
+plakar_configure_backup: false
+
+# Whether to actually run "plakar create" and "plakar backup".
+# Set to true for a one-time bootstrap run that seeds the repository.
+plakar_run_initial_backup: false
+
+# Repository (AWS S3 API) config
+plakar_store_name: ""
+plakar_repo_bucket: "YOUR_S3_BUCKET_NAME"
+plakar_repo_region: "us-east-1"
+
+plakar_repo_access_key: "AWS_ACCESS_KEY"
+plakar_repo_secret_key: "AWS_SECRET_KEY"
+
+plakar_repo_passphrase: "CHANGE-ME-IN-VAULT"
+
+# Snapshot config
+plakar_snapshot_root: "/path/to/backup"
+plakar_snapshot_excludes:
+  - "*.tmp"
+  - "cache/"
+
+# Scheduler configuration
+plakar_scheduler_enabled: false
+plakar_scheduler_config_path: "/etc/plakar/scheduler.yaml"
+plakar_scheduler_interval: "24h"
diff --git a/roles/plakar/handlers/main.yml b/roles/plakar/handlers/main.yml
new file mode 100644
index 0000000000..ffea8825ac
--- /dev/null
+++ b/roles/plakar/handlers/main.yml
@@ -0,0 +1,2 @@
+---
+# handlers file for roles/plakar
diff --git a/roles/plakar/meta/main.yml b/roles/plakar/meta/main.yml
new file mode 100644
index 0000000000..789b0c0aa7
--- /dev/null
+++ b/roles/plakar/meta/main.yml
@@ -0,0 +1,17 @@
+---
+galaxy_info:
+  role_name: plakar
+  company: Princeton University Library
+  description: Plakar Backup
+  author: pulibrary
+
+  license: MIT
+
+  min_ansible_version: 2.2
+
+  platforms:
+    - name: Ubuntu
+      versions:
+        - jammy
+dependencies:
+  - role: "golang"
diff --git a/roles/plakar/molecule/default/ansible.cfg b/roles/plakar/molecule/default/ansible.cfg
new file mode 100644
index 0000000000..15849adec5
--- /dev/null
+++ b/roles/plakar/molecule/default/ansible.cfg
@@ -0,0 +1,2 @@
+[defaults]
+remote_tmp = /tmp/ansible
diff --git a/roles/plakar/molecule/default/converge.yml b/roles/plakar/molecule/default/converge.yml
new file mode 100644
index 0000000000..209ffec807
--- /dev/null
+++ b/roles/plakar/molecule/default/converge.yml
@@ -0,0 +1,11 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    - running_on_server: false
+  become: true
+  tasks:
+    # Change this when copying to another role
+    - name: "Include plakar"
+      ansible.builtin.include_role:
+        name: plakar
diff --git a/roles/plakar/molecule/default/molecule.yml b/roles/plakar/molecule/default/molecule.yml
new file mode 100644
index 0000000000..f4654c2a1c
--- /dev/null
+++ b/roles/plakar/molecule/default/molecule.yml
@@ -0,0 +1,38 @@
+---
+scenario:
+  name: default
+  test_sequence:
+    - dependency
+    - syntax
+    - create
+    - prepare
+    - converge
+    - verify
+    - destroy
+driver:
+  name: docker
+lint: |
+  set -e
+  yamllint .
+  ansible-lint
+platforms:
+  - name: instance
+    image: "ghcr.io/pulibrary/vm-builds/ubuntu-22.04"
+    command: ""  # image above already has the /sbin/init
+    tmpfs:
+      - /run
+      - /run/lock
+      - /tmp
+    volumes:
+      - /sys/fs/cgroup:/sys/fs/cgroup:rw
+    cgroupns_mode: host
+    privileged: true
+    pre_build_image: true
+provisioner:
+  name: ansible
+  config_options:
+    defaults:
+      remote_tmp: /tmp
+      log: true
+verifier:
+  name: ansible
diff --git a/roles/plakar/molecule/default/verify.yml b/roles/plakar/molecule/default/verify.yml
new file mode 100644
index 0000000000..e5bfae8164
--- /dev/null
+++ b/roles/plakar/molecule/default/verify.yml
@@ -0,0 +1,34 @@
+---
+- name: Verify
+  hosts: all
+  gather_facts: false
+  become: true
+  vars:
+    # Mirror defaults for verification logic
+    plakar_s3_build_dir: "/var/lib/plakar/plugins-build"
+    plakar_s3_plugin_version: "1.0.7"
+    plakar_s3_plugin_arch: "linux_amd64"
+    plakar_s3_plugin_filename: "s3_v{{ plakar_s3_plugin_version }}_{{ plakar_s3_plugin_arch }}.ptar"
+  tasks:
+    - name: Check if Plakar binary exists
+      ansible.builtin.stat:
+        path: /usr/bin/plakar
+      register: plakar_binary
+
+    - name: Assert Plakar binary is present and executable
+      ansible.builtin.assert:
+        that:
+          - plakar_binary.stat.exists
+          - plakar_binary.stat.executable
+        fail_msg: "Plakar binary not found or not executable at /usr/bin/plakar"
+
+    - name: Get Plakar version
+      ansible.builtin.command: plakar version
+      register: plakar_version_output
+      changed_when: false
+
+    - name: Assert Plakar runs successfully
+      ansible.builtin.assert:
+        that:
+          - plakar_version_output.rc == 0
+        fail_msg: "Plakar version command failed with exit code {{ plakar_version_output.rc }}"
diff --git a/roles/plakar/tasks/configure_backup.yml b/roles/plakar/tasks/configure_backup.yml
new file mode 100644
index 0000000000..d459703126
--- /dev/null
+++ b/roles/plakar/tasks/configure_backup.yml
@@ -0,0 +1,124 @@
+---
+- name: Plakar | Ensure config directory exists
+  ansible.builtin.file:
+    path: "/etc/plakar"
+    state: directory
+    mode: "0755"
+    owner: "{{ plakar_user }}"
+    group: "{{ plakar_user }}"
+
+- name: Plakar | Render /etc/plakar/plakar.yaml
+  ansible.builtin.template:
+    src: plakar.yaml.j2
+    dest: /etc/plakar/plakar.yaml
+    owner: root
+    group: root
+    mode: "0640"
+
+# Make sure the snapshot root exists
+- name: Plakar | Ensure snapshot root directory exists
+  ansible.builtin.file:
+    path: "{{ plakar_snapshot_root }}"
+    state: directory
+    mode: "0755"
+
+# Check if the AWS store is already configured
+- name: Plakar | Check if AWS store exists
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar store show {{ plakar_store_name }}
+  register: plakar_store_show
+  changed_when: false
+  failed_when: false
+
+# Configure the AWS store if missing
+# Example (from plakar docs):
+#   plakar store add mys3bucket \
+#     location=s3://s3.eu-west-3.amazonaws.com/backups \
+#     access_key="access_key" \
+#     secret_access_key="secret_key"
+- name: Plakar | Configure AWS store
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar store add {{ plakar_store_name }}
+    location=s3://s3.{{ plakar_repo_region }}.amazonaws.com/{{ plakar_repo_bucket }}
+    access_key={{ plakar_repo_access_key }}
+    secret_access_key={{ plakar_repo_secret_key }}
+  when: plakar_store_show.rc != 0
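+
+# NOTE: the "store set" task below runs on every play; it is reported as
+# unchanged (changed_when: false) because we cannot cheaply detect whether
+# the stored values already match what Ansible would write.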
+
+# Keep AWS credentials in sync if they change in Ansible
+- name: Plakar | Ensure AWS store credentials are up to date
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar store set {{ plakar_store_name }}
+    access_key={{ plakar_repo_access_key }}
+    secret_access_key={{ plakar_repo_secret_key }}
+  changed_when: false
+
+# Check if a repository already exists in the AWS store
+- name: Plakar | Check if AWS repository exists
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar at @{{ plakar_store_name }} ls
+  environment:
+    PLAKAR_PASSPHRASE: "{{ plakar_repo_passphrase }}"
+  register: plakar_aws_ls
+  changed_when: false
+  failed_when: false
+
+- name: Plakar | Initialize AWS repository (@{{ plakar_store_name }})
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar at @{{ plakar_store_name }} create
+  environment:
+    PLAKAR_PASSPHRASE: "{{ plakar_repo_passphrase }}"
+  when: plakar_aws_ls.rc != 0
+
+# Initial backup to AWS
+# plakar_run_initial_backup (default: false). Set to true the first time
+# you want to seed the repo with an initial backup from this host.
+- name: Plakar | Run initial backup to AWS
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar at @{{ plakar_store_name }} backup {{ plakar_snapshot_root }}
+  environment:
+    PLAKAR_PASSPHRASE: "{{ plakar_repo_passphrase }}"
+  when: plakar_run_initial_backup | default(false) | bool
+
+# Render the scheduler config (maps the docs example to your store + root)
+- name: Plakar | Render scheduler config
+  ansible.builtin.template:
+    src: scheduler.yaml.j2
+    dest: "{{ plakar_scheduler_config_path }}"
+    owner: "{{ plakar_user }}"
+    group: "{{ plakar_user }}"
+    mode: "0640"
+  when: plakar_scheduler_enabled | default(false) | bool
+
+# Check if scheduler is already running
+- name: Plakar | Check if scheduler is already running
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: plakar scheduler list
+  register: plakar_scheduler_list
+  changed_when: false
+  failed_when: false
+  when: plakar_scheduler_enabled | default(false) | bool
+
+# Start scheduler in the background if it's not running
+# Matches docs:
+#   plakar scheduler start -tasks ./scheduler.yaml
+- name: Plakar | Start scheduler for daily backups
+  become: true
+  become_user: "{{ plakar_user }}"
+  ansible.builtin.command: >
+    plakar scheduler start -tasks {{ plakar_scheduler_config_path }}
+  when:
+    - plakar_scheduler_enabled | default(false) | bool
+    - plakar_scheduler_list.rc != 0
diff --git a/roles/plakar/tasks/main.yml b/roles/plakar/tasks/main.yml
new file mode 100644
index 0000000000..b30ceedf9a
--- /dev/null
+++ b/roles/plakar/tasks/main.yml
@@ -0,0 +1,136 @@
+---
+# roles/plakar
+- name: Plakar | Ensure apt dependencies are present
+  ansible.builtin.apt:
+    name:
+      - curl
+      - gnupg2
+      - ca-certificates
+    state: present
+
+- name: Plakar | Download repo GPG key (ASCII)
+  ansible.builtin.get_url:
+    url: "{{ plakar_apt_key_url }}"
+    dest: "{{ plakar_apt_key_path }}.asc"
+    mode: "0644"
+
+- name: Plakar | Dearmor repo GPG key
+  ansible.builtin.command: >
+    gpg --dearmor -o {{ plakar_apt_key_path }} {{ plakar_apt_key_path }}.asc
+  args:
+    creates: "{{ plakar_apt_key_path }}"
+
+- name: Plakar | Configure apt repository
+  ansible.builtin.apt_repository:
+    repo: "deb [signed-by={{ plakar_apt_key_path }}] {{ plakar_apt_repo_url }} {{ plakar_apt_repo_suite }} {{ plakar_apt_repo_component }}"
+    filename: "plakar"
+    state: present
+
+- name: Plakar | Clean apt cache
+  ansible.builtin.command: apt-get clean
+  changed_when: false
+
+- name: Plakar | Update apt cache after adding repository
+  ansible.builtin.command: apt-get update
+ register: cache_update + retries: 3 + delay: 5 + until: cache_update.rc == 0 + changed_when: false + failed_when: false + +- name: Plakar | Display cache update errors + ansible.builtin.debug: + msg: | + Return code: {{ cache_update.rc }} + STDOUT: {{ cache_update.stdout }} + STDERR: {{ cache_update.stderr }} + when: cache_update.rc != 0 + +- name: Plakar | Fail if cache update failed + ansible.builtin.fail: + msg: "apt-get update failed. See debug output above." + when: cache_update.rc != 0 + +- name: Plakar | Install plakar package + ansible.builtin.apt: + name: plakar + state: present + update_cache: true + +- name: Plakar | Ensure build directory exists for S3 plugin + ansible.builtin.file: + path: "{{ plakar_s3_build_dir }}" + state: directory + mode: "0755" + owner: "{{ plakar_user }}" + group: "{{ plakar_user }}" + +# Check if S3 plugin already installed +- name: Plakar | Check installed plugins + ansible.builtin.command: plakar pkg show + register: plakar_pkg_show + become: true + become_user: "{{ plakar_user }}" + changed_when: false + failed_when: plakar_pkg_show.rc not in [0, 1] + # rc==1 could mean "no plugins installed yet" depending on version + +- name: Plakar | Determine if S3 plugin is already installed + ansible.builtin.set_fact: + plakar_s3_installed: "{{ 's3' in (plakar_pkg_show.stdout | default('')) }}" + +# Build S3 plugin (if missing) +- name: Plakar | Build S3 plugin from upstream recipe + ansible.builtin.command: plakar pkg build s3 + become: true + become_user: "{{ plakar_user }}" + args: + chdir: "{{ plakar_s3_build_dir }}" + when: not plakar_s3_installed + +- name: Plakar | Ensure S3 plugin ptar exists + ansible.builtin.stat: + path: "{{ plakar_s3_build_dir }}/{{ plakar_s3_plugin_filename }}" + register: plakar_s3_ptar + when: not plakar_s3_installed + +- name: Plakar | Fail if expected S3 plugin file is missing + ansible.builtin.fail: + msg: > + Expected {{ plakar_s3_build_dir }}/{{ plakar_s3_plugin_filename }} + but it does not exist. Check plakar pkg build s3 output and adjust + plakar_s3_plugin_version/arch if needed. + when: + - not plakar_s3_installed + - not plakar_s3_ptar.stat.exists | default(false) + +# Install S3 plugin via local ptar +- name: Plakar | Install S3 plugin from local ptar + ansible.builtin.command: > + plakar pkg add {{ plakar_s3_build_dir }}/{{ plakar_s3_plugin_filename }} + args: + chdir: "{{ plakar_s3_build_dir }}" + become: true + become_user: "{{ plakar_user }}" + when: not plakar_s3_installed + +# Verify plugin is now installed +- name: Plakar | Verify S3 plugin is installed + ansible.builtin.command: plakar pkg show + register: plakar_pkg_show_after + become: true + become_user: "{{ plakar_user }}" + changed_when: false + +- name: Plakar | Assert S3 shows up in plugin list + ansible.builtin.assert: + that: + - "'s3' in (plakar_pkg_show_after.stdout | default(''))" + fail_msg: "S3 plugin did not appear in 'plakar pkg show' after install." + +- name: Plakar | Configure repository and initial backup + ansible.builtin.include_tasks: configure_backup.yml + when: + - running_on_server + - plakar_configure_backup | default(false) diff --git a/roles/plakar/templates/plakar.yaml.j2 b/roles/plakar/templates/plakar.yaml.j2 new file mode 100644 index 0000000000..5c9e9f599b --- /dev/null +++ b/roles/plakar/templates/plakar.yaml.j2 @@ -0,0 +1,19 @@ +# {{ ansible_managed | comment }} +version: 1.0.0 + +# 1. 
Define the storage backend (S3 API) +repository: + backend: s3 + config: + bucket: "{{ plakar_repo_bucket }}" + region: "{{ plakar_repo_region }}" + access_key: "{{ plakar_repo_access_key }}" + secret_key: "{{ plakar_repo_secret_key }}" + +# 2. Define the backup job +snapshot: + root: "{{ plakar_snapshot_root }}" # The directory you want backed up + excludes: +{% for pattern in plakar_snapshot_excludes %} + - "{{ pattern }}" +{% endfor %} diff --git a/roles/plakar/templates/scheduler.yaml.j2 b/roles/plakar/templates/scheduler.yaml.j2 new file mode 100644 index 0000000000..b2a325cc69 --- /dev/null +++ b/roles/plakar/templates/scheduler.yaml.j2 @@ -0,0 +1,10 @@ +# {{ ansible_managed | comment }} +agent: + tasks: + - name: "backup {{ plakar_snapshot_root }}" + repository: "@{{ plakar_store_name }}" + + backup: + path: "{{ plakar_snapshot_root }}" + interval: "{{ plakar_scheduler_interval }}" + check: true diff --git a/roles/plakar/tests/inventory b/roles/plakar/tests/inventory new file mode 100644 index 0000000000..878877b077 --- /dev/null +++ b/roles/plakar/tests/inventory @@ -0,0 +1,2 @@ +localhost + diff --git a/roles/plakar/tests/test.yml b/roles/plakar/tests/test.yml new file mode 100644 index 0000000000..966764a1fb --- /dev/null +++ b/roles/plakar/tests/test.yml @@ -0,0 +1,5 @@ +--- +- hosts: localhost + remote_user: root + roles: + - roles/plakar diff --git a/roles/plakar/vars/main.yml b/roles/plakar/vars/main.yml new file mode 100644 index 0000000000..ce6304ba70 --- /dev/null +++ b/roles/plakar/vars/main.yml @@ -0,0 +1,2 @@ +--- +# vars file for roles/plakar