Mirror of https://github.com/auricom/home-cluster.git, synced 2025-09-17 18:24:14 +02:00
feat: update workflows
.github/linters/.jscpd.json (new file, 3 lines, vendored)
@@ -0,0 +1,3 @@
+{
+  "ignore": ["**/truenas/files/scripts/**"]
+}
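The new config tells jscpd, the copy/paste detector MegaLinter wires in below, to skip the TrueNAS helper scripts. A minimal local check, as a sketch assuming Node is available so npx can fetch jscpd on demand:

    # Scan the repository with the committed config; the ignore glob above
    # keeps **/truenas/files/scripts/** out of duplicate detection.
    npx jscpd --config .github/linters/.jscpd.json .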
.github/workflows/helm-release-differ.yaml (2 changes, vendored)
@@ -68,7 +68,7 @@ jobs:
         with:
           issue-number: "${{ github.event.pull_request.number }}"
           comment-author: "${{ env.BOT_USERNAME }}"
-          body-includes: helm-release.yaml
+          body-includes: "${{ matrix.file }}"
       - name: Create or update comment
         uses: peter-evans/create-or-update-comment@v2
         with:
MegaLinter lint workflow (file path not captured in this view):
@@ -13,21 +13,23 @@ concurrency:

 jobs:
   build:
-    name: Lint
+    name: MegaLinter
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
         uses: actions/checkout@v3
         with:
           fetch-depth: 0

       - name: Generate Token
         uses: tibdex/github-app-token@v1
         id: generate-token
         with:
           app_id: "${{ secrets.BOT_APP_ID }}"
           private_key: "${{ secrets.BOT_APP_PRIVATE_KEY }}"

       - name: MegaLinter
-        uses: oxsecurity/megalinter@v6.0.3
+        uses: oxsecurity/megalinter@v6.0.4
         env:
           GITHUB_TOKEN: "${{ steps.generate-token.outputs.token }}"
           PRINT_ALPACA: false
@@ -40,10 +42,10 @@ jobs:
            "ACTION_ACTIONLINT",
            "ANSIBLE_ANSIBLE_LINT",
            "COPYPASTE_JSCPD",
-           "CREDENTIALS_SECRETLINT",
-           "GIT_GIT_DIFF",
            "KUBERNETES_KUBEVAL",
            "MARKDOWN_MARKDOWNLINT",
+           "REPOSITORY_GIT_DIFF",
+           "REPOSITORY_SECRETLINT",
            "TERRAFORM_TERRAFORM_FMT",
            "YAML_PRETTIER",
            "YAML_YAMLLINT"
@@ -53,19 +55,13 @@ jobs:
            )
           }}
           ANSIBLE_DIRECTORY: ansible
-          ANSIBLE_ANSIBLE_LINT_CONFIG_FILE: .ansible-lint
+          ANSIBLE_ANSIBLE_LINT_CONFIG_FILE: .github/linters/.ansible-lint
+          COPYPASTE_JSCPD_CONFIG_FILE: .github/linters/.jscpd.json
           KUBERNETES_DIRECTORY: cluster
           KUBERNETES_KUBEVAL_ARGUMENTS: --ignore-missing-schemas
           KUBERNETES_KUBEVAL_FILTER_REGEX_INCLUDE: "(kubernetes)"
           MARKDOWN_MARKDOWNLINT_CONFIG_FILE: .github/linters/.markdownlint.yaml
           MARKDOWN_MARKDOWNLINT_RULES_PATH: .github/
           YAML_YAMLLINT_CONFIG_FILE: .github/linters/.yamllint.yaml
+          YAML_PRETTIER_CONFIG_FILE: .github/linters/.prettierrc.yaml
           YAML_PRETTIER_FILTER_REGEX_EXCLUDE: "(.*\\.sops\\.ya?ml)"
-      - name: Archive production artifacts
-        if: ${{ success() }} || ${{ failure() }}
-        uses: actions/upload-artifact@v3
-        with:
-          name: MegaLinter reports
-          path: |
-            report
-            mega-linter.log
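The config-file changes above gather every linter config under .github/linters/ and drop the artifact-upload step. To try the same linter set before pushing, one option is MegaLinter's local runner; a hedged sketch assuming Docker and Node are available (the workflow's env settings are not picked up automatically and would need to be mirrored, for example in a .mega-linter.yml):

    # Run the same MegaLinter release locally against the current checkout.
    npx mega-linter-runner --release v6.0.4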
.github/workflows/scan-containers.yaml (3 changes, vendored)
@@ -64,8 +64,7 @@ jobs:
           image-ref: ${{ matrix.containers }}
           vuln-type: os,library
           severity: CRITICAL,HIGH
-          format: template
-          template: "@/contrib/sarif.tpl"
+          format: sarif
           output: trivy-results.sarif
       - name: Upload Trivy scan results to GitHub Security tab
         uses: github/codeql-action/upload-sarif@v2
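Switching from the SARIF template to Trivy's built-in sarif format is the whole change here. A rough local equivalent of the updated step, with a placeholder image standing in for an entry of matrix.containers:

    # Scan one image for CRITICAL/HIGH OS and library vulnerabilities and
    # write a SARIF report, mirroring the action inputs above.
    trivy image \
      --vuln-type os,library \
      --severity CRITICAL,HIGH \
      --format sarif \
      --output trivy-results.sarif \
      ghcr.io/example/app:latest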
.github/workflows/schedule-renovate.yaml (2 changes, vendored)
@@ -44,7 +44,7 @@ jobs:
           echo "DRY_RUN=${{ github.event.inputs.dryRun || env.DRY_RUN }}" >> "${GITHUB_ENV}"
           echo "LOG_LEVEL=${{ github.event.inputs.logLevel || env.LOG_LEVEL }}" >> "${GITHUB_ENV}"
       - name: Renovate
-        uses: renovatebot/github-action@v32.112.0
+        uses: renovatebot/github-action@v32.111.0
         with:
           configurationFile: .github/renovate.json5
           token: "x-access-token:${{ steps.generate-token.outputs.token }}"
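The DRY_RUN and LOG_LEVEL lines read workflow_dispatch inputs, so the schedule can also be triggered by hand. A sketch with the GitHub CLI, assuming the input names dryRun and logLevel and the example values, which this diff does not confirm:

    # Kick off the Renovate workflow manually with a dry run and verbose logs.
    gh workflow run schedule-renovate.yaml -f dryRun=true -f logLevel=debug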
ansible/roles/truenas/files/scripts/borgserver.bash (new file, 96 lines)
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+
+PUID=${PUID:-1000}
+PGID=${PGID:-1000}
+
+usermod -o -u "$PUID" borg &>/dev/null
+groupmod -o -g "$PGID" borg &>/dev/null
+
+BORG_DATA_DIR=/backups
+SSH_KEY_DIR=/keys
+BORG_CMD='cd ${BORG_DATA_DIR}/${client_name}; borg serve --restrict-to-path ${BORG_DATA_DIR}/${client_name} ${BORG_SERVE_ARGS}'
+AUTHORIZED_KEYS_PATH=/home/borg/.ssh/authorized_keys
+
+# Append only mode?
+BORG_APPEND_ONLY=${BORG_APPEND_ONLY:=no}
+
+source /etc/os-release
+echo "########################################################"
+echo -n " * BorgServer powered by "
+borg -V
+echo " * Based on k8s-at-home"
+echo "########################################################"
+echo " * User id: $(id -u borg)"
+echo " * Group id: $(id -g borg)"
+echo "########################################################"
+
+
+# Precheck if BORG_ADMIN is set
+if [ "${BORG_APPEND_ONLY}" == "yes" ] && [ -z "${BORG_ADMIN}" ] ; then
+    echo "WARNING: BORG_APPEND_ONLY is active, but no BORG_ADMIN was specified!"
+fi
+
+# Precheck directories & client ssh-keys
+for dir in BORG_DATA_DIR SSH_KEY_DIR ; do
+    dirpath=$(eval echo '$'${dir})
+    echo " * Testing Volume ${dir}: ${dirpath}"
+    if [ ! -d "${dirpath}" ] ; then
+        echo "ERROR: ${dirpath} is no directory!"
+        exit 1
+    fi
+
+    if [ "$(find ${SSH_KEY_DIR}/clients ! -regex '.*/\..*' -a -type f | wc -l)" == "0" ] ; then
+        echo "ERROR: No SSH-Pubkey file found in ${SSH_KEY_DIR}"
+        exit 1
+    fi
+done
+
+# Create SSH-Host-Keys on persistent storage, if not exist
+mkdir -p ${SSH_KEY_DIR}/host 2>/dev/null
+echo " * Checking / Preparing SSH Host-Keys..."
+for keytype in ed25519 rsa ; do
+    if [ ! -f "${SSH_KEY_DIR}/host/ssh_host_${keytype}_key" ] ; then
+        echo " ** Creating SSH Hostkey [${keytype}]..."
+        ssh-keygen -q -f "${SSH_KEY_DIR}/host/ssh_host_${keytype}_key" -N '' -t ${keytype}
+    fi
+done
+
+echo "########################################################"
+echo " * Starting SSH-Key import..."
+
+# Add every key to borg-users authorized_keys
+rm ${AUTHORIZED_KEYS_PATH} &>/dev/null
+for keyfile in $(find "${SSH_KEY_DIR}/clients" ! -regex '.*/\..*' -a -type f); do
+    client_name=$(basename ${keyfile})
+    mkdir ${BORG_DATA_DIR}/${client_name} 2>/dev/null
+    echo " ** Adding client ${client_name} with repo path ${BORG_DATA_DIR}/${client_name}"
+
+    # If client is $BORG_ADMIN unset $client_name, so path restriction equals $BORG_DATA_DIR
+    # Otherwise add --append-only, if enabled
+    borg_cmd=${BORG_CMD}
+    if [ "${client_name}" == "${BORG_ADMIN}" ] ; then
+        echo " ** Client '${client_name}' is BORG_ADMIN! **"
+        unset client_name
+    elif [ "${BORG_APPEND_ONLY}" == "yes" ] ; then
+        borg_cmd="${BORG_CMD} --append-only"
+    fi
+
+    echo -n "restrict,command=\"$(eval echo -n \"${borg_cmd}\")\" " >> ${AUTHORIZED_KEYS_PATH}
+    cat ${keyfile} >> ${AUTHORIZED_KEYS_PATH}
+    echo >> ${AUTHORIZED_KEYS_PATH}
+done
+chmod 0600 "${AUTHORIZED_KEYS_PATH}"
+
+echo " * Validating structure of generated ${AUTHORIZED_KEYS_PATH}..."
+ERROR=$(ssh-keygen -lf ${AUTHORIZED_KEYS_PATH} 2>&1 >/dev/null)
+if [ $? -ne 0 ]; then
+    echo "ERROR: ${ERROR}"
+    exit 1
+fi
+
+chown -R borg:borg ${BORG_DATA_DIR}
+chown borg:borg ${AUTHORIZED_KEYS_PATH}
+chmod 600 ${AUTHORIZED_KEYS_PATH}
+
+echo "########################################################"
+echo " * Init done!"
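For a client public key dropped at /keys/clients/laptop (the name "laptop" is hypothetical), the import loop writes one forced-command entry per client, and that client can then reach only its own repo path. A sketch assuming BORG_SERVE_ARGS is empty and the server is reachable as truenas.example.com:

    # Entry the loop appends to /home/borg/.ssh/authorized_keys for "laptop":
    #   restrict,command="cd /backups/laptop; borg serve --restrict-to-path /backups/laptop " ssh-ed25519 AAAA... user@laptop
    #
    # Client-side usage: relative repo paths are resolved in the forced
    # command's working directory, i.e. under /backups/laptop.
    borg init --encryption=repokey borg@truenas.example.com:main
    borg create borg@truenas.example.com:main::{now} /home/user/documents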