Switch to wrangler for R2 upload

- Switch uploads to wrangler, authenticated with a Cloudflare API token (see the auth sketch below)
- Drop the AWS CLI dependency and the dedicated R2 access keys
- Add the --remote flag so objects go to the actual R2 bucket rather than wrangler's local dev storage
- Simplify cleanup (wrangler does not support listing R2 objects)
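
A minimal sketch of the resulting auth surface, assuming the token is
injected by the environment (CLOUDFLARE_API_TOKEN and CLOUDFLARE_ACCOUNT_ID
are the variables wrangler reads; the values are placeholders):

    # Non-interactive auth: wrangler picks these up from the environment,
    # so neither `wrangler login` nor R2-specific S3 keys are needed.
    export CLOUDFLARE_API_TOKEN="<token-with-R2-write-permission>"
    export CLOUDFLARE_ACCOUNT_ID="<cloudflare-account-id>"
    wrangler r2 object put "vault-backup/example.json.gz" --file=example.json.gz --remote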

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
kappa
2026-01-30 22:21:26 +09:00
parent 750f8ac241
commit c8928bf5ad

@@ -10,12 +10,8 @@ set -euo pipefail
 VAULT_ADDR="${VAULT_ADDR:-https://vault.anvil.it.com}"
 VAULT_TOKEN="${VAULT_TOKEN:?VAULT_TOKEN is required}"
 
-# R2 Configuration
-R2_ACCOUNT_ID="${R2_ACCOUNT_ID:?R2_ACCOUNT_ID is required}"
-R2_ACCESS_KEY="${R2_ACCESS_KEY:?R2_ACCESS_KEY is required}"
-R2_SECRET_KEY="${R2_SECRET_KEY:?R2_SECRET_KEY is required}"
+# R2 Configuration (uploads via wrangler, authenticated with a CF API token)
 R2_BUCKET="${R2_BUCKET:-vault-backup}"
-R2_ENDPOINT="https://${R2_ACCOUNT_ID}.r2.cloudflarestorage.com"
 
 # Backup settings
 BACKUP_DIR="${BACKUP_DIR:-/tmp/vault-backups}"
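
With the hunk above, only R2_BUCKET remains as script-level R2 configuration;
the account ID and endpoint now come from wrangler's own credentials. For
illustration, an invocation needs roughly this environment (the script name
backup.sh is hypothetical; the file name is not shown in this diff):

    VAULT_TOKEN="s.xxxxx" \
    CLOUDFLARE_API_TOKEN="<token>" \
    CLOUDFLARE_ACCOUNT_ID="<account-id>" \
    R2_BUCKET="vault-backup" \
    ./backup.sh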
@@ -37,7 +33,7 @@ error() {
 }
 
 check_dependencies() {
-    for cmd in curl aws jq; do
+    for cmd in curl wrangler jq; do
         if ! command -v "$cmd" &>/dev/null; then
             error "$cmd is required but not installed"
         fi
@@ -100,7 +96,7 @@ export_secrets() {
         echo '], "timestamp": "'$(date -Iseconds)'"}' >> "$secrets_file"
     fi
 
-    # Compress and encrypt (optional)
+    # Compress
     gzip -c "$secrets_file" > "$output_file"
     rm -f "$secrets_file"
 }
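
The dropped "and encrypt (optional)" wording reflects that the script only
gzips today. If encryption is reintroduced later, one possible shape (not
implemented in this commit) is a symmetric gpg pass after compression; the
BACKUP_PASSPHRASE variable is hypothetical:

    # Hypothetical: compress, then symmetrically encrypt with a passphrase
    gzip -c "$secrets_file" | gpg --batch --pinentry-mode loopback \
        --passphrase "$BACKUP_PASSPHRASE" --symmetric -o "${output_file}.gpg"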
@@ -111,35 +107,15 @@ upload_to_r2() {
     log "Uploading to R2: ${R2_BUCKET}/${filename}"
 
-    # Configure AWS CLI for R2
-    export AWS_ACCESS_KEY_ID="$R2_ACCESS_KEY"
-    export AWS_SECRET_ACCESS_KEY="$R2_SECRET_KEY"
+    # Use wrangler (authenticated with the CF API token)
+    wrangler r2 object put "${R2_BUCKET}/${filename}" --file="$file" --content-type="application/octet-stream" --remote
 
-    aws s3 cp "$file" "s3://${R2_BUCKET}/${filename}" \
-        --endpoint-url "$R2_ENDPOINT" \
-        --quiet
-    log "Upload complete: s3://${R2_BUCKET}/${filename}"
+    log "Upload complete: r2://${R2_BUCKET}/${filename}"
 }
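
To spot-check an upload, the matching download command can pull the object
back (the key below is a placeholder, and the flag set is assumed to mirror
the put call above):

    wrangler r2 object get "vault-backup/vault-backup-20260130.json.gz" \
        --file=/tmp/restore-check.json.gz --remote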
 
 cleanup_old_backups() {
-    log "Cleaning up backups older than ${RETENTION_DAYS} days..."
-
-    export AWS_ACCESS_KEY_ID="$R2_ACCESS_KEY"
-    export AWS_SECRET_ACCESS_KEY="$R2_SECRET_KEY"
-
-    local cutoff_date=$(date -v-${RETENTION_DAYS}d +%Y-%m-%d 2>/dev/null || date -d "${RETENTION_DAYS} days ago" +%Y-%m-%d)
-
-    aws s3 ls "s3://${R2_BUCKET}/" --endpoint-url "$R2_ENDPOINT" 2>/dev/null | \
-    while read -r line; do
-        local file_date=$(echo "$line" | awk '{print $1}')
-        local file_name=$(echo "$line" | awk '{print $4}')
-        if [[ "$file_date" < "$cutoff_date" && -n "$file_name" ]]; then
-            log "Deleting old backup: $file_name"
-            aws s3 rm "s3://${R2_BUCKET}/${file_name}" --endpoint-url "$R2_ENDPOINT" --quiet
-        fi
-    done
+    log "Cleanup skipped - wrangler doesn't support object listing"
+    log "Use Cloudflare dashboard or lifecycle rules for automatic cleanup"
 }
 
 cleanup_local() {
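
Until lifecycle rules are set up in the dashboard, expired backups can still
be deleted individually by key, since wrangler can delete (though not list)
objects; the key below is a placeholder:

    wrangler r2 object delete "vault-backup/vault-backup-20251201.json.gz" --remote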