From majestic-devops
Guides Backblaze B2 Cloud Storage integration with B2 CLI for bucket/file operations and Terraform/OpenTofu for provisioning buckets with lifecycle rules, CORS, and encryption.
npx claudepluginhub majesticlabs-dev/majestic-marketplace --plugin majestic-devops

This skill is limited to using the following tools:
See [references/b2-cli-setup.md](references/b2-cli-setup.md) for B2 CLI installation and authentication.
Manages AWS S3 buckets with versioning, encryption, access control, lifecycle policies, and replication. Use for object storage, static sites, and data lakes.
Provides rclone setup, configuration, and commands to upload, sync, list, and manage files in AWS S3, Cloudflare R2, Backblaze B2, Google Drive, Dropbox.
Guides AWS S3 bucket creation, object operations, versioning, encryption, public access control, lifecycle policies, and storage classes using CLI and boto3.
Share bugs, ideas, or general feedback.
See references/b2-cli-setup.md for B2 CLI installation and authentication. See references/b2-key-capabilities.md for application key capabilities and sync flags.
b2 bucket list
b2 bucket create my-bucket allPrivate
b2 bucket create my-bucket allPrivate \
--lifecycle-rules '[{"daysFromHidingToDeleting": 30, "fileNamePrefix": "logs/"}]'
b2 bucket delete my-bucket
b2 ls my-bucket
b2 ls my-bucket path/to/folder/
b2 file upload my-bucket local-file.txt remote/path/file.txt
b2 file upload --content-type "application/json" my-bucket data.json data.json
b2 file download b2://my-bucket/remote/path/file.txt local-file.txt
b2 file download b2id://<fileId> local-file.txt
b2 rm my-bucket path/to/file.txt
b2 rm --recursive --versions my-bucket path/to/folder/
b2 sync /local/path b2://my-bucket/prefix/
b2 sync b2://my-bucket/prefix/ /local/path
b2 sync /local/path b2://my-bucket/ \
--threads 20 --delete --keep-days 30 --exclude-regex ".*\.tmp$"
# Pin the Backblaze B2 provider; "~> 0.8" allows 0.8.x patch releases only.
terraform {
required_providers {
b2 = {
source = "Backblaze/b2"
version = "~> 0.8"
}
}
}
# Credentials come from the environment — never hard-code keys in config.
provider "b2" {
# Uses B2_APPLICATION_KEY_ID and B2_APPLICATION_KEY env vars
}
# Private application bucket with per-prefix lifecycle rules.
resource "b2_bucket" "storage" {
bucket_name = "my-app-storage"
bucket_type = "allPrivate"
# Arbitrary key/value metadata attached to the bucket.
bucket_info = {
environment = "production"
application = "my-app"
}
# logs/: hide files 90 days after upload, then delete 30 days after hiding.
lifecycle_rules {
file_name_prefix = "logs/"
days_from_hiding_to_deleting = 30
days_from_uploading_to_hiding = 90
}
# temp/: hide 7 days after upload, delete 1 day after hiding.
lifecycle_rules {
file_name_prefix = "temp/"
days_from_hiding_to_deleting = 1
days_from_uploading_to_hiding = 7
}
}
output "bucket_id" {
value = b2_bucket.storage.bucket_id
}
# Public bucket for web assets; CORS limits browser access to known origins.
resource "b2_bucket" "web_assets" {
bucket_name = "my-web-assets"
bucket_type = "allPublic"
cors_rules {
cors_rule_name = "allowWebApp"
allowed_origins = ["https://myapp.com", "https://www.myapp.com"]
allowed_headers = ["*"]
# Read-only operations via the S3-compatible API.
allowed_operations = ["s3_get", "s3_head"]
expose_headers = ["x-bz-content-sha1"]
# Browsers may cache the CORS preflight response for one hour.
max_age_seconds = 3600
}
}
# Bucket with Backblaze-managed server-side encryption (SSE-B2, AES-256)
# applied to newly uploaded files by default.
resource "b2_bucket" "encrypted" {
bucket_name = "my-encrypted-bucket"
bucket_type = "allPrivate"
default_server_side_encryption {
mode = "SSE-B2"
algorithm = "AES256"
}
}
# Object Lock bucket: new files get a default 365-day retention period.
# NOTE(review): file lock typically can only be enabled at bucket creation —
# confirm before applying this to a pre-existing bucket.
resource "b2_bucket" "compliance" {
bucket_name = "compliance-records"
bucket_type = "allPrivate"
file_lock_configuration {
is_file_lock_enabled = true
# "governance" mode is presumably overridable by sufficiently privileged
# keys, unlike "compliance" mode — verify against B2 file-lock docs.
default_retention {
mode = "governance"
period {
duration = 365
unit = "days"
}
}
}
}
# Scoped application key: list/read/write restricted to one bucket and the
# uploads/ name prefix. Note no delete capability is granted.
resource "b2_application_key" "app" {
key_name = "my-app-key"
capabilities = ["listBuckets", "listFiles", "readFiles", "writeFiles"]
bucket_id = b2_bucket.storage.bucket_id
name_prefix = "uploads/"
}
output "application_key_id" {
value = b2_application_key.app.application_key_id
}
# The secret key; "sensitive" makes Terraform redact it in CLI output
# (it is still stored in plaintext in the state file).
output "application_key" {
value = b2_application_key.app.application_key
sensitive = true
}
# Read-only metadata for the account the provider is authenticated as.
data "b2_account_info" "current" {}

output "account_id" {
  value = data.b2_account_info.current.account_id
}

# Look up a bucket that is managed outside this configuration.
data "b2_bucket" "existing" {
  bucket_name = "my-existing-bucket"
}

# Renamed from "bucket_id": output names must be unique within a module,
# and "bucket_id" is already used for b2_bucket.storage earlier in the file.
output "existing_bucket_id" {
  value = data.b2_bucket.existing.bucket_id
}
# Upload a local file into the bucket as a Terraform-managed object.
resource "b2_bucket_file" "config" {
bucket_id = b2_bucket.storage.bucket_id
file_name = "config/settings.json"
source = "${path.module}/files/settings.json"
content_type = "application/json"
}
resource "b2_bucket_file" "data" {
bucket_id = b2_bucket.storage.bucket_id
file_name = "data/export.csv"
source = "${path.module}/files/export.csv"
# Custom metadata stored alongside the file.
# NOTE(review): timestamp() re-evaluates on every plan, so this resource
# will show a perpetual diff (and likely re-upload on each apply) —
# consider a static value or ignore_changes if that is not intended.
file_info = {
exported_at = timestamp()
version = "1.0"
}
}
Endpoint format: s3.<region>.backblazeb2.com (e.g., us-west-004, eu-central-003)
# ~/.aws/config
[profile backblaze]
region = us-west-004
output = json
# ~/.aws/credentials - use B2 applicationKeyId / applicationKey
[backblaze]
aws_access_key_id = your-key-id
aws_secret_access_key = your-application-key
aws --profile backblaze --endpoint-url https://s3.us-west-004.backblazeb2.com s3 ls
aws --profile backblaze --endpoint-url https://s3.us-west-004.backblazeb2.com \
s3 sync /local/path s3://my-bucket/prefix/
# ~/.config/rclone/rclone.conf
[backblaze]
type = b2
account = your-key-id
key = your-application-key
# note: no endpoint needed — the rclone b2 backend uses the native B2 API;
# an s3.<region>.backblazeb2.com endpoint is only for a `type = s3` remote
rclone sync /local/path backblaze:my-bucket/prefix/
rclone copy -P /local/path backblaze:my-bucket/
# Backblaze B2 sync helpers. Override any knob on the command line, e.g.:
#   make b2-sync-up B2_BUCKET=other-bucket LOCAL_PATH=./out
.PHONY: b2-sync-up b2-sync-down b2-backup

B2_BUCKET ?= my-bucket
B2_PREFIX ?= data/
LOCAL_PATH ?= ./data
# Parallel transfer threads; was hard-coded to 20 in each recipe.
B2_THREADS ?= 20

b2-sync-up:  ## Upload LOCAL_PATH to b2://B2_BUCKET/B2_PREFIX
	b2 sync --threads $(B2_THREADS) $(LOCAL_PATH) b2://$(B2_BUCKET)/$(B2_PREFIX)

b2-sync-down:  ## Download b2://B2_BUCKET/B2_PREFIX into LOCAL_PATH
	b2 sync --threads $(B2_THREADS) b2://$(B2_BUCKET)/$(B2_PREFIX) $(LOCAL_PATH)

# $$(date …) runs in the recipe's shell at execution time, rather than
# $(shell …) which Make expands itself — keeps the date out of Make's hands
# and behaves sanely under `make -n`.
b2-backup:  ## Dated backup; --keep-days retains prior versions for 30 days
	b2 sync --threads $(B2_THREADS) --keep-days 30 \
		$(LOCAL_PATH) b2://$(B2_BUCKET)/backups/$$(date +%Y-%m-%d)/