Use Azure Blob Storage Python SDK for uploading/downloading/listing blobs, managing containers, and authentication with DefaultAzureCredential.
From antigravity-awesome-skills. To install:

```bash
npx claudepluginhub sickn33/antigravity-awesome-skills --plugin antigravity-awesome-skills
```
Client library for Azure Blob Storage — object storage for unstructured data.
```bash
pip install azure-storage-blob azure-identity
```

```bash
AZURE_STORAGE_ACCOUNT_NAME=<your-storage-account>
# Or use the full URL
AZURE_STORAGE_ACCOUNT_URL=https://<account>.blob.core.windows.net
```
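A minimal sketch of building the account URL from those variables in Python (the variable names follow this document's convention; the SDK does not read them automatically):

```python
import os

# Prefer the full URL; otherwise derive it from the account name
account_url = os.environ.get("AZURE_STORAGE_ACCOUNT_URL") or (
    f"https://{os.environ['AZURE_STORAGE_ACCOUNT_NAME']}.blob.core.windows.net"
)
```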
```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

credential = DefaultAzureCredential()
account_url = "https://<account>.blob.core.windows.net"
blob_service_client = BlobServiceClient(account_url, credential=credential)
```
| Client | Purpose | Get From |
|---|---|---|
| `BlobServiceClient` | Account-level operations | Direct instantiation |
| `ContainerClient` | Container operations | `blob_service_client.get_container_client()` |
| `BlobClient` | Single blob operations | `container_client.get_blob_client()` |
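Clients can also be constructed directly from a URL when the parent client isn't needed; a sketch using the SDK's `from_container_url`/`from_blob_url` class methods (URLs are placeholders):

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient, ContainerClient

credential = DefaultAzureCredential()

# Skip BlobServiceClient and target a container or blob directly
container_client = ContainerClient.from_container_url(
    "https://<account>.blob.core.windows.net/mycontainer", credential=credential
)
blob_client = BlobClient.from_blob_url(
    "https://<account>.blob.core.windows.net/mycontainer/sample.txt", credential=credential
)
```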
```python
container_client = blob_service_client.get_container_client("mycontainer")
container_client.create_container()
```
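`create_container()` raises `ResourceExistsError` when the container already exists; a sketch of tolerating that on repeated runs:

```python
from azure.core.exceptions import ResourceExistsError

try:
    container_client.create_container()
except ResourceExistsError:
    pass  # Container already exists; safe to keep using it
```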
```python
# From file path
blob_client = blob_service_client.get_blob_client(
    container="mycontainer",
    blob="sample.txt"
)
with open("./local-file.txt", "rb") as data:
    blob_client.upload_blob(data, overwrite=True)

# From bytes/string
blob_client.upload_blob(b"Hello, World!", overwrite=True)

# From stream
import io
stream = io.BytesIO(b"Stream content")
blob_client.upload_blob(stream, overwrite=True)
```
```python
blob_client = blob_service_client.get_blob_client(
    container="mycontainer",
    blob="sample.txt"
)

# To file
with open("./downloaded.txt", "wb") as file:
    download_stream = blob_client.download_blob()
    file.write(download_stream.readall())

# To memory
download_stream = blob_client.download_blob()
content = download_stream.readall()  # bytes

# Read into existing buffer
import io
stream = io.BytesIO()
num_bytes = blob_client.download_blob().readinto(stream)
```
```python
container_client = blob_service_client.get_container_client("mycontainer")

# List all blobs
for blob in container_client.list_blobs():
    print(f"{blob.name} - {blob.size} bytes")

# List with prefix (folder-like)
for blob in container_client.list_blobs(name_starts_with="logs/"):
    print(blob.name)

# Walk blob hierarchy (virtual directories)
from azure.storage.blob import BlobPrefix

for item in container_client.walk_blobs(delimiter="/"):
    if isinstance(item, BlobPrefix):
        print(f"Directory: {item.name}")
    else:
        print(f"Blob: {item.name}")
```
```python
blob_client.delete_blob()

# Delete with snapshots
blob_client.delete_blob(delete_snapshots="include")
```
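Deleting a blob that doesn't exist raises `ResourceNotFoundError`; a sketch of guarding against that, either by catching the exception or checking `exists()` first:

```python
from azure.core.exceptions import ResourceNotFoundError

try:
    blob_client.delete_blob()
except ResourceNotFoundError:
    pass  # Blob was already gone

# Alternative (racy if another writer deletes concurrently)
if blob_client.exists():
    blob_client.delete_blob()
```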
```python
from azure.storage.blob import BlobClient

# Configure chunk sizes for large uploads/downloads
blob_client = BlobClient(
    account_url=account_url,
    container_name="mycontainer",
    blob_name="large-file.zip",
    credential=credential,
    max_block_size=4 * 1024 * 1024,       # 4 MiB blocks
    max_single_put_size=64 * 1024 * 1024  # 64 MiB single-upload limit
)

# Parallel upload
blob_client.upload_blob(data, max_concurrency=4)

# Parallel download
download_stream = blob_client.download_blob(max_concurrency=4)
```
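For downloads too large to buffer, the returned `StorageStreamDownloader` can be consumed incrementally with `chunks()` instead of `readall()`; a minimal sketch:

```python
# Stream the blob to disk chunk by chunk, never holding it all in memory
with open("./large-file.zip", "wb") as file:
    for chunk in blob_client.download_blob(max_concurrency=4).chunks():
        file.write(chunk)
```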
```python
from datetime import datetime, timedelta, timezone
from azure.storage.blob import generate_blob_sas, BlobSasPermissions

sas_token = generate_blob_sas(
    account_name="<account>",
    container_name="mycontainer",
    blob_name="sample.txt",
    account_key="<account-key>",  # Or use a user delegation key
    permission=BlobSasPermissions(read=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1)
)

# Use SAS token
blob_url = f"https://<account>.blob.core.windows.net/mycontainer/sample.txt?{sas_token}"
```
```python
# Get properties
properties = blob_client.get_blob_properties()
print(f"Size: {properties.size}")
print(f"Content-Type: {properties.content_settings.content_type}")
print(f"Last modified: {properties.last_modified}")

# Set metadata
blob_client.set_blob_metadata(metadata={"category": "logs", "year": "2024"})

# Set content type
from azure.storage.blob import ContentSettings
blob_client.set_http_headers(
    content_settings=ContentSettings(content_type="application/json")
)
```
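Content settings and metadata can also be supplied at upload time instead of being patched afterwards; both are keyword arguments of `upload_blob()`:

```python
from azure.storage.blob import ContentSettings

blob_client.upload_blob(
    b'{"status": "ok"}',
    overwrite=True,
    content_settings=ContentSettings(content_type="application/json"),
    metadata={"category": "logs", "year": "2024"}
)
```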
```python
from azure.identity.aio import DefaultAzureCredential
from azure.storage.blob.aio import BlobServiceClient

async def upload_async():
    credential = DefaultAzureCredential()
    async with BlobServiceClient(account_url, credential=credential) as client:
        blob_client = client.get_blob_client("mycontainer", "sample.txt")
        with open("./file.txt", "rb") as data:
            await blob_client.upload_blob(data, overwrite=True)

# Download async
async def download_async():
    credential = DefaultAzureCredential()
    async with BlobServiceClient(account_url, credential=credential) as client:
        blob_client = client.get_blob_client("mycontainer", "sample.txt")
        stream = await blob_client.download_blob()
        data = await stream.readall()
```
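The coroutines above still need an event loop; a minimal entry point:

```python
import asyncio

asyncio.run(upload_async())
asyncio.run(download_async())
```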
Best practices:
- Set `overwrite=True` explicitly when re-uploading
- Use `max_concurrency` for large file transfers
- Prefer `readinto()` over `readall()` for memory efficiency
- Use `walk_blobs()` for hierarchical listing