Manages file storage with AWS S3 using the JavaScript SDK v3. Use when uploading files, generating presigned URLs, managing buckets, or implementing cloud storage in Node.js applications.
/plugin marketplace add mgd34msu/goodvibes-plugin
/plugin install goodvibes@goodvibes-market
This skill inherits all available tools. When active, it can use any tool Claude has access to.
Object storage with the AWS SDK for JavaScript v3. Upload files, generate presigned URLs, and manage buckets.
npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner
# The multipart upload and presigned POST examples below also need:
npm install @aws-sdk/lib-storage @aws-sdk/s3-presigned-post
import { S3Client } from '@aws-sdk/client-s3';

// Omit `credentials` to fall back to the SDK's default provider chain
// (environment variables, shared config files, IAM roles).
const s3Client = new S3Client({
  region: process.env.AWS_REGION || 'us-east-1',
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
});
import { PutObjectCommand } from '@aws-sdk/client-s3';

async function uploadFile(bucket, key, body, contentType) {
  const command = new PutObjectCommand({
    Bucket: bucket,
    Key: key,
    Body: body,
    ContentType: contentType,
  });
  await s3Client.send(command);
  return `https://${bucket}.s3.amazonaws.com/${key}`;
}

// Usage
await uploadFile(
  'my-bucket',
  'uploads/image.jpg',
  fileBuffer,
  'image/jpeg'
);
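Uploads can fail for reasons like a missing bucket or denied credentials, and the SDK surfaces these as typed errors. A minimal error-handling sketch around the `uploadFile` helper above, using the S3ServiceException base class exported by the client package:

import { S3ServiceException } from '@aws-sdk/client-s3';

try {
  await uploadFile('my-bucket', 'uploads/image.jpg', fileBuffer, 'image/jpeg');
} catch (error) {
  if (error instanceof S3ServiceException) {
    // e.g. NoSuchBucket, AccessDenied
    console.error(`S3 error ${error.name}: ${error.message}`);
  } else {
    throw error;
  }
}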
import { PutObjectCommand } from '@aws-sdk/client-s3';

const command = new PutObjectCommand({
  Bucket: 'my-bucket',
  Key: 'documents/report.pdf',
  Body: fileBuffer,
  ContentType: 'application/pdf',
  // Access control
  ACL: 'private', // private, public-read, public-read-write
  // Metadata
  Metadata: {
    'uploaded-by': 'user-123',
    'original-name': 'quarterly-report.pdf',
  },
  // Caching
  CacheControl: 'max-age=31536000',
  // Server-side encryption
  ServerSideEncryption: 'AES256',
  // Content disposition
  ContentDisposition: 'attachment; filename="report.pdf"',
  // Tags
  Tagging: 'environment=production&type=report',
});

await s3Client.send(command);
import { Upload } from '@aws-sdk/lib-storage';

async function uploadLargeFile(bucket, key, body) {
  const upload = new Upload({
    client: s3Client,
    params: {
      Bucket: bucket,
      Key: key,
      Body: body,
    },
    queueSize: 4, // Concurrent parts
    partSize: 5 * 1024 * 1024, // 5MB parts
    leavePartsOnError: false,
  });

  upload.on('httpUploadProgress', (progress) => {
    console.log(`Progress: ${progress.loaded}/${progress.total}`);
  });

  await upload.done();
}
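The `Body` can be a buffer or a readable stream, so large local files can be streamed instead of loaded into memory. A usage sketch (the file path is illustrative):

import { createReadStream } from 'node:fs';

await uploadLargeFile(
  'my-bucket',
  'videos/recording.mp4',
  createReadStream('./recording.mp4')
);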
import { GetObjectCommand } from '@aws-sdk/client-s3';

async function downloadFile(bucket, key) {
  const command = new GetObjectCommand({
    Bucket: bucket,
    Key: key,
  });
  const response = await s3Client.send(command);

  // Convert stream to buffer
  const chunks = [];
  for await (const chunk of response.Body) {
    chunks.push(chunk);
  }
  return Buffer.concat(chunks);
}

// Or get as string
async function getFileAsString(bucket, key) {
  const command = new GetObjectCommand({
    Bucket: bucket,
    Key: key,
  });
  const response = await s3Client.send(command);
  return response.Body.transformToString();
}
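To save a downloaded object to disk, the buffer returned by `downloadFile` can be written directly. A minimal sketch (the local path is an assumption):

import { writeFile } from 'node:fs/promises';

const data = await downloadFile('my-bucket', 'documents/report.pdf');
await writeFile('./report.pdf', data);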
Generate temporary URLs for upload/download without sharing credentials.
import { GetObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

async function getDownloadUrl(bucket, key, expiresIn = 3600) {
  const command = new GetObjectCommand({
    Bucket: bucket,
    Key: key,
  });
  const url = await getSignedUrl(s3Client, command, { expiresIn });
  return url;
}

// Usage - valid for 1 hour
const downloadUrl = await getDownloadUrl('my-bucket', 'files/doc.pdf');
import { PutObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

async function getUploadUrl(bucket, key, contentType, expiresIn = 3600) {
  const command = new PutObjectCommand({
    Bucket: bucket,
    Key: key,
    ContentType: contentType,
  });
  const url = await getSignedUrl(s3Client, command, { expiresIn });
  return url;
}

// Usage
const uploadUrl = await getUploadUrl(
  'my-bucket',
  'uploads/image.jpg',
  'image/jpeg'
);

// Client can PUT to this URL
await fetch(uploadUrl, {
  method: 'PUT',
  body: file,
  headers: {
    'Content-Type': 'image/jpeg',
  },
});
import { createPresignedPost } from '@aws-sdk/s3-presigned-post';

async function getUploadForm(bucket, key) {
  const { url, fields } = await createPresignedPost(s3Client, {
    Bucket: bucket,
    Key: key,
    Conditions: [
      ['content-length-range', 0, 10 * 1024 * 1024], // Max 10MB
      ['starts-with', '$Content-Type', 'image/'],
    ],
    Expires: 3600,
  });
  return { url, fields };
}

// Client-side usage
// Note: '${filename}' is S3's POST placeholder (deliberately single-quoted,
// not a JS template literal); S3 substitutes the uploaded file's name.
const { url, fields } = await getUploadForm('my-bucket', 'uploads/${filename}');

const formData = new FormData();
Object.entries(fields).forEach(([key, value]) => {
  formData.append(key, value);
});
formData.append('file', file);

await fetch(url, {
  method: 'POST',
  body: formData,
});
import { ListObjectsV2Command } from '@aws-sdk/client-s3';

async function listFiles(bucket, prefix = '') {
  const command = new ListObjectsV2Command({
    Bucket: bucket,
    Prefix: prefix,
    MaxKeys: 100,
  });
  const response = await s3Client.send(command);
  return response.Contents?.map((item) => ({
    key: item.Key,
    size: item.Size,
    lastModified: item.LastModified,
  })) || [];
}

// Paginate through all files
async function* listAllFiles(bucket, prefix = '') {
  let continuationToken;
  do {
    const command = new ListObjectsV2Command({
      Bucket: bucket,
      Prefix: prefix,
      ContinuationToken: continuationToken,
    });
    const response = await s3Client.send(command);
    for (const item of response.Contents || []) {
      yield item;
    }
    continuationToken = response.NextContinuationToken;
  } while (continuationToken);
}

// Usage
for await (const file of listAllFiles('my-bucket', 'uploads/')) {
  console.log(file.Key);
}
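To list only the top-level "folders" under a prefix, ListObjectsV2 accepts a Delimiter and groups matching keys into CommonPrefixes. A minimal sketch (the `listFolders` name is illustrative):

async function listFolders(bucket, prefix = '') {
  const command = new ListObjectsV2Command({
    Bucket: bucket,
    Prefix: prefix,
    Delimiter: '/',
  });
  const response = await s3Client.send(command);
  // CommonPrefixes holds the "folder" prefixes grouped by the delimiter
  return (response.CommonPrefixes || []).map((p) => p.Prefix);
}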
import { DeleteObjectCommand, DeleteObjectsCommand } from '@aws-sdk/client-s3';

// Delete single file
async function deleteFile(bucket, key) {
  const command = new DeleteObjectCommand({
    Bucket: bucket,
    Key: key,
  });
  await s3Client.send(command);
}

// Delete multiple files
async function deleteFiles(bucket, keys) {
  const command = new DeleteObjectsCommand({
    Bucket: bucket,
    Delete: {
      Objects: keys.map((Key) => ({ Key })),
    },
  });
  const response = await s3Client.send(command);
  return response.Deleted;
}

// Usage
await deleteFiles('my-bucket', [
  'uploads/old1.jpg',
  'uploads/old2.jpg',
]);
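To clear everything under a prefix, the pagination and batch-delete helpers above can be combined. DeleteObjects accepts at most 1,000 keys per request, so keys are flushed in batches. A sketch (the `deletePrefix` name is illustrative):

async function deletePrefix(bucket, prefix) {
  let batch = [];
  for await (const item of listAllFiles(bucket, prefix)) {
    batch.push(item.Key);
    if (batch.length === 1000) {
      await deleteFiles(bucket, batch);
      batch = [];
    }
  }
  if (batch.length > 0) {
    await deleteFiles(bucket, batch);
  }
}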
import { CopyObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';

async function copyFile(bucket, sourceKey, destinationKey) {
  const command = new CopyObjectCommand({
    Bucket: bucket,
    CopySource: `${bucket}/${sourceKey}`,
    Key: destinationKey,
  });
  await s3Client.send(command);
}

async function moveFile(bucket, sourceKey, destinationKey) {
  await copyFile(bucket, sourceKey, destinationKey);
  await deleteFile(bucket, sourceKey);
}
import { HeadObjectCommand } from '@aws-sdk/client-s3';

async function fileExists(bucket, key) {
  try {
    const command = new HeadObjectCommand({
      Bucket: bucket,
      Key: key,
    });
    await s3Client.send(command);
    return true;
  } catch (error) {
    if (error.name === 'NotFound') {
      return false;
    }
    throw error;
  }
}
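HeadObject also returns the object's metadata without downloading the body. A minimal sketch reusing the same command (the `getFileMetadata` name is illustrative):

async function getFileMetadata(bucket, key) {
  const command = new HeadObjectCommand({
    Bucket: bucket,
    Key: key,
  });
  const response = await s3Client.send(command);
  return {
    size: response.ContentLength,
    contentType: response.ContentType,
    lastModified: response.LastModified,
    metadata: response.Metadata, // user-defined metadata set at upload
  };
}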
// app/api/upload/route.ts
import { NextRequest, NextResponse } from 'next/server';
import { PutObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { s3Client } from '@/lib/s3';

export async function POST(request: NextRequest) {
  const { filename, contentType } = await request.json();
  const key = `uploads/${Date.now()}-${filename}`;

  const command = new PutObjectCommand({
    Bucket: process.env.S3_BUCKET!,
    Key: key,
    ContentType: contentType,
  });
  const uploadUrl = await getSignedUrl(s3Client, command, { expiresIn: 3600 });

  return NextResponse.json({
    uploadUrl,
    key,
    publicUrl: `https://${process.env.S3_BUCKET}.s3.amazonaws.com/${key}`,
  });
}

// Client component
async function handleUpload(file: File) {
  // Get presigned URL
  const response = await fetch('/api/upload', {
    method: 'POST',
    body: JSON.stringify({
      filename: file.name,
      contentType: file.type,
    }),
  });
  const { uploadUrl, publicUrl } = await response.json();

  // Upload directly to S3
  await fetch(uploadUrl, {
    method: 'PUT',
    body: file,
    headers: {
      'Content-Type': file.type,
    },
  });

  return publicUrl;
}
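A matching download route can hand out a presigned GET URL the same way. A sketch, assuming an illustrative route path and query parameter name rather than anything prescribed by the skill:

// app/api/download/route.ts (illustrative path)
import { NextRequest, NextResponse } from 'next/server';
import { GetObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { s3Client } from '@/lib/s3';

export async function GET(request: NextRequest) {
  const key = request.nextUrl.searchParams.get('key');
  if (!key) {
    return NextResponse.json({ error: 'Missing key' }, { status: 400 });
  }

  const command = new GetObjectCommand({
    Bucket: process.env.S3_BUCKET!,
    Key: key,
  });
  const downloadUrl = await getSignedUrl(s3Client, command, { expiresIn: 3600 });

  return NextResponse.json({ downloadUrl });
}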
Browser uploads and downloads through presigned URLs require a CORS configuration on the bucket. Set it in the AWS Console or via the SDK:
[
  {
    "AllowedHeaders": ["*"],
    "AllowedMethods": ["GET", "PUT", "POST", "DELETE"],
    "AllowedOrigins": ["https://yourdomain.com"],
    "ExposeHeaders": ["ETag"]
  }
]
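To apply the same rules from code, the client package exposes PutBucketCorsCommand. A minimal sketch:

import { PutBucketCorsCommand } from '@aws-sdk/client-s3';

await s3Client.send(new PutBucketCorsCommand({
  Bucket: 'my-bucket',
  CORSConfiguration: {
    CORSRules: [
      {
        AllowedHeaders: ['*'],
        AllowedMethods: ['GET', 'PUT', 'POST', 'DELETE'],
        AllowedOrigins: ['https://yourdomain.com'],
        ExposeHeaders: ['ETag'],
      },
    ],
  },
}));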
Use CloudFront for CDN delivery:
const cloudFrontUrl = `https://d1234.cloudfront.net/${key}`;
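A small helper keeps URL construction in one place. A sketch, assuming a CLOUDFRONT_DOMAIN environment variable (not part of the skill) that falls back to the direct S3 URL when unset:

function publicUrl(bucket, key) {
  const cdnDomain = process.env.CLOUDFRONT_DOMAIN; // e.g. d1234.cloudfront.net
  if (cdnDomain) {
    return `https://${cdnDomain}/${key}`;
  }
  return `https://${bucket}.s3.amazonaws.com/${key}`;
}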
The S3 SDK also works with S3-compatible services such as Cloudflare R2:
const s3Client = new S3Client({
  region: 'auto',
  endpoint: `https://${accountId}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: R2_ACCESS_KEY_ID,
    secretAccessKey: R2_SECRET_ACCESS_KEY,
  },
});