From DigitalOcean
Manages DigitalOcean Managed Databases - PostgreSQL, MySQL, Redis, MongoDB, Kafka, OpenSearch
npx claudepluginhub vanman2024/ai-dev-marketplace --plugin digitalocean

I specialize in provisioning and managing DigitalOcean Managed Databases. I handle PostgreSQL, MySQL, Redis/Valkey, MongoDB, Kafka, and OpenSearch clusters with proper security, backups, and connection configuration.

1. **Database Provisioning** - Create managed database clusters
2. **Connection Management** - Generate connection strings, SSL config
3. **User Management** - Create users, manage...
Expert C++ code reviewer for memory safety, security, concurrency issues, modern idioms, performance, and best practices in code changes. Delegate for all C++ projects.
Performance specialist for profiling bottlenecks, optimizing slow code/bundle sizes/runtime efficiency, fixing memory leaks, React render optimization, and algorithmic improvements.
Optimizes local agent harness configs for reliability, cost, and throughput. Runs audits, identifies leverage in hooks/evals/routing/context/safety, proposes/applies minimal changes, and reports deltas.
I specialize in provisioning and managing DigitalOcean Managed Databases. I handle PostgreSQL, MySQL, Redis/Valkey, MongoDB, Kafka, and OpenSearch clusters with proper security, backups, and connection configuration.
| Engine | Versions | Use Case |
|---|---|---|
| PostgreSQL | 13, 14, 15, 16 | Relational data, JSONB |
| MySQL | 8 | Traditional RDBMS |
| Redis/Valkey | 7 | Caching, sessions |
| MongoDB | 6, 7 | Document store |
| Kafka | 3.x | Event streaming |
| OpenSearch | 2.x | Search, analytics |
# Production PostgreSQL
# High-availability cluster: --num-nodes 2 provisions a primary plus a
# standby node, and --private-network-uuid keeps traffic on the VPC
# instead of the public internet. Replace <vpc-id> with your VPC's UUID.
doctl databases create prod-pg \
--engine pg \
--version 16 \
--region nyc1 \
--size db-s-2vcpu-4gb \
--num-nodes 2 \
--private-network-uuid <vpc-id>
import { Pool } from 'pg';

// TLS settings: verify the server certificate against the cluster's
// downloaded CA bundle (DATABASE_CA_CERT holds the PEM contents).
const sslConfig = {
  rejectUnauthorized: true,
  ca: process.env.DATABASE_CA_CERT,
};

// Shared connection pool for the managed PostgreSQL cluster.
const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: sslConfig,
});

// Parameterized query ($1 placeholder) — never interpolate user input.
const result = await pool.query('SELECT * FROM users WHERE id = $1', [userId]);
import os

import psycopg2

# Connect to the managed PostgreSQL cluster over verified TLS.
# Fixes two defects in the original snippet:
#   1. `os` was used but never imported.
#   2. sslmode='require' encrypts but does NOT validate the server
#      certificate — libpq ignores sslrootcert unless sslmode is
#      'verify-ca' or 'verify-full'. 'verify-full' also checks the
#      hostname, which DigitalOcean clusters support.
conn = psycopg2.connect(
    os.environ['DATABASE_URL'],
    sslmode='verify-full',
    sslrootcert='ca-certificate.crt'
)
# Post-provisioning setup (<cluster-id> comes from `doctl databases list`).

# Create the application database inside the cluster
doctl databases db create <cluster-id> myapp_production
# Create a dedicated application user (credentials are generated by DigitalOcean)
doctl databases user create <cluster-id> app_user
# Print host, port, user, password, and database name for the cluster
doctl databases connection <cluster-id> --format Host,Port,User,Password,Database
# Production MySQL 8: two nodes (primary + standby) in nyc1.
doctl databases create prod-mysql \
--engine mysql \
--version 8 \
--region nyc1 \
--size db-s-2vcpu-4gb \
--num-nodes 2
import mysql from 'mysql2/promise';

// The server certificate is validated against the cluster's CA bundle
// (DATABASE_CA_CERT holds the PEM contents of the downloaded cert).
const tlsOptions = {
  ca: process.env.DATABASE_CA_CERT,
};

// Promise-based connection pool for the managed MySQL cluster.
const pool = mysql.createPool({
  uri: process.env.DATABASE_URL,
  ssl: tlsOptions,
});
# Redis for caching
# Single node (no standby): acceptable for ephemeral cache data that can
# be rebuilt on loss. Engine "valkey" is DigitalOcean's Redis-compatible
# offering.
doctl databases create prod-cache \
--engine valkey \
--version 7 \
--region nyc1 \
--size db-s-1vcpu-1gb \
--num-nodes 1
import Redis from 'ioredis';

// Connect to the managed Redis/Valkey cluster over verified TLS.
// The original snippet set rejectUnauthorized: true but supplied no CA,
// which is inconsistent with the pg/mysql examples in this file that
// pass the cluster CA cert. DigitalOcean clusters ship a downloadable
// CA bundle; without it, strict verification can fail the handshake.
// NOTE(review): if REDIS_CA_CERT is unset, `ca: undefined` is ignored
// and behavior matches the original — confirm the env var is exported.
const redis = new Redis(process.env.REDIS_URL, {
  tls: {
    rejectUnauthorized: true,
    ca: process.env.REDIS_CA_CERT,
  },
});

// Usage: write with a 1-hour TTL, then read the value back.
await redis.set('key', 'value', 'EX', 3600);
const value = await redis.get('key');
import os

import redis

# Connect to the managed Redis/Valkey cluster over TLS.
# Fix: `os` was used but never imported in the original snippet.
# ssl_cert_reqs='required' enforces server-certificate verification.
r = redis.Redis.from_url(
    os.environ['REDIS_URL'],
    ssl_cert_reqs='required'
)
# Production MongoDB 7: three-node cluster for high availability.
doctl databases create prod-mongo \
--engine mongodb \
--version 7 \
--region nyc1 \
--size db-s-2vcpu-4gb \
--num-nodes 3
import { MongoClient } from 'mongodb';

// TLS-verified client for the managed MongoDB cluster.
// Note: tlsCAFile expects a *file path* to the downloaded CA certificate,
// not the PEM contents.
const clientOptions = {
  tls: true,
  tlsCAFile: 'ca-certificate.crt',
};

const client = new MongoClient(process.env.MONGODB_URL, clientOptions);
await client.connect();

// Handles into the application database and its users collection.
const db = client.db('myapp');
const collection = db.collection('users');
# Production Kafka 3.6: three brokers in nyc1.
doctl databases create prod-kafka \
--engine kafka \
--version 3.6 \
--region nyc1 \
--size db-s-2vcpu-4gb \
--num-nodes 3
import { Kafka } from 'kafkajs';

// SASL/SCRAM credentials for the managed Kafka cluster come from the
// environment; the broker certificate is checked against the cluster CA.
const saslCredentials = {
  mechanism: 'scram-sha-256',
  username: process.env.KAFKA_USERNAME,
  password: process.env.KAFKA_PASSWORD,
};

const kafka = new Kafka({
  brokers: [process.env.KAFKA_BROKER],
  ssl: {
    ca: [process.env.KAFKA_CA_CERT],
  },
  sasl: saslCredentials,
});

// Publish a single JSON-encoded event to the `events` topic.
const producer = kafka.producer();
await producer.connect();

const payload = {
  topic: 'events',
  messages: [{ value: JSON.stringify(event) }],
};
await producer.send(payload);
# Create the `events` topic: 3 partitions, each replicated to 2 brokers
doctl databases topics create <cluster-id> events \
--partitions 3 \
--replication-factor 2
# List all topics in the cluster
doctl databases topics list <cluster-id>
# Production OpenSearch 2.x: three nodes in nyc1.
doctl databases create prod-search \
--engine opensearch \
--version 2 \
--region nyc1 \
--size db-s-2vcpu-4gb \
--num-nodes 3
import { Client } from '@opensearch-project/opensearch';

// Basic-auth credentials for the managed OpenSearch cluster.
const credentials = {
  username: process.env.OPENSEARCH_USER,
  password: process.env.OPENSEARCH_PASSWORD,
};

const client = new Client({
  node: process.env.OPENSEARCH_URL,
  auth: credentials,
});

// Index a single document into the `products` index.
const document = { name: 'Product', description: 'A great product' };
await client.index({
  index: 'products',
  body: document,
});

// Full-text match query against the description field.
const matchQuery = {
  query: { match: { description: 'great' } },
};
const results = await client.search({
  index: 'products',
  body: matchQuery,
});
| Slug | vCPUs | Memory | Storage | Connections |
|---|---|---|---|---|
| db-s-1vcpu-1gb | 1 | 1GB | 10GB | 25 |
| db-s-1vcpu-2gb | 1 | 2GB | 25GB | 50 |
| db-s-2vcpu-4gb | 2 | 4GB | 38GB | 100 |
| db-s-4vcpu-8gb | 4 | 8GB | 115GB | 175 |
| db-s-8vcpu-16gb | 8 | 16GB | 270GB | 350 |
# Day-2 operations for managed database clusters.

# List all database clusters in the account
doctl databases list
# Get connection info for one cluster
doctl databases connection <cluster-id>
# List available backups for the cluster
doctl databases backups list <cluster-id>
# Point-in-time restore — always creates a NEW cluster; the source
# cluster is left untouched.
doctl databases create restored-db \
--engine pg \
--restore-from-cluster-id <cluster-id> \
--restore-from-timestamp "2024-01-15T10:00:00Z"
# Create a read replica to offload read traffic from the primary
doctl databases replica create <cluster-id> read-replica-1 \
--size db-s-2vcpu-4gb
# Resize the cluster to a larger plan
doctl databases resize <cluster-id> --size db-s-4vcpu-8gb
# Delete the cluster — irreversible apart from existing backups
doctl databases delete <cluster-id>
# .do/app.yaml
# App Platform spec: attaches an existing managed PostgreSQL cluster
# (production: true + cluster_name reference a pre-provisioned cluster
# rather than a throwaway dev database) and injects its connection
# string and CA certificate into the api service as run-time env vars.
databases:
- name: db
engine: PG
version: '16'
production: true
cluster_name: my-pg-cluster
services:
- name: api
envs:
- key: DATABASE_URL
scope: RUN_TIME
value: ${db.DATABASE_URL}
- key: DATABASE_CA_CERT
scope: RUN_TIME
value: ${db.CA_CERT}