From johnlindquist-claude
Database operations for SQLite, PostgreSQL, and MySQL. Use for queries, schema inspection, migrations, and AI-assisted query generation.
npx claudepluginhub joshuarweaver/cascade-ai-ml-engineering --plugin johnlindquist-claude
This skill uses the workspace's default tool permissions.
Query and manage databases across SQLite, PostgreSQL, and MySQL.
Creates isolated Git worktrees for feature branches with prioritized directory selection, gitignore safety checks, auto project setup for Node/Python/Rust/Go, and baseline verification.
Executes implementation plans in current session by dispatching fresh subagents per independent task, with two-stage reviews: spec compliance then code quality.
Dispatches parallel agents to independently tackle 2+ tasks like separate test failures or subsystems without shared state or dependencies.
Query and manage databases across SQLite, PostgreSQL, and MySQL.
Install database CLIs as needed:
# SQLite (usually pre-installed on macOS/Linux; this just confirms it)
sqlite3 --version
# PostgreSQL client (Homebrew formula installs the full server, which includes psql)
brew install postgresql
# or, on Debian/Ubuntu (may require sudo); client tools only, no server
apt install postgresql-client
# MySQL client
brew install mysql-client
# or, on Debian/Ubuntu (may require sudo)
apt install mysql-client
# Connect to database (opens an interactive shell; creates the file if missing)
sqlite3 database.db
# Execute query non-interactively
sqlite3 database.db "SELECT * FROM users LIMIT 10"
# Output as CSV
sqlite3 -csv database.db "SELECT * FROM users"
# Output as JSON (requires sqlite 3.33+)
sqlite3 -json database.db "SELECT * FROM users"
# Column headers
sqlite3 -header database.db "SELECT * FROM users"
# Execute SQL file
sqlite3 database.db < queries.sql
# Schema commands (dot-commands are sqlite3 shell built-ins, not SQL)
sqlite3 database.db ".schema"
sqlite3 database.db ".tables"
sqlite3 database.db ".schema users"
# Connect via connection URI
# NOTE(review): a password embedded in the URL leaks via shell history and `ps`;
# prefer ~/.pgpass or the PGPASSWORD environment variable for real credentials
psql postgresql://user:pass@host:5432/dbname
# Execute query
psql -c "SELECT * FROM users LIMIT 10" postgresql://...
# Tuples only (no headers)
psql -t -c "SELECT count(*) FROM users" postgresql://...
# No alignment (machine-readable, unpadded columns)
psql -t -A -c "SELECT id,name FROM users" postgresql://...
# Execute SQL file
psql -f queries.sql postgresql://...
# List tables (\dt is a psql meta-command, not SQL)
psql -c "\dt" postgresql://...
# Describe table
psql -c "\d users" postgresql://...
# Output format
psql -c "SELECT * FROM users" --csv postgresql://...
psql -c "SELECT * FROM users" --html postgresql://...
# Connect (-p with no value prompts for the password interactively —
# safer than passing it on the command line, where it would leak via `ps`)
mysql -h host -u user -p dbname
# Execute query
mysql -h host -u user -p -e "SELECT * FROM users LIMIT 10" dbname
# Batch mode: tab-separated output (header row is still printed; add -N to drop it)
mysql -h host -u user -p -B -e "SELECT * FROM users" dbname
# Execute SQL file
mysql -h host -u user -p dbname < queries.sql
# Show tables
mysql -h host -u user -p -e "SHOW TABLES" dbname
# Describe table
mysql -h host -u user -p -e "DESCRIBE users" dbname
# --- SQLite ---
# All tables
sqlite3 db.sqlite ".tables"
# Table schema
sqlite3 db.sqlite ".schema tablename"
# All schemas
sqlite3 db.sqlite ".schema"
# --- PostgreSQL ---
# Quote "$DATABASE_URL": an unquoted expansion word-splits and globs if the
# URL contains spaces or special characters (ShellCheck SC2086)
# All tables
psql -c "\dt" "$DATABASE_URL"
# Table schema
psql -c "\d tablename" "$DATABASE_URL"
# Table with indexes, storage details, and comments
psql -c "\d+ tablename" "$DATABASE_URL"
# --- MySQL ---
# All tables
mysql -e "SHOW TABLES" -h host -u user -p dbname
# Table schema
mysql -e "DESCRIBE tablename" -h host -u user -p dbname
# Full CREATE TABLE statement (includes indexes and constraints)
mysql -e "SHOW CREATE TABLE tablename" -h host -u user -p dbname
# --- Query plans ---
# SQLite
sqlite3 db.sqlite "EXPLAIN QUERY PLAN SELECT * FROM users WHERE email = 'x'"
# PostgreSQL — quote "$DATABASE_URL" (SC2086)
# NOTE: EXPLAIN ANALYZE actually executes the statement; avoid it on writes
psql -c "EXPLAIN ANALYZE SELECT * FROM users WHERE email = 'x'" "$DATABASE_URL"
# MySQL
mysql -e "EXPLAIN SELECT * FROM users WHERE email = 'x'" dbname
# --- CSV export ---
# SQLite to CSV
sqlite3 -csv -header db.sqlite "SELECT * FROM users" > users.csv
# PostgreSQL to CSV (\COPY runs client-side, so no server filesystem access needed)
psql -c "\COPY users TO 'users.csv' CSV HEADER" "$DATABASE_URL"
# MySQL to CSV
# CAUTION: this naive tr conversion produces broken CSV if any field contains
# a tab or a comma, and does no quoting — fine for quick inspection only;
# for real exports use SELECT ... INTO OUTFILE with FIELDS TERMINATED BY ','
mysql -e "SELECT * FROM users" -B dbname | tr '\t' ',' > users.csv
Use Gemini to help write queries:
# Describe what you want — including the table schemas in the prompt makes
# the generated SQL far more likely to be correct
gemini -m pro -o text -e "" "Write a SQL query to:
- Find all users who signed up in the last 30 days
- Who have made at least one purchase
- Order by purchase count descending
Table schemas:
- users (id, email, created_at)
- purchases (id, user_id, amount, created_at)
Output PostgreSQL-compatible SQL."
# Generate query into a variable
QUERY=$(gemini -m pro -o text -e "" "Write SQL for: [your request]")
# ALWAYS review generated SQL before executing — never pipe model output
# straight into the database
echo "Generated query:"
echo "$QUERY"
# Then execute if safe (expansions quoted to survive special characters)
# psql -c "$QUERY" "$DATABASE_URL"
# Create migration file (the quoted 'EOF' delimiter keeps the SQL literal —
# no shell variable expansion inside the heredoc)
cat > migrations/001_add_column.sql << 'EOF'
ALTER TABLE users ADD COLUMN status VARCHAR(50) DEFAULT 'active';
EOF
# Apply migration — quote "$DATABASE_URL" so special characters in the URL
# survive word-splitting (ShellCheck SC2086)
psql -f migrations/001_add_column.sql "$DATABASE_URL"
# Safe workflow: rehearse the migration on a throwaway copy before production.
# 1. Test on copy first
createdb test_migration
pg_dump "$DATABASE_URL" | psql test_migration
# 2. Run migration on test
psql -f migration.sql test_migration
# 3. Verify
psql -c "\d tablename" test_migration
# 4. Apply to production
psql -f migration.sql "$DATABASE_URL"
# 5. Cleanup
dropdb test_migration
Store connection strings securely:
# .env file (add it to .gitignore — never commit credentials!)
DATABASE_URL=postgresql://user:pass@host:5432/dbname
SQLITE_DB=./data/app.db
# Usage — the shell does not load .env automatically; export the variables
# first (e.g. `set -a; source .env; set +a`). Quote the expansions so
# special characters in the URL survive word-splitting (ShellCheck SC2086).
psql "$DATABASE_URL"
sqlite3 "$SQLITE_DB"