Implement Apollo.io rate limiting and backoff. Use when handling rate limits, implementing retry logic, or optimizing API request throughput. Trigger with phrases like "apollo rate limit", "apollo 429", "apollo throttling", "apollo backoff", "apollo request limits".
Implements Apollo.io rate limiting and exponential backoff to maximize API throughput.
/plugin marketplace add jeremylongshore/claude-code-plugins-plus-skills
/plugin install apollo-pack@claude-code-plugins-plus
Implement robust rate limiting and backoff strategies for the Apollo.io API to maximize throughput while avoiding 429 errors.
| Endpoint Category | Rate Limit | Window | Burst Limit |
|---|---|---|---|
| People Search | 100/min | 1 minute | 10/sec |
| Person Enrichment | 100/min | 1 minute | 10/sec |
| Organization Enrichment | 100/min | 1 minute | 10/sec |
| Sequences/Campaigns | 50/min | 1 minute | 5/sec |
| Bulk Operations | 10/min | 1 minute | 2/sec |
| General API | 100/min | 1 minute | 10/sec |
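The limits in the table above can be folded into per-category limiter settings. The sketch below is illustrative only: the category keys and the derived `minSpacingMs` values are assumptions, and actual limits vary by Apollo plan, so confirm them before relying on these numbers.

```typescript
// Illustrative per-category settings derived from the table above.
interface EndpointLimits {
  maxRequests: number;  // requests per window
  windowMs: number;     // window length in ms
  minSpacingMs: number; // derived from the burst limit (e.g. 10/sec -> 100ms)
}

const APOLLO_LIMITS: Record<string, EndpointLimits> = {
  peopleSearch:     { maxRequests: 100, windowMs: 60_000, minSpacingMs: 100 },
  personEnrichment: { maxRequests: 100, windowMs: 60_000, minSpacingMs: 100 },
  orgEnrichment:    { maxRequests: 100, windowMs: 60_000, minSpacingMs: 100 },
  sequences:        { maxRequests: 50,  windowMs: 60_000, minSpacingMs: 200 },
  bulkOperations:   { maxRequests: 10,  windowMs: 60_000, minSpacingMs: 500 },
  general:          { maxRequests: 100, windowMs: 60_000, minSpacingMs: 100 },
};
```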
# Check current rate limit status
curl -sS -i -X POST "https://api.apollo.io/v1/people/search" \
-H "Content-Type: application/json" \
-d '{"api_key": "'$APOLLO_API_KEY'", "per_page": 1}'
# Response headers:
# X-RateLimit-Limit: 100
# X-RateLimit-Remaining: 95
# X-RateLimit-Reset: 1640000000
# Retry-After: 60 (only when rate limited)
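A small helper can turn those headers into a typed object before deciding whether to throttle. This is a sketch against the header names shown in the example output; their availability may vary by endpoint and plan, hence the defensive defaults.

```typescript
interface RateLimitInfo {
  limit: number;
  remaining: number;
  resetAt: Date;         // X-RateLimit-Reset is a Unix timestamp in seconds
  retryAfterMs?: number; // only present on 429 responses
}

// Parse the rate-limit headers from a fetch() response.
function parseRateLimitHeaders(res: Response): RateLimitInfo {
  const retryAfter = res.headers.get('retry-after');
  return {
    limit: Number(res.headers.get('x-ratelimit-limit') ?? 0),
    remaining: Number(res.headers.get('x-ratelimit-remaining') ?? 0),
    resetAt: new Date(Number(res.headers.get('x-ratelimit-reset') ?? 0) * 1000),
    retryAfterMs: retryAfter ? Number(retryAfter) * 1000 : undefined,
  };
}
```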
// src/lib/apollo/rate-limiter.ts
interface RateLimiterConfig {
maxRequests: number;
windowMs: number;
minSpacingMs: number;
}
class RateLimiter {
private queue: Array<{
resolve: (value: void) => void;
reject: (error: Error) => void;
}> = [];
private requestTimestamps: number[] = [];
private lastRequestTime = 0;
private processing = false;
constructor(private config: RateLimiterConfig) {}
async acquire(): Promise<void> {
return new Promise((resolve, reject) => {
this.queue.push({ resolve, reject });
this.processQueue();
});
}
private async processQueue() {
if (this.processing || this.queue.length === 0) return;
this.processing = true;
while (this.queue.length > 0) {
// Clean old timestamps outside window
const now = Date.now();
this.requestTimestamps = this.requestTimestamps.filter(
(ts) => now - ts < this.config.windowMs
);
// Check if we're at capacity
if (this.requestTimestamps.length >= this.config.maxRequests) {
const oldestTs = this.requestTimestamps[0];
const waitTime = this.config.windowMs - (now - oldestTs) + 100;
await this.wait(waitTime);
continue;
}
// Enforce minimum spacing
const timeSinceLastRequest = now - this.lastRequestTime;
if (timeSinceLastRequest < this.config.minSpacingMs) {
await this.wait(this.config.minSpacingMs - timeSinceLastRequest);
}
// Process next request
const item = this.queue.shift()!;
this.requestTimestamps.push(Date.now());
this.lastRequestTime = Date.now();
item.resolve();
}
this.processing = false;
}
private wait(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
}
// Create rate limiter for Apollo
export const apolloRateLimiter = new RateLimiter({
maxRequests: 90, // Leave buffer below 100
windowMs: 60000,
minSpacingMs: 100, // 100ms between requests
});
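A minimal usage sketch: call `acquire()` before every outbound request so bursts are smoothed automatically. The endpoint, auth style, and payload shape below mirror the curl example above and are illustrative only.

```typescript
import { apolloRateLimiter } from './rate-limiter';

// Acquire a slot, then issue the request. If the limiter is at capacity,
// acquire() resolves only after enough of the window has elapsed.
async function rateLimitedSearch(body: Record<string, unknown>) {
  await apolloRateLimiter.acquire();
  const res = await fetch('https://api.apollo.io/v1/people/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ api_key: process.env.APOLLO_API_KEY, ...body }),
  });
  if (!res.ok) throw new Error(`Apollo request failed: ${res.status}`);
  return res.json();
}
```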
// src/lib/apollo/backoff.ts
interface BackoffConfig {
initialDelayMs: number;
maxDelayMs: number;
maxRetries: number;
multiplier: number;
jitter: boolean;
}
const defaultConfig: BackoffConfig = {
initialDelayMs: 1000,
maxDelayMs: 60000,
maxRetries: 5,
multiplier: 2,
jitter: true,
};
export async function withBackoff<T>(
fn: () => Promise<T>,
config: Partial<BackoffConfig> = {}
): Promise<T> {
const cfg = { ...defaultConfig, ...config };
let lastError: Error | undefined;
let delay = cfg.initialDelayMs;
for (let attempt = 0; attempt <= cfg.maxRetries; attempt++) {
try {
await apolloRateLimiter.acquire();
return await fn();
} catch (error: any) {
lastError = error;
// Check if retryable
const status = error.response?.status;
if (status === 401 || status === 403 || status === 422) {
throw error; // Don't retry auth/validation errors
}
if (attempt === cfg.maxRetries) {
break;
}
// Get delay from Retry-After header or calculate
const retryAfter = error.response?.headers?.['retry-after'];
if (retryAfter) {
delay = parseInt(retryAfter, 10) * 1000;
}
// Add jitter to prevent thundering herd
const jitter = cfg.jitter ? Math.random() * 1000 : 0;
const actualDelay = Math.min(delay + jitter, cfg.maxDelayMs);
console.log(`Retry ${attempt + 1}/${cfg.maxRetries} after ${actualDelay}ms`);
await new Promise((r) => setTimeout(r, actualDelay));
delay *= cfg.multiplier;
}
}
throw lastError!;
}
// src/lib/apollo/request-queue.ts
import PQueue from 'p-queue';
// Concurrency-limited queue
export const apolloQueue = new PQueue({
concurrency: 5, // Max 5 concurrent requests
interval: 1000, // Per second
intervalCap: 10, // Max 10 per interval
});
// Usage
async function batchSearchPeople(domains: string[]): Promise<Person[]> {
const results = await Promise.all(
domains.map((domain) =>
apolloQueue.add(() =>
withBackoff(() => apollo.searchPeople({ q_organization_domains: [domain] }))
)
)
);
return results.flatMap((r) => r?.people ?? []);
}
import { withBackoff } from './backoff';
const people = await withBackoff(() =>
apollo.searchPeople({
q_organization_domains: ['stripe.com'],
per_page: 100,
})
);
import { apolloQueue } from './request-queue';
async function enrichCompanies(domains: string[]) {
const results = [];
for (const domain of domains) {
const result = await apolloQueue.add(
() => withBackoff(() => apollo.enrichOrganization(domain)),
{ priority: 1 } // Lower priority
);
results.push(result);
}
return results;
}
// High priority for user-facing requests
async function interactiveSearch(query: string) {
return apolloQueue.add(
() => withBackoff(() => apollo.searchPeople({ q_keywords: query })),
{ priority: 10 } // Highest priority (p-queue runs higher numbers first)
);
}
// Low priority for background sync
async function backgroundSync(contacts: string[]) {
return Promise.all(
contacts.map((id) =>
apolloQueue.add(
() => withBackoff(() => apollo.getContact(id)),
{ priority: 0 } // Lowest priority
)
)
);
}
// src/lib/apollo/rate-monitor.ts
class RateLimitMonitor {
private requests: Array<{ timestamp: number; remaining: number }> = [];
recordRequest(remaining: number) {
this.requests.push({
timestamp: Date.now(),
remaining,
});
// Keep only last 5 minutes
const cutoff = Date.now() - 5 * 60 * 1000;
this.requests = this.requests.filter((r) => r.timestamp > cutoff);
}
getStats() {
const lastMinute = this.requests.filter(
(r) => r.timestamp > Date.now() - 60000
);
return {
requestsLastMinute: lastMinute.length,
currentRemaining: lastMinute[lastMinute.length - 1]?.remaining ?? 100,
utilizationPercent: (lastMinute.length / 100) * 100,
isNearLimit: lastMinute.length > 80,
};
}
}
export const rateLimitMonitor = new RateLimitMonitor();
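To keep the monitor fed, record `X-RateLimit-Remaining` after each response and apply a soft brake when utilization gets high. A sketch, assuming a plain fetch-based request path:

```typescript
import { rateLimitMonitor } from './rate-monitor';

// Pause briefly when the last minute is near the limit, then record the
// remaining quota reported by Apollo so getStats() stays accurate.
async function monitoredRequest(url: string, body: unknown): Promise<Response> {
  if (rateLimitMonitor.getStats().isNearLimit) {
    await new Promise((r) => setTimeout(r, 1000)); // soft brake before a hard 429
  }
  const res = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  rateLimitMonitor.recordRequest(
    Number(res.headers.get('x-ratelimit-remaining') ?? 100)
  );
  return res;
}
```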
| Scenario | Strategy |
|---|---|
| 429 response | Use Retry-After header |
| Burst limit hit | Add minimum spacing |
| Sustained limit | Queue with concurrency |
| Network timeout | Exponential backoff |
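These strategies compose into a single entry point: the queue caps concurrency, `withBackoff` wraps the rate limiter and handles 429s and timeouts, and `priority` follows p-queue semantics (higher numbers run first). A minimal sketch:

```typescript
import { apolloQueue } from './request-queue';
import { withBackoff } from './backoff';

// Single wrapper that applies queueing, spacing, and retries together.
export async function apolloRequest<T>(
  fn: () => Promise<T>,
  priority = 0
): Promise<T> {
  // Cast because some p-queue versions widen add()'s resolved type.
  return (await apolloQueue.add(() => withBackoff(fn), { priority })) as T;
}
```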
Proceed to apollo-security-basics for API security best practices.