Optimize Linear API queries and caching for better performance. Use when improving response times, reducing API calls, or implementing caching strategies. Trigger with phrases like "linear performance", "optimize linear", "linear caching", "linear slow queries", "speed up linear".
/plugin marketplace add jeremylongshore/claude-code-plugins-plus-skills
/plugin install linear-pack@claude-code-plugins-plus
Optimize Linear API usage for maximum performance and minimal latency.
Minimize Field Selection:
// BAD: Fetching unnecessary fields
const issues = await client.issues();
for (const issue of issues.nodes) {
  // Only using id and title, but fetching everything
  console.log(issue.id, issue.title);
}
// GOOD: Request only needed fields
const query = `
  query MinimalIssues($first: Int!) {
    issues(first: $first) {
      nodes {
        id
        title
      }
    }
  }
`;
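To run a hand-written query like this, one option is to post it straight to Linear's GraphQL endpoint instead of going through the SDK's lazy field resolution. A minimal sketch, assuming a personal API key in LINEAR_API_KEY (OAuth access tokens would use a Bearer prefix instead):

// Sketch: execute the minimal query above against Linear's GraphQL endpoint.
// Assumes LINEAR_API_KEY holds a personal API key.
async function fetchMinimalIssues(first: number) {
  const response = await fetch("https://api.linear.app/graphql", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: process.env.LINEAR_API_KEY!,
    },
    body: JSON.stringify({ query, variables: { first } }),
  });
  const { data, errors } = await response.json();
  if (errors?.length) throw new Error(errors[0].message);
  return data.issues.nodes as { id: string; title: string }[];
}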
Avoid N+1 Queries:
// BAD: N+1 queries
const issues = await client.issues();
for (const issue of issues.nodes) {
  const state = await issue.state; // Separate query per issue!
  console.log(issue.title, state?.name);
}
// GOOD: Use connections and batch loading
const query = `
  query IssuesWithState($first: Int!) {
    issues(first: $first) {
      nodes {
        id
        title
        state {
          name
        }
      }
    }
  }
`;
// lib/cache.ts
import Redis from "ioredis";

const redis = new Redis(process.env.REDIS_URL);

interface CacheOptions {
  ttlSeconds: number;
  keyPrefix?: string;
}

export class LinearCache {
  private keyPrefix: string;
  private defaultTtl: number;

  constructor(options: CacheOptions = { ttlSeconds: 300 }) {
    this.keyPrefix = options.keyPrefix || "linear";
    this.defaultTtl = options.ttlSeconds;
  }

  private key(key: string): string {
    return `${this.keyPrefix}:${key}`;
  }

  async get<T>(key: string): Promise<T | null> {
    const data = await redis.get(this.key(key));
    return data ? JSON.parse(data) : null;
  }

  async set<T>(key: string, value: T, ttl = this.defaultTtl): Promise<void> {
    await redis.setex(this.key(key), ttl, JSON.stringify(value));
  }

  async getOrFetch<T>(
    key: string,
    fetcher: () => Promise<T>,
    ttl = this.defaultTtl
  ): Promise<T> {
    const cached = await this.get<T>(key);
    if (cached !== null) return cached; // explicit null check so cached falsy values still count as hits
    const data = await fetcher();
    await this.set(key, data, ttl);
    return data;
  }

  async invalidate(pattern: string): Promise<void> {
    const keys = await redis.keys(this.key(pattern));
    if (keys.length) {
      await redis.del(...keys);
    }
  }
}

export const cache = new LinearCache({ ttlSeconds: 300 });
// lib/cached-client.ts
import { LinearClient } from "@linear/sdk";
import { cache } from "./cache";

export class CachedLinearClient {
  private client: LinearClient;

  constructor(apiKey: string) {
    this.client = new LinearClient({ apiKey });
  }

  async getTeams() {
    return cache.getOrFetch(
      "teams",
      async () => {
        const teams = await this.client.teams();
        return teams.nodes.map(t => ({ id: t.id, name: t.name, key: t.key }));
      },
      3600 // Teams rarely change, cache for 1 hour
    );
  }

  async getWorkflowStates(teamKey: string) {
    return cache.getOrFetch(
      `states:${teamKey}`,
      async () => {
        const teams = await this.client.teams({
          filter: { key: { eq: teamKey } },
        });
        const team = teams.nodes[0];
        if (!team) throw new Error(`Team not found: ${teamKey}`);
        const states = await team.states();
        return states.nodes.map(s => ({
          id: s.id,
          name: s.name,
          type: s.type,
        }));
      },
      3600 // States rarely change
    );
  }

  async getIssue(identifier: string, maxAge = 60) {
    return cache.getOrFetch(
      `issue:${identifier}`,
      async () => {
        const issue = await this.client.issue(identifier);
        const state = await issue.state;
        return {
          id: issue.id,
          identifier: issue.identifier,
          title: issue.title,
          state: state?.name,
          priority: issue.priority,
        };
      },
      maxAge
    );
  }

  // Invalidate cache when we know data changed
  async createIssue(input: any) {
    const result = await this.client.createIssue(input);
    await cache.invalidate("issue:*"); // matches the issue:<identifier> keys written above
    return result;
  }
}
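A short usage sketch for the cached client, assuming LINEAR_API_KEY is set and Redis is reachable; repeat calls inside the TTL are served from Redis instead of hitting the API (the issue identifier below is hypothetical):

import { CachedLinearClient } from "./cached-client";

const linear = new CachedLinearClient(process.env.LINEAR_API_KEY!);

// First call hits the Linear API; calls within the next hour reuse the cache
const teams = await linear.getTeams();

// Cached for 60 seconds by default; pass a shorter maxAge for fresher data
const issue = await linear.getIssue("ENG-123", 30);
console.log(teams.length, issue.state);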
// lib/batcher.ts
interface BatchRequest<T> {
  key: string;
  resolve: (value: T) => void;
  reject: (error: Error) => void;
}

class RequestBatcher<T> {
  private queue: BatchRequest<T>[] = [];
  private timeout: NodeJS.Timeout | null = null;
  private batchSize: number;
  private delayMs: number;
  private batchFetcher: (keys: string[]) => Promise<Map<string, T>>;

  constructor(options: {
    batchSize?: number;
    delayMs?: number;
    batchFetcher: (keys: string[]) => Promise<Map<string, T>>;
  }) {
    this.batchSize = options.batchSize || 50;
    this.delayMs = options.delayMs || 10;
    this.batchFetcher = options.batchFetcher;
  }

  async load(key: string): Promise<T> {
    return new Promise((resolve, reject) => {
      this.queue.push({ key, resolve, reject });
      this.scheduleFlush();
    });
  }

  private scheduleFlush(): void {
    if (this.queue.length >= this.batchSize) {
      this.flush();
      return;
    }
    if (!this.timeout) {
      this.timeout = setTimeout(() => this.flush(), this.delayMs);
    }
  }

  private async flush(): Promise<void> {
    if (this.timeout) {
      clearTimeout(this.timeout);
      this.timeout = null;
    }
    const batch = this.queue.splice(0, this.batchSize);
    if (batch.length === 0) return;
    try {
      const keys = batch.map(r => r.key);
      const results = await this.batchFetcher(keys);
      for (const request of batch) {
        const result = results.get(request.key);
        if (result !== undefined) {
          request.resolve(result);
        } else {
          request.reject(new Error(`Not found: ${request.key}`));
        }
      }
    } catch (error) {
      for (const request of batch) {
        request.reject(error as Error);
      }
    }
  }
}

// Usage
const issueBatcher = new RequestBatcher<any>({
  batchFetcher: async (identifiers) => {
    const issues = await client.issues({
      filter: { identifier: { in: identifiers } },
    });
    return new Map(issues.nodes.map(i => [i.identifier, i]));
  },
});

// These will be batched into a single request
const [issue1, issue2, issue3] = await Promise.all([
  issueBatcher.load("ENG-1"),
  issueBatcher.load("ENG-2"),
  issueBatcher.load("ENG-3"),
]);
// lib/client-pool.ts
import { LinearClient } from "@linear/sdk";

class ClientPool {
  private clients: LinearClient[] = [];
  private maxClients: number;
  private currentIndex = 0;

  constructor(apiKey: string, maxClients = 5) {
    this.maxClients = maxClients;
    for (let i = 0; i < maxClients; i++) {
      this.clients.push(new LinearClient({ apiKey }));
    }
  }

  getClient(): LinearClient {
    const client = this.clients[this.currentIndex];
    this.currentIndex = (this.currentIndex + 1) % this.maxClients;
    return client;
  }
}

export const clientPool = new ClientPool(process.env.LINEAR_API_KEY!);
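A usage sketch for the pool, fanning a burst of parallel reads across the round-robin clients. Keep in mind that every pooled client shares the same API key, so Linear's per-key rate limits still apply; the team keys below are hypothetical:

import { clientPool } from "./client-pool";

// Spread parallel requests across the pooled clients (hypothetical team keys)
const teamKeys = ["ENG", "OPS", "DESIGN"];
const teams = await Promise.all(
  teamKeys.map(key =>
    clientPool.getClient().teams({ filter: { key: { eq: key } } })
  )
);
console.log(teams.map(t => t.nodes[0]?.name));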
// lib/complexity-monitor.ts
interface QueryStats {
  complexity: number;
  duration: number;
  timestamp: Date;
}

class ComplexityMonitor {
  private stats: QueryStats[] = [];
  private maxStats = 1000;

  record(complexity: number, duration: number): void {
    this.stats.push({
      complexity,
      duration,
      timestamp: new Date(),
    });
    if (this.stats.length > this.maxStats) {
      this.stats = this.stats.slice(-this.maxStats);
    }
  }

  getAverageComplexity(): number {
    if (this.stats.length === 0) return 0;
    return this.stats.reduce((a, b) => a + b.complexity, 0) / this.stats.length;
  }

  getSlowQueries(thresholdMs = 1000): QueryStats[] {
    return this.stats.filter(s => s.duration > thresholdMs);
  }

  getComplexQueries(threshold = 1000): QueryStats[] {
    return this.stats.filter(s => s.complexity > threshold);
  }
}

export const monitor = new ComplexityMonitor();
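A sketch of wiring the monitor into a call site. Duration comes from a local timer; the complexity value is a rough stand-in (the requested page size), since this snippet does not read any complexity figure back from the API:

import { LinearClient } from "@linear/sdk";
import { monitor } from "./complexity-monitor";

const client = new LinearClient({ apiKey: process.env.LINEAR_API_KEY! });

async function timedIssues(first: number) {
  const start = Date.now();
  const issues = await client.issues({ first });
  // Rough proxy for complexity: the number of nodes requested
  monitor.record(first, Date.now() - start);
  return issues.nodes;
}

await timedIssues(50);
console.log("avg complexity:", monitor.getAverageComplexity());
console.log("slow queries:", monitor.getSlowQueries(500).length);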
To optimize API costs, pair this skill with linear-cost-tuning.