From connectwise-automate
Manages ConnectWise Automate alerts: lists active alerts, acknowledges them, views history, creates tickets. Covers sources (monitors, scripts, events), severity levels, lifecycle, status, and ticket integration.
npx claudepluginhub wyre-technology/msp-claude-plugins --plugin connectwise-automate

This skill uses the workspace's default tool permissions.
Alerts in ConnectWise Automate are notifications generated by monitors, scripts, or system events that require attention. This skill covers alert listing, acknowledgment, history tracking, and ticket creation workflows.
Guides Next.js Cache Components and Partial Prerendering (PPR) with cacheComponents enabled. Implements 'use cache', cacheLife(), cacheTag(), revalidateTag(), static/dynamic optimization, and cache debugging.
Migrates code, prompts, and API calls from Claude Sonnet 4.0/4.5 or Opus 4.1 to Opus 4.5, updating model strings on Anthropic, AWS, GCP, Azure platforms.
Analyzes BMad project state from catalog CSV, configs, artifacts, and query to recommend next skills or answer questions. Useful for help requests, 'what next', or starting BMad.
Alerts in ConnectWise Automate are notifications generated by monitors, scripts, or system events that require attention. This skill covers alert listing, acknowledgment, history tracking, and ticket creation workflows.
| Source | Description | Example |
|---|---|---|
| Monitor | Generated by monitor threshold | CPU > 90% |
| Script | Generated by script execution | Backup failed |
| Event Log | Windows Event Log trigger | Security event |
| System | Automate system events | Agent offline |
| Manual | User-created alerts | Maintenance note |
| Level | Value | Description | Response Time |
|---|---|---|---|
| Information | 1 | Informational only | Review at convenience |
| Warning | 2 | Potential issue | Investigate within hours |
| Error | 3 | Failure detected | Respond within SLA |
| Critical | 4 | Severe/emergency | Immediate response |
Generated → Active → Acknowledged → Resolved
              │            │
              │            └── Ticket Created
              │
              └── Auto-Cleared (if condition clears)
| Status | Description |
|---|---|
| New | Just generated, unread |
| Active | Open, unacknowledged |
| Acknowledged | Someone is working on it |
| Resolved | Issue fixed, alert closed |
| Cleared | Condition auto-cleared |
| Suppressed | Temporarily hidden |
/**
 * A single ConnectWise Automate alert as returned by the
 * /cwa/api/v1/Alerts endpoints. Field semantics follow the example
 * responses shown elsewhere in this document.
 */
interface Alert {
// Identifiers
AlertID: number; // Primary key
AlertGUID: string; // Global unique ID
// Source
Source: AlertSource; // Monitor, Script, EventLog, System
SourceID: number; // ID of source (MonitorID, ScriptID, etc.)
SourceName: string; // Name of source
// Target
ComputerID: number; // Affected computer
ComputerName: string; // Computer hostname
ClientID: number; // Parent client
ClientName: string; // Client name
LocationID: number; // Location ID
// Alert Details
Subject: string; // Alert title
Message: string; // Detailed message
Severity: AlertSeverity; // 1-4 severity level
Status: AlertStatus; // Current status
// Timestamps (ISO 8601 strings in the example payloads)
TimeGenerated: string; // When created
TimeAcknowledged: string; // When acknowledged
TimeResolved: string; // When resolved
LastUpdate: string; // Last status change
// Acknowledgment
AcknowledgedBy: string; // User who acknowledged
Notes: string; // Acknowledgment notes
// Ticket Integration
TicketID: number; // Linked ticket ID
TicketStatus: string; // Ticket status
// Context
Category: string; // Alert category
AdditionalData: object; // Extra context data
}
/** Where the alert originated (see the "Source" table above). */
type AlertSource = 'Monitor' | 'Script' | 'EventLog' | 'System' | 'Manual';
/** Numeric severity: 1=Information, 2=Warning, 3=Error, 4=Critical. */
type AlertSeverity = 1 | 2 | 3 | 4;
/** Lifecycle state of an alert (see the status table above). */
type AlertStatus = 'New' | 'Active' | 'Acknowledged' | 'Resolved' | 'Cleared' | 'Suppressed';
/**
 * One audit-trail entry for an alert, as returned by
 * GET /cwa/api/v1/Alerts/{alertID}/History.
 */
interface AlertHistory {
HistoryID: number;
AlertID: number;
Action: string; // Status change, note added, etc.
ActionBy: string; // User who made change
ActionTime: string; // When action occurred
PreviousStatus: string;
NewStatus: string;
Notes: string;
}
GET /cwa/api/v1/Alerts?condition=Status in ('New','Active')&pageSize=100
Authorization: Bearer {token}
Response:
[
{
"AlertID": 54321,
"Subject": "Disk C: Low Space",
"Message": "Disk C: is 8% free on ACME-DC01",
"Severity": 2,
"Status": "Active",
"ComputerID": 12345,
"ComputerName": "ACME-DC01",
"ClientName": "Acme Corporation",
"Source": "Monitor",
"SourceName": "Disk Space Monitor",
"TimeGenerated": "2024-02-15T08:30:00Z",
"Category": "Performance"
}
]
GET /cwa/api/v1/Alerts/{alertID}
Authorization: Bearer {token}
Response:
{
"AlertID": 54321,
"AlertGUID": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
"Subject": "Disk C: Low Space",
"Message": "Disk C: is 8% free on ACME-DC01\n\nTotal: 500 GB\nFree: 40 GB\nThreshold: 10%",
"Severity": 2,
"Status": "Active",
"ComputerID": 12345,
"ComputerName": "ACME-DC01",
"ClientID": 100,
"ClientName": "Acme Corporation",
"LocationID": 1,
"Source": "Monitor",
"SourceID": 5001,
"SourceName": "Disk Space Monitor",
"TimeGenerated": "2024-02-15T08:30:00Z",
"Category": "Performance",
"AdditionalData": {
"DriveLetter": "C:",
"FreeSpaceGB": 40,
"TotalSpaceGB": 500,
"FreePercent": 8
}
}
GET /cwa/api/v1/Alerts?condition=ClientID = 100 and Status = 'Active'&pageSize=100
Authorization: Bearer {token}
GET /cwa/api/v1/Alerts?condition=Severity >= 3 and Status = 'Active'&pageSize=100
Authorization: Bearer {token}
POST /cwa/api/v1/Alerts/{alertID}/Acknowledge
Authorization: Bearer {token}
Content-Type: application/json
{
"Notes": "Investigating disk space issue"
}
Response:
{
"AlertID": 54321,
"Status": "Acknowledged",
"AcknowledgedBy": "admin@example.com",
"TimeAcknowledged": "2024-02-15T10:45:00Z"
}
POST /cwa/api/v1/Alerts/{alertID}/Resolve
Authorization: Bearer {token}
Content-Type: application/json
{
"Notes": "Cleared 50GB of temp files. Disk now at 18% free."
}
POST /cwa/api/v1/Alerts/{alertID}/Notes
Authorization: Bearer {token}
Content-Type: application/json
{
"Note": "Contacted user about large files in Downloads folder"
}
POST /cwa/api/v1/Alerts/{alertID}/CreateTicket
Authorization: Bearer {token}
Content-Type: application/json
{
"TicketSubject": "Disk Space Critical on ACME-DC01",
"Priority": 2,
"BoardID": 1,
"Notes": "Auto-created from Automate alert"
}
Response:
{
"AlertID": 54321,
"TicketID": 98765,
"TicketNumber": "TKT-2024-00123",
"TicketStatus": "New"
}
GET /cwa/api/v1/Alerts/{alertID}/History
Authorization: Bearer {token}
Response:
[
{
"HistoryID": 1,
"Action": "Created",
"ActionTime": "2024-02-15T08:30:00Z",
"NewStatus": "New"
},
{
"HistoryID": 2,
"Action": "Acknowledged",
"ActionBy": "admin@example.com",
"ActionTime": "2024-02-15T10:45:00Z",
"PreviousStatus": "Active",
"NewStatus": "Acknowledged",
"Notes": "Investigating disk space issue"
}
]
POST /cwa/api/v1/Alerts/{alertID}/Suppress
Authorization: Bearer {token}
Content-Type: application/json
{
"Duration": 3600,
"Reason": "Scheduled maintenance window"
}
POST /cwa/api/v1/Alerts/BulkAcknowledge
Authorization: Bearer {token}
Content-Type: application/json
{
"AlertIDs": [54321, 54322, 54323],
"Notes": "Bulk acknowledgment for server maintenance"
}
/**
 * Builds a summary dashboard of open high-severity alerts (Severity >= 3,
 * Status New/Active).
 *
 * @param client - API client exposing request(path) -> parsed JSON.
 * @returns {Promise<object>} Totals, per-client and per-category groupings,
 *   plus the oldest outstanding alert (null when there are none).
 */
async function getCriticalAlertsDashboard(client) {
  const alerts = await client.request(
    `/Alerts?condition=Severity >= 3 and Status in ('New','Active')&pageSize=100`
  );

  const dashboard = {
    totalCritical: alerts.length,
    byClient: {},
    byCategory: {},
    oldest: null
  };

  for (const a of alerts) {
    // Bucket per client, defaulting when the API omits the name.
    const owner = a.ClientName || 'Unknown';
    const bucket = dashboard.byClient[owner] || (dashboard.byClient[owner] = []);
    bucket.push({
      id: a.AlertID,
      subject: a.Subject,
      computer: a.ComputerName,
      severity: a.Severity,
      age: getAlertAge(a.TimeGenerated)
    });

    // Tally per category.
    const cat = a.Category || 'Uncategorized';
    dashboard.byCategory[cat] = (dashboard.byCategory[cat] || 0) + 1;

    // Keep whichever alert was generated earliest.
    const isOlder =
      dashboard.oldest === null ||
      new Date(a.TimeGenerated) < new Date(dashboard.oldest.TimeGenerated);
    if (isOlder) {
      dashboard.oldest = a;
    }
  }

  return dashboard;
}
/**
 * Renders the time elapsed since an ISO timestamp as a coarse
 * human-readable string: "N minutes" (< 1h), "N hours" (< 1d), else "N days".
 *
 * @param timeGenerated - ISO 8601 timestamp string.
 * @returns {string} Elapsed-time description.
 */
function getAlertAge(timeGenerated) {
  const elapsedMinutes = Math.floor(
    (Date.now() - new Date(timeGenerated).getTime()) / 60000
  );
  if (elapsedMinutes < 60) return `${elapsedMinutes} minutes`;
  if (elapsedMinutes < 1440) return `${Math.floor(elapsedMinutes / 60)} hours`;
  return `${Math.floor(elapsedMinutes / 1440)} days`;
}
/**
 * Acknowledges an alert and opens a linked ticket in a single workflow.
 * Severity >= 3 alerts are forced to ticket priority 1; otherwise the
 * caller-supplied (or default) priority is used.
 *
 * @param client  - API client exposing request(path, opts).
 * @param alertId - Alert to acknowledge.
 * @param options - Optional { notes, priority, boardId } overrides.
 * @returns {Promise<object>} Summary of the acknowledged alert and new ticket.
 */
async function acknowledgeAndCreateTicket(client, alertId, options = {}) {
  const notes = options.notes !== undefined ? options.notes : 'Acknowledged and ticket created';
  const priority = options.priority !== undefined ? options.priority : 2;
  const boardId = options.boardId !== undefined ? options.boardId : 1;

  // Fetch the alert first so the ticket can reuse its subject and message.
  const alert = await client.request(`/Alerts/${alertId}`);

  await client.request(`/Alerts/${alertId}/Acknowledge`, {
    method: 'POST',
    body: JSON.stringify({ Notes: notes })
  });

  // Error/critical alerts escalate to priority 1 regardless of the input.
  const ticketPriority = alert.Severity >= 3 ? 1 : priority;
  const ticket = await client.request(`/Alerts/${alertId}/CreateTicket`, {
    method: 'POST',
    body: JSON.stringify({
      TicketSubject: alert.Subject,
      Priority: ticketPriority,
      BoardID: boardId,
      Notes: `Auto-created from Automate alert\n\n${alert.Message}`
    })
  });

  return {
    alert: { id: alertId, subject: alert.Subject, status: 'Acknowledged' },
    ticket: { id: ticket.TicketID, number: ticket.TicketNumber }
  };
}
/**
 * Buckets a client's open (New/Active) alerts into severity tiers for triage.
 *
 * @param client   - API client exposing request(path).
 * @param clientId - ConnectWise client ID whose alerts are fetched.
 * @returns {Promise<object>} Total count plus critical/error/warning/info lists.
 */
async function triageAlertsByClient(client, clientId) {
  const alerts = await client.request(
    `/Alerts?condition=ClientID = ${clientId} and Status in ('New','Active')&pageSize=200`
  );

  const triage = {
    client: clientId,
    total: alerts.length,
    critical: [],
    error: [],
    warning: [],
    info: []
  };

  // Severity 4/3/2 map to critical/error/warning; anything else is info.
  const bucketFor = (severity) => {
    if (severity === 4) return triage.critical;
    if (severity === 3) return triage.error;
    if (severity === 2) return triage.warning;
    return triage.info;
  };

  for (const alert of alerts) {
    bucketFor(alert.Severity).push({
      id: alert.AlertID,
      subject: alert.Subject,
      computer: alert.ComputerName,
      source: alert.SourceName,
      age: getAlertAge(alert.TimeGenerated)
    });
  }

  return triage;
}
/**
 * Resolves a batch of alerts one at a time, collecting per-alert outcomes
 * instead of aborting on the first failure.
 *
 * Fix: the original throttled with an undefined `sleep` helper, which would
 * throw ReferenceError at runtime; the delay is now inlined with
 * Promise + setTimeout and exposed as a backward-compatible parameter.
 *
 * @param client   - API client exposing request(path, opts).
 * @param alertIds - Alert IDs to resolve.
 * @param notes    - Resolution note applied to every alert.
 * @param delayMs  - Pause between calls to respect API rate limits (default 100).
 * @returns {Promise<object>} { resolved, failed, details } summary.
 */
async function bulkResolveAlerts(client, alertIds, notes, delayMs = 100) {
  const results = [];
  for (const alertId of alertIds) {
    try {
      await client.request(`/Alerts/${alertId}/Resolve`, {
        method: 'POST',
        body: JSON.stringify({ Notes: notes })
      });
      results.push({ alertId, status: 'resolved' });
    } catch (error) {
      results.push({ alertId, status: 'failed', error: error.message });
    }
    // Respect rate limits between requests.
    await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  return {
    resolved: results.filter((r) => r.status === 'resolved').length,
    failed: results.filter((r) => r.status === 'failed').length,
    details: results
  };
}
/**
 * Scans active Warning-and-above alerts and flags those that have exceeded
 * the response window for their severity:
 * Critical (4): 15 min, Error (3): 60 min, Warning (2): 240 min.
 *
 * @param client - API client exposing request(path).
 * @returns {Promise<Array>} Escalation records with alert context and reason.
 */
async function checkAlertEscalation(client) {
  const alerts = await client.request(
    `/Alerts?condition=Status = 'Active' and Severity >= 2&pageSize=500`
  );

  // Age threshold (minutes) and escalation message per severity level.
  const rules = {
    4: { limit: 15, reason: 'Critical alert unacknowledged for 15+ minutes' },
    3: { limit: 60, reason: 'Error alert unacknowledged for 1+ hour' },
    2: { limit: 240, reason: 'Warning alert unacknowledged for 4+ hours' }
  };

  const nowMs = Date.now();
  const escalations = [];

  for (const alert of alerts) {
    const rule = rules[alert.Severity];
    if (!rule) continue; // no escalation policy for this severity

    const ageMinutes = (nowMs - new Date(alert.TimeGenerated).getTime()) / 60000;
    if (ageMinutes <= rule.limit) continue; // still within its response window

    escalations.push({
      alertId: alert.AlertID,
      subject: alert.Subject,
      client: alert.ClientName,
      computer: alert.ComputerName,
      severity: alert.Severity,
      ageMinutes: Math.round(ageMinutes),
      reason: rule.reason
    });
  }

  return escalations;
}
| Error | Status | Cause | Resolution |
|---|---|---|---|
| Alert not found | 404 | Invalid AlertID | Verify alert exists |
| Already resolved | 400 | Alert already closed | Check current status |
| Permission denied | 403 | No access to alert | Check user permissions |
| Invalid status | 400 | Invalid status transition | Follow lifecycle rules |
| Ticket creation failed | 400 | PSA integration error | Check ticket board config |
{
"error": {
"code": "BadRequest",
"message": "Cannot acknowledge already resolved alert"
}
}
/**
 * Resolves an alert only if it is still open, returning a result object
 * rather than throwing on failure.
 *
 * @param client  - API client exposing request(path, opts).
 * @param alertId - Alert to resolve.
 * @param notes   - Resolution note recorded with the status change.
 * @returns {Promise<object>} { success: true, previousStatus, newStatus }
 *   on success, or { success: false, error } otherwise.
 */
async function safeResolveAlert(client, alertId, notes) {
  const alert = await client.request(`/Alerts/${alertId}`);

  // Resolving a closed alert is a 400 from the API; short-circuit instead.
  const alreadyClosed = alert.Status === 'Resolved' || alert.Status === 'Cleared';
  if (alreadyClosed) {
    return {
      success: false,
      error: `Alert already ${alert.Status.toLowerCase()}`
    };
  }

  try {
    await client.request(`/Alerts/${alertId}/Resolve`, {
      method: 'POST',
      body: JSON.stringify({ Notes: notes })
    });
    return {
      success: true,
      alertId,
      previousStatus: alert.Status,
      newStatus: 'Resolved'
    };
  } catch (error) {
    return { success: false, error: error.message };
  }
}
/**
 * Runs the standard four-step alert response: fetch details, acknowledge if
 * still open, open a ticket when Severity >= 3, then check the affected
 * computer. Each completed step is logged; the first failure stops the
 * workflow and is recorded as an 'Error' step.
 *
 * @param client  - API client exposing request(path, opts).
 * @param alertId - Alert driving the workflow.
 * @returns {Promise<object>} { steps, success } execution log.
 */
async function standardAlertResponse(client, alertId) {
  const workflow = { steps: [], success: true };

  try {
    // 1. Pull full alert details for the downstream steps.
    const alert = await client.request(`/Alerts/${alertId}`);
    workflow.steps.push({
      step: 'Get Alert',
      status: 'success',
      data: {
        subject: alert.Subject,
        severity: alert.Severity,
        computer: alert.ComputerName
      }
    });

    // 2. Acknowledge unless someone already has.
    const stillOpen = alert.Status === 'New' || alert.Status === 'Active';
    if (stillOpen) {
      await client.request(`/Alerts/${alertId}/Acknowledge`, {
        method: 'POST',
        body: JSON.stringify({ Notes: 'Investigating alert' })
      });
      workflow.steps.push({ step: 'Acknowledge', status: 'success' });
    }

    // 3. Error/critical alerts get a ticket; critical maps to priority 1.
    if (alert.Severity >= 3) {
      const ticket = await client.request(`/Alerts/${alertId}/CreateTicket`, {
        method: 'POST',
        body: JSON.stringify({
          TicketSubject: alert.Subject,
          Priority: alert.Severity === 4 ? 1 : 2,
          BoardID: 1
        })
      });
      workflow.steps.push({
        step: 'Create Ticket',
        status: 'success',
        ticketId: ticket.TicketID
      });
    }

    // 4. Look up the affected computer's current status.
    const computer = await client.request(`/Computers/${alert.ComputerID}`);
    workflow.steps.push({
      step: 'Check Computer',
      status: 'success',
      computerStatus: computer.Status
    });
  } catch (error) {
    workflow.success = false;
    workflow.steps.push({ step: 'Error', status: 'failed', error: error.message });
  }

  return workflow;
}