From the cybersecurity-skills collection.
Automates GDPR DSAR workflows: identity verification, PII discovery via regex/NER in DBs/files, Article 15 templating, deadline tracking, exemptions, batch processing, audit logging.
npx claudepluginhub mukul975/anthropic-cybersecurity-skills --plugin cybersecurity-skills

This skill uses the workspace's default tool permissions.
- When building automated DSAR processing pipelines for GDPR/UK GDPR compliance
Applies Acme Corporation brand guidelines including colors, fonts, layouts, and messaging to generated PowerPoint, Excel, and PDF documents.
Builds DCF models with sensitivity analysis, Monte Carlo simulations, and scenario planning for investment valuation and risk assessment.
Calculates profitability (ROE, margins), liquidity (current ratio), leverage, efficiency, and valuation (P/E, EV/EBITDA) ratios from financial statements in CSV, JSON, text, or Excel for investment analysis.
Under GDPR Article 15, data subjects have the right to obtain from the controller:
Implement a request intake system that captures the request through any channel, verifies the requester's identity, and starts the compliance clock.
# Intake: file an incoming Article 15 request and start the compliance clock.
from agent import DSARWorkflowEngine

workflow = DSARWorkflowEngine(config_path="dsar_config.json")

# Register the new DSAR with the verified requester details.
request = workflow.register_dsar(
    requester_name="Jane Smith",
    requester_email="jane.smith@example.com",
    request_channel="email",
    request_text="I would like a copy of all personal data you hold about me.",
    identity_docs=["passport_verified"],
)
print(f"DSAR ID: {request['dsar_id']}, Deadline: {request['deadline']}")
Scan databases, files, and logs using regex patterns and NER to find all personal data associated with the data subject.
# Discovery: locate every record tied to the data subject across structured
# and unstructured sources.
from agent import PIIDiscoveryEngine

discovery = PIIDiscoveryEngine()

# Structured scan: relational database.
structured_hits = discovery.scan_database(
    connection_string="postgresql://user:pass@localhost/appdb",
    search_identifiers={"email": "jane.smith@example.com", "name": "Jane Smith"},
)

# Unstructured scan: files and logs on disk.
unstructured_hits = discovery.scan_files(
    directories=["/var/log/app", "/data/exports", "/data/documents"],
    search_identifiers={"email": "jane.smith@example.com", "name": "Jane Smith"},
)

# NER pass catches contextual PII that the regex patterns miss.
entity_hits = discovery.scan_with_ner(
    text_corpus=unstructured_hits["raw_text_matches"],
    entity_types=["PERSON", "EMAIL", "PHONE_NUMBER", "LOCATION", "DATE_OF_BIRTH"],
)

all_pii = discovery.consolidate_results(structured_hits, unstructured_hits, entity_hits)
print(f"Found {all_pii['total_records']} PII records across {all_pii['source_count']} sources")
Map discovered PII to processing purposes, legal bases, and retention periods as required by Article 15.
# Mapping: attach processing purposes, legal bases, and retention periods to
# each discovered data category, as Article 15 requires.
from agent import DataMapper

inventory_mapper = DataMapper(data_inventory_path="data_inventory.json")

mapped_data = inventory_mapper.map_to_article15(
    pii_records=all_pii,
    data_subject_id="jane.smith@example.com",
)

# Each category carries its purpose, legal basis, retention, and recipients.
for cat in mapped_data["categories"]:
    print(f"Category: {cat['name']}")
    print(f" Purpose: {cat['processing_purpose']}")
    print(f" Legal basis: {cat['legal_basis']}")
    print(f" Retention: {cat['retention_period']}")
    print(f" Recipients: {', '.join(cat['recipients'])}")
Apply exemptions where lawful (third-party data, legal privilege, trade secrets) before compiling the response.
# Exemptions: withhold material that may lawfully be exempted before the
# response is compiled.
from agent import ExemptionReviewer

exemption_reviewer = ExemptionReviewer()

# Evaluate each recognised exemption ground against the mapped data.
review_result = exemption_reviewer.review_exemptions(
    mapped_data=mapped_data,
    exemption_checks=[
        "third_party_data",
        "legal_professional_privilege",
        "trade_secrets",
        "crime_prevention",
        "management_forecasting",
    ],
)

# Redact whatever the review flagged.
redacted_data = exemption_reviewer.apply_redactions(mapped_data, review_result["exemptions"])
print(f"Applied {review_result['exemption_count']} exemptions")
Generate a compliant DSAR response package with cover letter, data export, and supplementary information document.
# Response: assemble the deliverable package for the data subject.
from agent import DSARResponseGenerator

response_builder = DSARResponseGenerator(template_dir="templates/")

response = response_builder.generate_response(
    dsar_id=request["dsar_id"],
    data_subject="Jane Smith",
    mapped_data=redacted_data,
    format="pdf",  # "json" and "csv" are also accepted
)

# The package bundles the cover letter, data export, supplementary info, and audit log.
for document in response["documents"]:
    print(f"Generated: {document['filename']} ({document['type']})")
Maintain complete audit trail of the DSAR lifecycle for accountability.
# Audit: record every lifecycle event so the DSAR remains fully accountable.
from agent import DSARAuditLogger

audit = DSARAuditLogger(log_path="dsar_audit_logs/")

# Intake event.
audit.log_event(request["dsar_id"], "request_received", {
    "channel": "email",
    "identity_verified": True,
})
# Discovery milestone.
audit.log_event(request["dsar_id"], "pii_discovery_complete", {
    "records_found": all_pii["total_records"],
    "sources_scanned": all_pii["source_count"],
})
# Closure event.
audit.log_event(request["dsar_id"], "response_sent", {
    "format": "pdf",
    "documents_count": len(response["documents"]),
    "exemptions_applied": review_result["exemption_count"],
})

# Summarise the trail into a single compliance report.
compliance_report = audit.generate_compliance_report(request["dsar_id"])
# End-to-end pipeline: intake -> discovery -> response -> deadline tracking.
from agent import DSARWorkflowEngine, PIIDiscoveryEngine, DSARResponseGenerator

workflow = DSARWorkflowEngine(config_path="dsar_config.json")
discovery = PIIDiscoveryEngine()
builder = DSARResponseGenerator(template_dir="templates/")

# Step 1 - intake and identity verification.
dsar = workflow.register_dsar(
    requester_name="John Doe",
    requester_email="john.doe@example.com",
    request_channel="web_form",
    request_text="Please provide all my data under GDPR Article 15.",
    identity_docs=["email_verified", "account_match"],
)

# Step 2 - sweep every configured source for the subject's data.
scan_results = discovery.full_scan(
    search_identifiers={"email": "john.doe@example.com"},
    sources=["database", "files", "logs"],
)

# Step 3 - produce the response package.
response = builder.generate_response(
    dsar_id=dsar["dsar_id"],
    data_subject="John Doe",
    mapped_data=scan_results,
)

# Step 4 - close the request against its statutory deadline.
workflow.update_status(dsar["dsar_id"], "response_sent")
print(f"DSAR {dsar['dsar_id']} completed, {workflow.days_remaining(dsar['dsar_id'])} days remaining")
# Quick self-test of the regex pattern library against a synthetic string.
from agent import PIIPatternMatcher

pattern_matcher = PIIPatternMatcher()

sample = "Contact jane.smith@example.com or call +44 20 7946 0958. SSN: 123-45-6789"
for hit in pattern_matcher.scan_text(sample):
    print(f" [{hit['type']}] '{hit['value']}' (confidence: {hit['confidence']})")