Analyzes supply chain attack artifacts including trojanized software updates, compromised build pipelines, and sideloaded dependencies to identify intrusion vectors and compromise scope.
npx claudepluginhub killvxk/cybersecurity-skills-zh — This skill uses the workspace's default tool permissions.
供应链攻击通过破坏合法软件分发渠道,借助受信任的更新机制投递恶意软件。典型案例包括 SolarWinds SUNBURST(2020 年,影响 18,000 多个客户)、3CX SmoothOperator(2023 年,一起源自 Trading Technologies 的级联供应链攻击)以及大量 npm/PyPI 包投毒活动。分析工作涉及:将木马化二进制文件与合法版本进行比对、识别构建工件中的注入代码、检查代码签名异常,以及追踪从初始攻陷到载荷投递的感染链。截至 2025 年,供应链攻击占所有违规事件的 30%,较前几年增加了 100%。
Analyzes supply chain malware artifacts like trojanized binaries, compromised pipelines, and sideloaded dependencies using Python PE comparison to identify intrusions. For incident response and threat hunting.
Analyzes supply chain malware artifacts like trojanized binaries, compromised pipelines, and sideloaded dependencies to identify intrusion vectors. For SOC incident response and threat hunting.
Hunts supply chain compromise indicators like trojanized updates, compromised dependencies, unauthorized code changes, and tampered builds. For EDR/SIEM threat hunting.
Share bugs, ideas, or general feedback.
供应链攻击通过破坏合法软件分发渠道,借助受信任的更新机制投递恶意软件。典型案例包括 SolarWinds SUNBURST(2020 年,影响 18,000 多个客户)、3CX SmoothOperator(2023 年,一起源自 Trading Technologies 的级联供应链攻击)以及大量 npm/PyPI 包投毒活动。分析工作涉及:将木马化二进制文件与合法版本进行比对、识别构建工件中的注入代码、检查代码签名异常,以及追踪从初始攻陷到载荷投递的感染链。截至 2025 年,供应链攻击占所有违规事件的 30%,较前几年增加了 100%。
pefile、ssdeep、hashlib

#!/usr/bin/env python3
"""比对木马化二进制文件与合法版本。"""
import hashlib
import pefile
import sys
import json
def compare_pe_files(legitimate_path, suspect_path):
    """Compare PE structure of a legitimate binary against a suspect one.

    Used to spot trojanization: injected sections, grown sections, imports
    added to the suspect build, and signature-presence differences.

    Args:
        legitimate_path: Path to the known-good reference binary.
        suspect_path: Path to the possibly trojanized binary.

    Returns:
        dict with keys:
            "differences": reserved empty list (kept for report-schema
                compatibility with downstream consumers).
            "suspicious_sections": sections absent from the legitimate build,
                or whose raw size differs by more than 1024 bytes.
            "import_changes": sorted list of "dll!symbol" imports present
                only in the suspect binary.
            "legit_signed"/"suspect_signed": whether each file has a
                non-empty Authenticode certificate table.
    """

    def _section_props(pe):
        # Section names are NUL-padded to 8 bytes in the header; strip that.
        return {
            s.Name.rstrip(b'\x00').decode(): {
                "size": s.SizeOfRawData,
                "entropy": s.get_entropy(),
                "characteristics": s.Characteristics,
            }
            for s in pe.sections
        }

    def _named_imports(pe):
        # Collect "dll!symbol" for by-name imports; ordinal-only imports
        # have imp.name == None and are skipped (same as the original code).
        found = set()
        for entry in getattr(pe, 'DIRECTORY_ENTRY_IMPORT', []):
            for imp in entry.imports:
                if imp.name:
                    found.add(f"{entry.dll.decode()}!{imp.name.decode()}")
        return found

    # pefile memory-maps each input; close both handles even on error so we
    # don't leak file descriptors / mappings when scanning many samples.
    legit_pe = pefile.PE(legitimate_path)
    try:
        suspect_pe = pefile.PE(suspect_path)
        try:
            report = {"differences": [], "suspicious_sections": [], "import_changes": []}

            legit_sections = _section_props(legit_pe)
            for name, props in _section_props(suspect_pe).items():
                if name not in legit_sections:
                    report["suspicious_sections"].append({
                        "name": name, "reason": "合法版本中不存在的新节",
                        "size": props["size"], "entropy": round(props["entropy"], 2),
                    })
                elif abs(props["size"] - legit_sections[name]["size"]) > 1024:
                    report["suspicious_sections"].append({
                        "name": name, "reason": "节大小发生显著变化",
                        "legit_size": legit_sections[name]["size"],
                        "suspect_size": props["size"],
                    })

            # Imports the suspect build gained; sorted so reports are
            # deterministic run-to-run (sets have arbitrary order).
            new_imports = _named_imports(suspect_pe) - _named_imports(legit_pe)
            if new_imports:
                report["import_changes"] = sorted(new_imports)

            # DATA_DIRECTORY[4] is IMAGE_DIRECTORY_ENTRY_SECURITY: a non-zero
            # Size means an Authenticode certificate table is present.
            report["legit_signed"] = bool(legit_pe.OPTIONAL_HEADER.DATA_DIRECTORY[4].Size)
            report["suspect_signed"] = bool(suspect_pe.OPTIONAL_HEADER.DATA_DIRECTORY[4].Size)
            return report
        finally:
            suspect_pe.close()
    finally:
        legit_pe.close()
def hash_file(filepath, chunk_size=1024 * 1024):
    """Compute MD5, SHA-1, and SHA-256 hex digests of a file.

    Streams the file in chunks so large samples (multi-GB installers,
    disk images) never need to fit in memory, unlike a whole-file read().
    All three hashers are fed from a single pass over the data.

    Args:
        filepath: Path of the file to hash.
        chunk_size: Bytes read per iteration (default 1 MiB).

    Returns:
        dict mapping algorithm name ('md5', 'sha1', 'sha256') to its
        lowercase hex digest string.
    """
    hashers = {algo: hashlib.new(algo) for algo in ('md5', 'sha1', 'sha256')}
    with open(filepath, 'rb') as f:
        # iter() with a b'' sentinel yields chunks until EOF.
        for chunk in iter(lambda: f.read(chunk_size), b''):
            for h in hashers.values():
                h.update(chunk)
    return {algo: h.hexdigest() for algo, h in hashers.items()}
if __name__ == "__main__":
    # CLI entry point: compare a known-good binary against a suspect one
    # and emit the findings as pretty-printed JSON on stdout.
    args = sys.argv
    if len(args) < 3:
        print(f"用法:{args[0]} <legitimate_binary> <suspect_binary>")
        sys.exit(1)
    comparison = compare_pe_files(args[1], args[2])
    print(json.dumps(comparison, indent=2, ensure_ascii=False))