Analyzes Linux logs (auth.log, syslog, systemd journal, kern.log, audit.log) via grep, journalctl, ausearch to reconstruct user activity, detect intrusions, and build event timelines on compromised systems.
npx claudepluginhub killvxk/cybersecurity-skills-zh

This skill uses the workspace's default tool permissions.
Linux 系统维护着大量日志,是取证调查的主要证据来源。与 Windows 事件日志不同,Linux 日志通常是存储在 /var/log/ 中的纯文本文件和由 systemd-journald 管理的二进制 journal 文件。关键取证日志包括:auth.log(认证事件、sudo 使用、SSH 会话)、syslog(系统级消息)、kern.log(内核事件)以及特定应用程序日志。Linux 审计框架(auditd)提供了与 Windows 安全事件日志相当的详细安全事件记录。对这些日志的取证分析使调查人员能够重建用户会话、识别未授权访问、检测权限提升、追踪横向移动并建立完整的事件时间线。
Performs forensic analysis of Linux logs (syslog, auth.log, systemd journal, kern.log, audit.log) to reconstruct user activity, detect unauthorized access, and build event timelines on compromised systems.
Analyzes Linux logs (syslog, auth.log, systemd journal, kern.log) for forensics: reconstructs user activity, detects unauthorized access, builds event timelines.
Analyzes Linux system artifacts like auth logs, cron jobs, shell history, configs, and SUID binaries to detect intrusions, rootkits, persistence, and unauthorized access in incident response.
Share bugs, ideas, or general feedback.
Linux 系统维护着大量日志,是取证调查的主要证据来源。与 Windows 事件日志不同,Linux 日志通常是存储在 /var/log/ 中的纯文本文件和由 systemd-journald 管理的二进制 journal 文件。关键取证日志包括:auth.log(认证事件、sudo 使用、SSH 会话)、syslog(系统级消息)、kern.log(内核事件)以及特定应用程序日志。Linux 审计框架(auditd)提供了与 Windows 安全事件日志相当的详细安全事件记录。对这些日志的取证分析使调查人员能够重建用户会话、识别未授权访问、检测权限提升、追踪横向移动并建立完整的事件时间线。
| 日志文件 | 路径 | 内容 |
|---|---|---|
| auth.log / secure | /var/log/auth.log(Debian)或 /var/log/secure(RHEL) | 认证、sudo、SSH、PAM |
| syslog / messages | /var/log/syslog(Debian)或 /var/log/messages(RHEL) | 通用系统消息 |
| kern.log | /var/log/kern.log | 内核消息、USB 事件、驱动加载 |
| lastlog | /var/log/lastlog | 每用户最后登录记录(二进制) |
| wtmp | /var/log/wtmp | 登录/注销记录(二进制,用 last 读取) |
| btmp | /var/log/btmp | 登录失败记录(二进制,用 lastb 读取) |
| faillog | /var/log/faillog | 登录失败计数器(二进制) |
| cron.log | /var/log/cron 或 /var/log/syslog | 计划任务执行记录 |
| audit.log | /var/log/audit/audit.log | Linux 审计框架事件 |
| journal | /var/log/journal/ 或 /run/log/journal/ | systemd 二进制 journal |
| dpkg.log | /var/log/dpkg.log | 软件包安装/删除记录(Debian) |
| yum.log | /var/log/yum.log | 软件包安装/删除记录(RHEL) |
# Find all successful SSH logins
grep "Accepted" /var/log/auth.log
# Find failed SSH login attempts
grep "Failed password" /var/log/auth.log
# Extract the unique source IPs from failed-login records
grep "Failed password" /var/log/auth.log | grep -oP '\d+\.\d+\.\d+\.\d+' | sort -u
# Find sudo command executions
grep "sudo:" /var/log/auth.log | grep "COMMAND"
# Detect brute-force patterns (more than 10 failures from the same IP)
grep "Failed password" /var/log/auth.log | awk '{print $(NF-3)}' | sort | uniq -c | sort -rn | head -20
# Find account-creation events
grep "useradd\|adduser" /var/log/auth.log
# Detect SSH public-key authentication
grep "Accepted publickey" /var/log/auth.log
# Export the journal as JSON for forensic processing
journalctl --output=json --no-pager > journal_export.json
# Filter by time range
journalctl --since "2025-02-01" --until "2025-02-15" --output=json > timerange.json
# Filter by unit/service
journalctl -u sshd --output=json > sshd_journal.json
# Show kernel messages (USB events, module loads)
journalctl -k --output=json > kernel_journal.json
# Filter by priority (0=emerg through 7=debug)
journalctl -p err --output=json > errors.json
# Logs from a specific boot session
journalctl -b 0 --output=json > current_boot.json
journalctl --list-boots # List all recorded boot sessions
# Search the audit log for a specific event type
ausearch -m USER_AUTH --start today
# Search for file-access events
ausearch -f /etc/shadow
# Search for process-execution records
ausearch -m EXECVE --start "02/01/2025" --end "02/28/2025"
# Generate a login-event report
aureport --login --start "02/01/2025"
# Generate a summary of authentication failures
aureport --auth --failed
# Search for a specific user's activity
ausearch -ua 1001 # by UID
ausearch -ua username # by user name
# Check the system-wide crontab
cat /etc/crontab
# Check per-user crontabs
ls -la /var/spool/cron/crontabs/
# Review cron execution logs
grep "CRON" /var/log/syslog
# Check at/batch jobs
ls -la /var/spool/at/
atq
import re
import json
import sys
import os
from datetime import datetime
from collections import defaultdict
class LinuxLogForensicAnalyzer:
    """Analyze Linux system logs for forensic investigation.

    Parses authentication events from an auth.log-style syslog file,
    flags brute-force sources, and writes a JSON report to *output_dir*.
    """

    def __init__(self, log_dir: str, output_dir: str):
        """
        Args:
            log_dir: Directory that holds the logs under analysis.
            output_dir: Directory for generated reports (created if missing).
        """
        self.log_dir = log_dir
        self.output_dir = output_dir
        os.makedirs(output_dir, exist_ok=True)

    def parse_auth_log(self, auth_log_path: str) -> dict:
        """Parse authentication events from an auth.log file.

        Args:
            auth_log_path: Path to the auth.log / secure file to parse.

        Returns:
            Dict with keys ``successful_logins``, ``failed_logins``,
            ``sudo_commands``, ``account_changes`` and ``ssh_sessions``
            (the last is kept for report-schema compatibility and is not
            currently populated by any pattern below).
        """
        events = {
            "successful_logins": [],
            "failed_logins": [],
            "sudo_commands": [],
            "account_changes": [],
            "ssh_sessions": []
        }
        # Classic syslog timestamps ("Feb  3 10:00:01") carry no year.
        ssh_accepted = re.compile(
            r'(\w+\s+\d+\s+[\d:]+)\s+(\S+)\s+sshd\[\d+\]:\s+Accepted\s+(\S+)\s+for\s+(\S+)\s+from\s+([\d.]+)'
        )
        # sshd logs "Failed password for invalid user NAME from IP" for
        # unknown accounts; the optional "invalid user" prefix must be
        # skipped, otherwise these (very common brute-force) lines are
        # silently missed.
        ssh_failed = re.compile(
            r'(\w+\s+\d+\s+[\d:]+)\s+(\S+)\s+sshd\[\d+\]:\s+Failed\s+password\s+for\s+(?:invalid\s+user\s+)?(\S*)\s+from\s+([\d.]+)'
        )
        sudo_cmd = re.compile(
            r'(\w+\s+\d+\s+[\d:]+)\s+(\S+)\s+sudo:\s+(\S+)\s+:.*COMMAND=(.*)'
        )
        # The name field is followed by ", UID=..." — stop at the comma so
        # the captured user name does not include it.
        useradd = re.compile(
            r'(\w+\s+\d+\s+[\d:]+)\s+(\S+)\s+useradd\[\d+\]:\s+new user: name=([^,\s]+)'
        )
        # Explicit UTF-8 keeps parsing locale-independent; errors="replace"
        # tolerates stray bytes common in tampered/forensic logs.
        with open(auth_log_path, "r", encoding="utf-8", errors="replace") as f:
            for line in f:
                m = ssh_accepted.search(line)
                if m:
                    events["successful_logins"].append({
                        "timestamp": m.group(1), "host": m.group(2),
                        "method": m.group(3), "user": m.group(4), "source_ip": m.group(5)
                    })
                    continue
                m = ssh_failed.search(line)
                if m:
                    events["failed_logins"].append({
                        "timestamp": m.group(1), "host": m.group(2),
                        "user": m.group(3), "source_ip": m.group(4)
                    })
                    continue
                m = sudo_cmd.search(line)
                if m:
                    events["sudo_commands"].append({
                        "timestamp": m.group(1), "host": m.group(2),
                        "user": m.group(3), "command": m.group(4).strip()
                    })
                    continue
                m = useradd.search(line)
                if m:
                    events["account_changes"].append({
                        "timestamp": m.group(1), "host": m.group(2),
                        "new_user": m.group(3)
                    })
        return events

    def detect_brute_force(self, auth_events: dict, threshold: int = 10) -> list:
        """Detect brute-force attempts from parsed auth-log data.

        Args:
            auth_events: Output of :meth:`parse_auth_log`.
            threshold: Minimum failed attempts from a single IP to flag it.

        Returns:
            List of ``{"source_ip", "failed_attempts"}`` dicts, sorted by
            attempt count, descending.
        """
        ip_failures = defaultdict(int)
        for event in auth_events.get("failed_logins", []):
            ip_failures[event["source_ip"]] += 1
        brute_force = [
            {"source_ip": ip, "failed_attempts": count}
            for ip, count in ip_failures.items()
            if count >= threshold
        ]
        return sorted(brute_force, key=lambda x: x["failed_attempts"], reverse=True)

    def generate_report(self, auth_log_path: str) -> str:
        """Generate a comprehensive forensic analysis report.

        Parses the given auth log, runs brute-force detection, and writes
        ``linux_log_forensics.json`` into the output directory.

        Args:
            auth_log_path: Path to the auth.log / secure file to analyze.

        Returns:
            Path of the written JSON report file.
        """
        auth_events = self.parse_auth_log(auth_log_path)
        brute_force = self.detect_brute_force(auth_events)
        report = {
            "analysis_timestamp": datetime.now().isoformat(),
            "log_source": auth_log_path,
            "summary": {
                "successful_logins": len(auth_events["successful_logins"]),
                "failed_logins": len(auth_events["failed_logins"]),
                "sudo_commands": len(auth_events["sudo_commands"]),
                "account_changes": len(auth_events["account_changes"]),
                "brute_force_sources": len(brute_force)
            },
            "brute_force_detected": brute_force,
            "auth_events": auth_events
        }
        report_path = os.path.join(self.output_dir, "linux_log_forensics.json")
        with open(report_path, "w") as f:
            json.dump(report, f, indent=2)
        print(f"[*] 成功登录次数:{report['summary']['successful_logins']}")
        print(f"[*] 登录失败次数:{report['summary']['failed_logins']}")
        print(f"[*] sudo 命令次数:{report['summary']['sudo_commands']}")
        print(f"[*] 暴力破解来源:{report['summary']['brute_force_sources']}")
        return report_path
def main():
    """CLI entry point: validate arguments and run the log analysis."""
    args = sys.argv
    if len(args) < 3:
        print("用法:python process.py <auth_log_path> <output_dir>")
        sys.exit(1)
    auth_log_path, output_dir = args[1], args[2]
    # The log directory is derived from the auth.log path itself.
    analyzer = LinuxLogForensicAnalyzer(os.path.dirname(auth_log_path), output_dir)
    analyzer.generate_report(auth_log_path)
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()