用户操作日志系统(集成智能去重)
一、总体架构设计
graph TD
U[用户操作] -->|点击/输入/提交| F[前端监控层]
subgraph "前端监控层"
F1[JS事件监听]
F2[操作信息收集]
F3[本地防重检测]
F4[异步批量上报]
end
F --> F1
F1 --> F2
F2 --> F3
F3 -->|非重复| F4
F3 -->|重复| D1[前端阻止/标记]
F4 -->|HTTP上报| B[后端接收层]
subgraph "后端处理层"
B1[频率限制检查]
B2[智能去重过滤]
B3[业务语义解析]
B4[日志持久化]
B5[ES/MQ同步]
end
B --> B1
B1 -->|超限| L[丢弃]
B1 -->|正常| B2
B2 -->|重复| M[合并/丢弃]
B2 -->|有效| B3
B3 --> B4
B4 --> B5
B4 -->|存储| DB[(MySQL)]
B5 -->|同步| ES[(Elasticsearch)]
B5 -->|消息| MQ[RocketMQ]
subgraph "查询分析层"
Q1[操作流水查询]
Q2[去重统计分析]
Q3[用户行为分析]
Q4[实时监控告警]
end
DB --> Q1
ES --> Q2
ES --> Q3
MQ --> Q4
二、前端监控系统(集成防重)
1. 智能防重监控脚本
// operation-tracker-with-dedupe.js
// operation-tracker-with-dedupe.js
class SmartOperationTracker {
    /**
     * Front-end operation tracker with built-in client-side deduplication.
     * Collects click / form-submit / input events, suppresses duplicates inside
     * a configurable time window, and reports batches to the backend.
     *
     * NOTE(review): several helpers used below (startBatchReport, injectMetadata,
     * trackError, trackPromiseError, isTrackableElement, normalizeClassName,
     * getElementPath, getElementText, generateElementId, collectFormData,
     * generateFormFingerprint, handleInput, trackPageChange) are not defined in
     * this excerpt — they must be provided elsewhere before this class is usable.
     *
     * @param {Object} [config] - partial configuration; merged over the defaults.
     */
    constructor(config) {
        this.config = {
            appId: 'system-a',
            userId: '',
            sessionId: '',
            reportUrl: '/api/operation/log/batch',
            dedupeEnabled: true,
            dedupeWindow: 1000,   // dedupe time window (ms)
            localCacheSize: 50,   // max entries in the local dedupe cache
            batchSize: 10,
            batchInterval: 3000,
            ...config
        };
        this.operationQueue = [];           // operations waiting to be reported
        this.localDedupeCache = new Map();  // fingerprint -> { timestamp, duplicateCount }
        this.lastReportTime = 0;
        this.init();
    }

    /** Wire up DOM listeners, the batch-report timer, and global error hooks. */
    init() {
        this.bindEvents();
        this.startBatchReport();
        this.injectMetadata();
        // Flush any pending operations before the page unloads.
        window.addEventListener('beforeunload', () => this.forceFlush());
        // Error monitoring.
        window.addEventListener('error', (e) => this.trackError(e));
        window.addEventListener('unhandledrejection', (e) => this.trackPromiseError(e));
    }

    /** Attach capture-phase listeners for clicks, submits, inputs and SPA URL changes. */
    bindEvents() {
        // Smart click listener (debounced + deduplicated).
        document.addEventListener('click', this.debounce((e) => {
            this.handleClick(e);
        }, 300), true);
        // Form submit listener.
        document.addEventListener('submit', (e) => {
            e.preventDefault();
            this.handleFormSubmit(e.target).then(() => {
                e.target.submit(); // continue the original submission
            });
        }, true);
        // Input listener (debounced).
        document.addEventListener('input', this.debounce((e) => {
            this.handleInput(e.target);
        }, 800), true);
        // Detect SPA navigations by watching the DOM and comparing URLs.
        let lastUrl = location.href;
        new MutationObserver(() => {
            if (lastUrl !== location.href) {
                this.trackPageChange(lastUrl, location.href);
                lastUrl = location.href;
            }
        }).observe(document, { subtree: true, childList: true });
    }

    /** Handle a (debounced) click: fingerprint it, dedupe it, then queue it. */
    handleClick(event) {
        const target = event.target;
        // 1. Skip elements that should not be tracked.
        if (!this.isTrackableElement(target)) return;
        // 2. Build the operation fingerprint.
        const operationFingerprint = this.generateFingerprint(target, 'CLICK');
        // 3. Local dedupe check.
        if (this.config.dedupeEnabled && this.isLocalDuplicate(operationFingerprint)) {
            console.debug('本地去重:重复点击', target);
            this.markAsDuplicate(operationFingerprint);
            return;
        }
        // 4. Record the operation.
        const operation = this.buildClickOperation(target, event);
        this.queueOperation(operation);
        // 5. Remember the fingerprint for subsequent dedupe checks.
        this.updateLocalCache(operationFingerprint);
    }

    /**
     * True when the fingerprint was already seen inside the dedupe window.
     * @param {string} fingerprint
     * @returns {boolean}
     */
    isLocalDuplicate(fingerprint) {
        const cached = this.localDedupeCache.get(fingerprint);
        if (!cached) return false;
        const now = Date.now();
        return (now - cached.timestamp) < this.config.dedupeWindow;
    }

    /**
     * Record one more suppressed occurrence of an already-seen fingerprint.
     * FIX: the counter previously used `(duplicateCount || 1) + 1`, which jumped
     * the initial 0 straight to 2; it now increments by exactly one.
     */
    markAsDuplicate(fingerprint) {
        const cached = this.localDedupeCache.get(fingerprint);
        if (cached) {
            cached.duplicateCount = (cached.duplicateCount ?? 0) + 1;
            cached.lastDuplicateTime = Date.now();
            this.localDedupeCache.set(fingerprint, cached);
        }
    }

    /** Insert/refresh a fingerprint and keep the cache within its size budget. */
    updateLocalCache(fingerprint) {
        this.localDedupeCache.set(fingerprint, {
            timestamp: Date.now(),
            duplicateCount: 0
        });
        if (this.localDedupeCache.size > this.config.localCacheSize) {
            // First pass: drop clearly stale entries.
            const now = Date.now();
            for (const [key, value] of this.localDedupeCache.entries()) {
                if (now - value.timestamp > this.config.dedupeWindow * 10) {
                    this.localDedupeCache.delete(key);
                }
            }
            // FIX: if every entry is still fresh the cache previously grew without
            // bound; evict the oldest entries (Map preserves insertion order).
            while (this.localDedupeCache.size > this.config.localCacheSize) {
                const oldestKey = this.localDedupeCache.keys().next().value;
                this.localDedupeCache.delete(oldestKey);
            }
        }
    }

    /**
     * Build a multi-dimension fingerprint for an element + event type.
     * @returns {string} pipe-joined fingerprint with whitespace stripped
     */
    generateFingerprint(element, eventType) {
        const parts = [
            eventType,
            element.tagName,
            element.id || '',
            this.normalizeClassName(element.className),
            this.getElementPath(element),
            window.location.pathname
        ];
        return parts.join('|').replace(/\s+/g, '');
    }

    /** Assemble the full CLICK operation payload for reporting. */
    buildClickOperation(element, event) {
        return {
            operationId: this.generateId(),
            eventType: 'CLICK',
            elementType: element.tagName.toLowerCase(),
            elementId: element.id || this.generateElementId(element),
            elementPath: this.getElementPath(element),
            elementText: this.getElementText(element),
            elementClass: element.className,
            pageUrl: window.location.href,
            pageTitle: document.title,
            referrer: document.referrer,
            timestamp: Date.now(),
            position: {
                x: event.clientX,
                y: event.clientY,
                screen: `${window.screen.width}x${window.screen.height}`
            },
            // Device info
            userAgent: navigator.userAgent,
            platform: navigator.platform,
            // Context info
            sessionId: this.config.sessionId,
            userId: this.config.userId,
            appId: this.config.appId,
            // Dedupe info
            dedupeFingerprint: this.generateFingerprint(element, 'CLICK'),
            isSuspectedDuplicate: false
        };
    }

    /**
     * Track a form submission with strict dedupe (repeat submits are blocked).
     * @returns {Promise<void>} resolves once the operation is queued or rejected as duplicate
     */
    handleFormSubmit(form) {
        return new Promise((resolve) => {
            const formData = this.collectFormData(form);
            const fingerprint = this.generateFormFingerprint(form, formData);
            if (this.isLocalDuplicate(fingerprint)) {
                console.warn('表单重复提交阻止');
                resolve();
                return;
            }
            const operation = {
                operationId: this.generateId(),
                eventType: 'FORM_SUBMIT',
                formId: form.id,
                formAction: form.action,
                formMethod: form.method,
                formData: this.sanitizeFormData(formData),
                pageUrl: window.location.href,
                timestamp: Date.now(),
                dedupeFingerprint: fingerprint
            };
            this.queueOperation(operation);
            this.updateLocalCache(fingerprint);
            resolve();
        });
    }

    /** Mask well-known sensitive form fields before reporting. */
    sanitizeFormData(data) {
        const sensitive = ['password', 'pwd', 'creditCard', 'idCard', 'cvv'];
        const sanitized = { ...data };
        sensitive.forEach(field => {
            if (sanitized[field]) {
                sanitized[field] = '***';
            }
        });
        return sanitized;
    }

    /** Enqueue an operation and flush once the batch size is reached. */
    queueOperation(operation) {
        operation.queueTime = Date.now();
        this.operationQueue.push(operation);
        if (this.operationQueue.length >= this.config.batchSize) {
            this.flush();
        }
    }

    /**
     * Report up to batchSize queued operations.
     * FIX: the 64 KB sendBeacon budget is now checked against the serialized
     * payload size (blob.size) instead of the number of operations, and a
     * rejected sendBeacon (returns false) falls back to fetch.
     */
    flush() {
        if (this.operationQueue.length === 0) return;
        const batch = this.operationQueue.splice(0, this.config.batchSize);
        this.lastReportTime = Date.now();
        if (navigator.sendBeacon) {
            const blob = new Blob([JSON.stringify(batch)], { type: 'application/json' });
            if (blob.size <= 64 * 1024 && navigator.sendBeacon(this.config.reportUrl, blob)) {
                return;
            }
        }
        this.reportByFetch(batch);
    }

    /** Report a batch via fetch; failed batches are re-queued a bounded number of times. */
    reportByFetch(batch) {
        fetch(this.config.reportUrl, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(batch),
            keepalive: true
        }).catch(err => {
            console.error('日志上报失败:', err);
            // FIX: cap retries so a permanently failing endpoint cannot grow
            // the queue (and outbound traffic) without bound.
            const retriable = batch.filter(op => {
                op.__retryCount = (op.__retryCount ?? 0) + 1;
                return op.__retryCount <= 3;
            });
            this.operationQueue.unshift(...retriable);
        });
    }

    /**
     * Flush the entire queue, not just one batch (e.g. on beforeunload).
     * FIX: previously only a single batch was sent, stranding the remainder.
     */
    forceFlush() {
        while (this.operationQueue.length > 0) {
            this.flush();
        }
    }

    // Utility: trailing-edge debounce.
    debounce(func, wait) {
        let timeout;
        return function executedFunction(...args) {
            clearTimeout(timeout);
            timeout = setTimeout(() => func(...args), wait);
        };
    }

    /** Generate a reasonably unique client-side operation id. */
    generateId() {
        // slice replaces the deprecated String.prototype.substr
        return `${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
    }
}
// Global bootstrap: create a single shared tracker instance (idempotent if the
// script is loaded more than once on the same page).
window.__smartTracker = window.__smartTracker || new SmartOperationTracker();
2. 页面模板集成
<%-- Inject tracking metadata and the tracker script into the JSP page head. --%>
<%@ page contentType="text/html;charset=UTF-8" %>
<html>
<head>
<title>${pageTitle} - 系统A</title>
<!-- Tracking metadata read by the inline script below -->
<meta name="track:app-id" content="system-a">
<meta name="track:user-id" content="${sessionScope.user.id}">
<meta name="track:user-name" content="${sessionScope.user.name}">
<meta name="track:session-id" content="${pageContext.session.id}">
<!-- Smart tracker script; must define window.__smartTracker before DOMContentLoaded -->
<script src="${ctx}/static/js/operation-tracker-with-dedupe.js"></script>
<style>
/* Optional visual feedback for tracked elements */
.tracked-click:active {
opacity: 0.8;
}
</style>
</head>
<body>
<!-- Page content -->
<script>
// Initialize the tracker once the DOM is ready.
document.addEventListener('DOMContentLoaded', function() {
const tracker = window.__smartTracker;
// Copy the user/session identity from the meta tags into the tracker config.
// NOTE(review): querySelector returns null if a meta tag is missing, which
// would throw here — confirm the tags are always rendered.
tracker.config.userId = document.querySelector('meta[name="track:user-id"]').content;
tracker.config.sessionId = document.querySelector('meta[name="track:session-id"]').content;
// Record the page view itself.
tracker.queueOperation({
eventType: 'PAGE_VIEW',
pageUrl: window.location.href,
pageTitle: document.title,
timestamp: Date.now(),
referrer: document.referrer
});
// Extra tracking for elements marked data-track-important.
// NOTE(review): logCustomOperation is not defined on SmartOperationTracker in
// this excerpt — confirm it exists before relying on this hook.
document.querySelectorAll('[data-track-important]').forEach(btn => {
btn.addEventListener('click', function(e) {
tracker.logCustomOperation('IMPORTANT_CLICK', {
buttonId: this.id,
buttonText: this.textContent,
importance: this.dataset.trackImportant
});
});
});
});
// Capture AJAX requests when jQuery is present.
// NOTE(review): trackAjaxRequest is likewise not defined in this excerpt.
if (window.jQuery) {
$(document).ajaxSend(function(event, jqXHR, settings) {
window.__smartTracker.trackAjaxRequest(settings);
});
}
</script>
</body>
</html>
三、后端处理系统(集成智能去重)
1. 核心实体设计
// Operation log entity (includes deduplication fields).
// NOTE(review): @Data + @EqualsAndHashCode(callSuper = true) on a mutable JPA
// entity generates equals/hashCode over all fields — a known pitfall if
// entities are held in hash-based collections; confirm BaseEntity expectations.
@Data
@Entity
@Table(name = "user_operation_log")
@EqualsAndHashCode(callSuper = true)
public class UserOperationLog extends BaseEntity {
    // Basic identity
    @Column(name = "operation_id", nullable = false, length = 64)
    private String operationId;
    @Column(name = "trace_id", length = 64)
    private String traceId;
    @Column(name = "session_id", nullable = false, length = 128)
    private String sessionId;
    @Column(name = "user_id", nullable = false, length = 64)
    private String userId;
    @Column(name = "username", length = 128)
    private String username;
    @Column(name = "user_ip", length = 64)
    private String userIp;
    // Page info
    @Column(name = "page_url", length = 500)
    private String pageUrl;
    @Column(name = "page_title", length = 200)
    private String pageTitle;
    @Column(name = "referrer", length = 500)
    private String referrer;
    // Operation info
    @Column(name = "event_type", nullable = false, length = 32)
    private String eventType;
    @Column(name = "element_type", length = 32)
    private String elementType;
    @Column(name = "element_id", length = 200)
    private String elementId;
    @Column(name = "element_path", length = 1000)
    private String elementPath;
    @Column(name = "element_text", length = 500)
    private String elementText;
    @Column(name = "element_class", length = 500)
    private String elementClass;
    // Business info
    @Column(name = "business_key", length = 64)
    private String businessKey;
    @Column(name = "operation_type", length = 32)
    private String operationType;
    @Column(name = "module_name", length = 64)
    private String moduleName;
    @Column(name = "operation_name", length = 64)
    private String operationName;
    // Operation payloads (serialized JSON)
    @Column(name = "request_data", columnDefinition = "TEXT")
    private String requestData;
    @Column(name = "extra_data", columnDefinition = "TEXT")
    private String extraData;
    // Device info
    @Column(name = "user_agent", length = 500)
    private String userAgent;
    @Column(name = "device_type", length = 32)
    private String deviceType;
    @Column(name = "screen_resolution", length = 32)
    private String screenResolution;
    // Timing
    @Column(name = "operation_time", nullable = false)
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSS")
    private Date operationTime;
    @Column(name = "server_receive_time")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSS")
    private Date serverReceiveTime;
    // Deduplication fields
    @Column(name = "dedupe_fingerprint", length = 500)
    private String dedupeFingerprint;
    @Column(name = "is_duplicate")
    private Boolean isDuplicate = false;
    @Column(name = "duplicate_type", length = 32)
    // Actual values produced by IntelligentDeduplicationFilter, e.g.
    // LOCAL_VALID_DUPLICATE, LOCAL_INVALID_DUPLICATE, BUSINESS_DUPLICATE
    private String duplicateType;
    @Column(name = "duplicate_count")
    private Integer duplicateCount = 1;
    @Column(name = "original_operation_id", length = 64)
    private String originalOperationId;
    @Column(name = "dedupe_reason", length = 200)
    private String dedupeReason;
    // Result info
    @Column(name = "success")
    private Boolean success = true;
    @Column(name = "error_code", length = 32)
    private String errorCode;
    @Column(name = "error_msg", length = 500)
    private String errorMsg;
    // Performance info
    @Column(name = "duration")
    private Long duration;
    @Column(name = "response_size")
    private Long responseSize;
    // Denormalized search field for fast lookups
    @Column(name = "search_key", length = 100)
    private String searchKey;
    @Transient
    private Map<String, Object> extraMap;

    /**
     * Build the denormalized search key before persisting.
     * FIX: the concatenation (userId|moduleName|operationType|businessKey) can
     * exceed the VARCHAR(100) column and fail the insert; truncate defensively.
     */
    @PrePersist
    public void generateSearchKey() {
        String key = String.format("%s|%s|%s|%s",
            this.userId,
            this.moduleName != null ? this.moduleName : "unknown",
            this.operationType != null ? this.operationType : "unknown",
            this.businessKey != null ? this.businessKey : ""
        );
        this.searchKey = key.length() > 100 ? key.substring(0, 100) : key;
    }
}
// DTO for one operation reported by the front-end tracker.
@Data
public class FrontendOperation {
    private String operationId;
    private String eventType;
    private String elementType;
    private String elementId;
    private String elementPath;
    private String elementText;
    private String elementClass;
    private String pageUrl;
    private String pageTitle;
    private String referrer;
    private Long timestamp;
    private Map<String, Object> position;
    private Map<String, Object> extra;
    private String userAgent;
    private String sessionId;
    private String userId;
    private String appId;
    // FIX: field was missing although OperationLogCollectorService calls
    // setUserIp(...) when enriching the operation with request info.
    private String userIp;
    private String dedupeFingerprint;
    private Boolean isSuspectedDuplicate;

    /**
     * Convert this DTO into the persistent log entity.
     * position/extra maps are serialized to JSON columns; all other matching
     * properties are copied by name.
     * FIX: a missing client timestamp no longer causes an NPE — the server
     * receive time is used as a fallback.
     */
    public UserOperationLog toLogEntity() {
        UserOperationLog log = new UserOperationLog();
        BeanUtils.copyProperties(this, log, "position", "extra");
        if (this.position != null) {
            log.setExtraData(JsonUtils.toJson(this.position));
        }
        if (this.extra != null) {
            log.setRequestData(JsonUtils.toJson(this.extra));
        }
        Date receiveTime = new Date();
        log.setOperationTime(this.timestamp != null ? new Date(this.timestamp) : receiveTime);
        log.setServerReceiveTime(receiveTime);
        return log;
    }
}
2. 智能去重过滤器
@Component
@Slf4j
// Server-side deduplication: two-level cache (in-process Guava cache + Redis)
// plus rule-driven time windows and business-level checks.
// NOTE(review): shouldDeduplicate, handleRedisDuplicate, isBusinessDuplicate,
// extractQueryKey and extractClickKey are referenced below but not defined in
// this excerpt — they must exist elsewhere in the class/project.
public class IntelligentDeduplicationFilter {
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;
    @Autowired
    private DeduplicationRuleService ruleService;

    // Local in-process cache (second level in front of Redis).
    private final Cache<String, OperationCache> localCache = CacheBuilder.newBuilder()
        .maximumSize(5000)
        .expireAfterWrite(10, TimeUnit.SECONDS)
        .build();

    /**
     * Run the full deduplication pipeline for one operation.
     * Order: rule check -> local cache -> Redis cache -> business check;
     * only operations that pass all stages update the caches.
     */
    public DeduplicationResult process(FrontendOperation operation) {
        // 1. Rule check (is dedupe enabled for this operation?).
        if (!shouldDeduplicate(operation)) {
            return DeduplicationResult.unique();
        }
        // 2. Build the dedupe key.
        String dedupeKey = generateDedupeKey(operation);
        // 3. Local cache check.
        OperationCache localCacheEntry = localCache.getIfPresent(dedupeKey);
        if (localCacheEntry != null && isDuplicateInCache(localCacheEntry, operation)) {
            return handleLocalDuplicate(localCacheEntry, operation, dedupeKey);
        }
        // 4. Distributed (Redis) cache check.
        String redisKey = buildRedisKey(dedupeKey);
        OperationCache redisCache = (OperationCache) redisTemplate.opsForValue().get(redisKey);
        if (redisCache != null && isDuplicateInCache(redisCache, operation)) {
            return handleRedisDuplicate(redisCache, operation, dedupeKey);
        }
        // 5. Business-level duplicate check.
        if (isBusinessDuplicate(operation)) {
            return DeduplicationResult.duplicate("业务逻辑重复", "BUSINESS_DUPLICATE");
        }
        // 6. Not a duplicate: remember it.
        updateCache(dedupeKey, operation);
        return DeduplicationResult.unique();
    }

    /**
     * Build the dedupe key from user/session/page/event plus normalized
     * element and data dimensions.
     */
    private String generateDedupeKey(FrontendOperation operation) {
        StringBuilder key = new StringBuilder();
        // Base dimensions.
        key.append(operation.getUserId()).append(":")
            .append(operation.getSessionId()).append(":")
            .append(operation.getPageUrl()).append(":")
            .append(operation.getEventType()).append(":");
        // Element dimension (normalized).
        String elementKey = normalizeElementKey(operation);
        key.append(elementKey).append(":");
        // Data dimension (key fields only).
        String dataKey = extractDataKey(operation);
        key.append(dataKey);
        return key.toString();
    }

    /**
     * Normalize the element selector path so dynamically generated indices and
     * ids do not defeat deduplication.
     */
    private String normalizeElementKey(FrontendOperation operation) {
        if (operation.getElementPath() == null) {
            return operation.getElementId() != null ?
                "id:" + operation.getElementId() : "no-element";
        }
        // Strip dynamic list indices and generated numeric ids.
        String path = operation.getElementPath()
            .replaceAll(":nth-child\\(\\d+\\)", ":nth-child(*)")
            .replaceAll("#[a-zA-Z0-9_]+\\d{8,}", "#dynamicId");
        // Keep only the last three path segments.
        String[] parts = path.split(" > ");
        if (parts.length > 3) {
            return parts[parts.length - 3] + ">" +
                parts[parts.length - 2] + ">" +
                parts[parts.length - 1];
        }
        return path;
    }

    /** Extract the event-type-specific key fields from the extra payload. */
    private String extractDataKey(FrontendOperation operation) {
        if (operation.getExtra() == null || operation.getExtra().isEmpty()) {
            return "no-data";
        }
        switch (operation.getEventType()) {
            case "FORM_SUBMIT":
                return extractFormKey(operation.getExtra());
            case "QUERY":
                return extractQueryKey(operation.getExtra());
            case "CLICK":
                return extractClickKey(operation);
            default:
                return "default";
        }
    }

    /** Serialize the identifying fields of a form submission into a key fragment. */
    private String extractFormKey(Map<String, Object> extra) {
        List<String> keyFields = Arrays.asList("id", "formId", "action", "businessKey");
        StringBuilder key = new StringBuilder();
        for (String field : keyFields) {
            if (extra.containsKey(field)) {
                key.append(field).append("=").append(extra.get(field)).append(";");
            }
        }
        return key.length() > 0 ? key.toString() : "form-default";
    }

    /**
     * An operation counts as duplicate when it falls inside the rule-configured
     * time window relative to the cached first occurrence.
     */
    private boolean isDuplicateInCache(OperationCache cache, FrontendOperation operation) {
        long timeDiff = operation.getTimestamp() - cache.getTimestamp();
        long timeWindow = ruleService.getTimeWindow(
            operation.getEventType(),
            operation.getPageUrl()
        );
        return timeDiff < timeWindow;
    }

    /**
     * Handle a duplicate detected in the local cache: valid repeats are counted
     * and still logged; invalid repeats are dropped.
     */
    private DeduplicationResult handleLocalDuplicate(
            OperationCache cache,
            FrontendOperation operation,
            String dedupeKey) {
        if (isValidRepetition(cache, operation)) {
            cache.incrementCount();
            localCache.put(dedupeKey, cache);
            return DeduplicationResult.validDuplicate(
                cache.getCount(),
                "LOCAL_VALID_DUPLICATE"
            );
        } else {
            return DeduplicationResult.duplicate(
                "本地重复操作",
                "LOCAL_INVALID_DUPLICATE"
            );
        }
    }

    /**
     * Decide whether a repeat is "valid" (user genuinely repeated an action)
     * rather than an accidental double fire.
     */
    private boolean isValidRepetition(OperationCache cache, FrontendOperation operation) {
        // 1. Event-type policy.
        String eventType = operation.getEventType();
        // Query-type operations tolerate a few repeats.
        if (eventType.equals("QUERY") || eventType.equals("SEARCH")) {
            return cache.getCount() < 3; // at most 3 repeated queries
        }
        // Submit-type operations are strictly single-shot.
        if (eventType.equals("FORM_SUBMIT") || eventType.equals("SAVE")) {
            return false;
        }
        // 2. Time gap: more than 3 s apart counts as deliberate.
        long timeDiff = operation.getTimestamp() - cache.getTimestamp();
        if (timeDiff > 3000) {
            return true;
        }
        // 3. Page change: a different page means it is not an accidental repeat.
        if (cache.getPageUrl() != null && operation.getPageUrl() != null) {
            if (!cache.getPageUrl().equals(operation.getPageUrl())) {
                return true;
            }
        }
        return false;
    }

    /** Record a first occurrence in both the local and the Redis cache. */
    private void updateCache(String dedupeKey, FrontendOperation operation) {
        OperationCache cache = new OperationCache(
            operation.getOperationId(),
            operation.getTimestamp(),
            operation.getPageUrl(),
            operation.getEventType(),
            1
        );
        localCache.put(dedupeKey, cache);
        // Redis entry gives cross-instance deduplication for 30 s.
        String redisKey = buildRedisKey(dedupeKey);
        redisTemplate.opsForValue().set(
            redisKey,
            cache,
            30, TimeUnit.SECONDS
        );
    }

    /**
     * Map a dedupe key to a bounded-length Redis key.
     * FIX: the previous implementation used String.hashCode(), a 32-bit hash —
     * distinct operations could collide on the same Redis key and be silently
     * dropped as duplicates. A SHA-256 digest makes collisions negligible while
     * keeping the key length bounded.
     */
    private String buildRedisKey(String dedupeKey) {
        try {
            java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA-256");
            byte[] digest = md.digest(dedupeKey.getBytes(java.nio.charset.StandardCharsets.UTF_8));
            StringBuilder hex = new StringBuilder(digest.length * 2);
            for (byte b : digest) {
                hex.append(String.format("%02x", b));
            }
            return "operation:dedupe:" + hex;
        } catch (java.security.NoSuchAlgorithmException e) {
            // SHA-256 is mandatory on every JVM; purely defensive fallback.
            return "operation:dedupe:" + dedupeKey;
        }
    }
}
// Cache entry describing the first occurrence of a deduplicated operation.
// Field order matters: it defines the @AllArgsConstructor parameter order used
// by IntelligentDeduplicationFilter.updateCache.
@Data
@AllArgsConstructor
class OperationCache {
    private String operationId;  // operationId of the first occurrence
    private Long timestamp;      // client timestamp of the first occurrence
    private String pageUrl;      // page the first occurrence happened on
    private String eventType;    // event type of the first occurrence
    private Integer count;       // how many occurrences have been seen so far

    // Increment the occurrence counter (count is always initialized to 1).
    public void incrementCount() {
        this.count++;
    }
}
// Outcome of a deduplication check, with factory methods for the three cases:
// unique, invalid duplicate (dropped), and valid duplicate (kept but flagged).
@Data
@Builder
class DeduplicationResult {
    private boolean duplicate;
    private boolean shouldLog;
    private String reason;
    private String duplicateType;
    private Integer duplicateCount;
    private String originalOperationId;

    /** A non-duplicate operation: log it normally. */
    public static DeduplicationResult unique() {
        return builder().duplicate(false).shouldLog(true).build();
    }

    /** An invalid duplicate: drop it, keeping the reason and type for diagnostics. */
    public static DeduplicationResult duplicate(String reason, String type) {
        return builder()
            .duplicate(true)
            .shouldLog(false)
            .duplicateType(type)
            .reason(reason)
            .build();
    }

    /** A valid (intentional) repeat: keep logging it, flagged with its count. */
    public static DeduplicationResult validDuplicate(int count, String type) {
        return builder()
            .duplicate(true)
            .shouldLog(true)
            .reason("有效重复操作")
            .duplicateType(type)
            .duplicateCount(count)
            .build();
    }
}
3. 日志收集服务
@Service
@Slf4j
// Receives batches of front-end operations, applies rate limiting and
// deduplication, and persists/publishes the surviving logs via a bounded
// in-memory queue drained by worker threads.
// NOTE(review): getClientIp, saveAsFallback, createDuplicateSummary,
// saveDuplicateSummaries, updateStatistics and saveToFallbackStorage are
// referenced below but not defined in this excerpt.
public class OperationLogCollectorService {
    @Autowired
    private IntelligentDeduplicationFilter deduplicationFilter;
    @Autowired
    private FrequencyLimitService frequencyLimitService;
    @Autowired
    private OperationLogRepository logRepository;
    @Autowired
    private RocketMQTemplate rocketMQTemplate;
    @Autowired
    private BusinessContextResolver businessContextResolver;

    // Bounded batch-processing queue; offers beyond capacity are dropped.
    private final BlockingQueue<LogProcessTask> processQueue =
        new LinkedBlockingQueue<>(10000);

    @PostConstruct
    public void init() {
        // Start the worker pool draining processQueue.
        // NOTE(review): this executor is never shut down — consider keeping a
        // reference and stopping it in a @PreDestroy hook.
        ExecutorService executor = Executors.newFixedThreadPool(3);
        for (int i = 0; i < 3; i++) {
            executor.submit(this::processQueueTask);
        }
    }

    /**
     * Receive a batch of front-end operations: enrich, rate-limit, dedupe,
     * then enqueue for asynchronous batch processing.
     * NOTE(review): this method is @Async yet takes HttpServletRequest — the
     * container may recycle the request before the async thread reads
     * getSession()/headers. Safer to extract ip/sessionId in the controller and
     * pass plain values.
     */
    @Async
    public void receiveBatchLogs(List<FrontendOperation> operations,
                                 HttpServletRequest request) {
        if (operations == null || operations.isEmpty()) {
            return;
        }
        String userIp = getClientIp(request);
        String sessionId = request.getSession().getId();
        for (FrontendOperation operation : operations) {
            try {
                // 1. Enrich with request info.
                operation.setSessionId(sessionId);
                operation.setUserIp(userIp);
                // 2. Rate-limit check.
                if (!frequencyLimitService.check(operation)) {
                    log.debug("频率限制: {}", operation.getOperationId());
                    continue;
                }
                // 3. Intelligent deduplication.
                DeduplicationResult dedupeResult = deduplicationFilter.process(operation);
                // 4. Enqueue a processing task (drop if the queue stays full for 100 ms).
                LogProcessTask task = new LogProcessTask(operation, dedupeResult);
                boolean offered = processQueue.offer(task, 100, TimeUnit.MILLISECONDS);
                if (!offered) {
                    log.warn("处理队列已满,丢弃操作: {}", operation.getOperationId());
                }
            } catch (Exception e) {
                log.error("处理操作日志失败: {}", operation.getOperationId(), e);
                // Degradation path: persist the raw operation directly.
                saveAsFallback(operation, e.getMessage());
            }
        }
    }

    /**
     * Worker loop: block for up to 100 ms for one task, then drain up to 99
     * more so tasks are processed in batches of at most 100.
     */
    private void processQueueTask() {
        List<LogProcessTask> batch = new ArrayList<>(100);
        while (true) {
            try {
                batch.clear();
                // Fetch tasks in batches.
                LogProcessTask task = processQueue.poll(100, TimeUnit.MILLISECONDS);
                if (task != null) {
                    batch.add(task);
                    processQueue.drainTo(batch, 99);
                    // Process the batch.
                    processBatch(batch);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            } catch (Exception e) {
                log.error("处理日志队列失败", e);
            }
        }
    }

    /**
     * Convert each task to a log entity, apply the dedupe verdict, and save
     * survivors plus duplicate summaries in bulk.
     */
    private void processBatch(List<LogProcessTask> tasks) {
        List<UserOperationLog> logsToSave = new ArrayList<>();
        List<UserOperationLog> duplicateSummaries = new ArrayList<>();
        for (LogProcessTask task : tasks) {
            FrontendOperation operation = task.getOperation();
            DeduplicationResult dedupeResult = task.getDedupeResult();
            // Convert to the persistent entity.
            UserOperationLog logEntity = operation.toLogEntity();
            // Resolve the business context (module, operation name, ...).
            businessContextResolver.resolve(logEntity, operation);
            // Apply the deduplication verdict.
            if (dedupeResult.isDuplicate()) {
                handleDuplicateLog(logEntity, dedupeResult, duplicateSummaries);
                if (dedupeResult.isShouldLog()) {
                    // Valid repeat: keep the record but flag it as duplicate.
                    logEntity.setIsDuplicate(true);
                    logEntity.setDuplicateType(dedupeResult.getDuplicateType());
                    logEntity.setDuplicateCount(dedupeResult.getDuplicateCount());
                    logEntity.setDedupeReason(dedupeResult.getReason());
                    if (dedupeResult.getOriginalOperationId() != null) {
                        logEntity.setOriginalOperationId(dedupeResult.getOriginalOperationId());
                    }
                    logsToSave.add(logEntity);
                }
                // Invalid repeat: not saved to the main table.
            } else {
                // Unique operation.
                logsToSave.add(logEntity);
            }
        }
        // Bulk save.
        if (!logsToSave.isEmpty()) {
            saveBatch(logsToSave);
        }
        // Persist duplicate summaries.
        if (!duplicateSummaries.isEmpty()) {
            saveDuplicateSummaries(duplicateSummaries);
        }
    }

    /**
     * Create a summary record for dropped duplicates.
     * NOTE(review): the filter emits "LOCAL_INVALID_DUPLICATE" / "BUSINESS_DUPLICATE",
     * never the bare "INVALID_DUPLICATE" checked here — as written no summary is
     * ever created. Confirm the intended literal(s).
     */
    private void handleDuplicateLog(UserOperationLog logEntity,
                                    DeduplicationResult dedupeResult,
                                    List<UserOperationLog> summaries) {
        // Build a duplicate summary record.
        if ("INVALID_DUPLICATE".equals(dedupeResult.getDuplicateType())) {
            UserOperationLog summary = createDuplicateSummary(logEntity, dedupeResult);
            summaries.add(summary);
        }
    }

    /**
     * Persist a batch, then fan out to MQ and statistics.
     * NOTE(review): this @Transactional(REQUIRES_NEW) method is invoked from
     * processBatch on the same bean — Spring's proxy is bypassed on
     * self-invocation, so no new transaction is actually started. Move it to a
     * separate bean or use TransactionTemplate.
     */
    @Transactional(propagation = Propagation.REQUIRES_NEW)
    public void saveBatch(List<UserOperationLog> logs) {
        try {
            // 1. Persist to the database.
            logRepository.batchInsert(logs);
            // 2. Publish to the message queue (async downstream processing).
            sendToMessageQueue(logs);
            // 3. Update statistics.
            updateStatistics(logs);
        } catch (Exception e) {
            log.error("批量保存操作日志失败", e);
            // Degradation: async retry or file-based fallback storage.
            saveToFallbackStorage(logs);
        }
    }

    /**
     * Publish each log to RocketMQ, skipping flagged duplicates.
     * NOTE(review): the filter emits "LOCAL_VALID_DUPLICATE", not
     * "VALID_DUPLICATE" — as written, valid repeats are also skipped here.
     * Confirm the intended literal.
     */
    private void sendToMessageQueue(List<UserOperationLog> logs) {
        for (UserOperationLog log : logs) {
            // Skip duplicates (unless they are valid repeats).
            if (log.getIsDuplicate() != null && log.getIsDuplicate()
                && !"VALID_DUPLICATE".equals(log.getDuplicateType())) {
                continue;
            }
            Message<UserOperationLog> message = MessageBuilder
                .withPayload(log)
                .setHeader("operationType", "USER_OPERATION")
                .setHeader("timestamp", System.currentTimeMillis())
                .build();
            rocketMQTemplate.send("operation-log-topic", message);
        }
    }
}
// Wrapper pairing an incoming operation with its deduplication verdict for the
// processing queue. Field order defines the @AllArgsConstructor parameter order
// used in receiveBatchLogs.
@Data
@AllArgsConstructor
class LogProcessTask {
    private FrontendOperation operation;       // the raw front-end operation
    private DeduplicationResult dedupeResult;  // verdict from IntelligentDeduplicationFilter
}
四、存储与查询优化
1. 数据库设计优化
-- 主日志表
CREATE TABLE user_operation_log (
    -- FIX: with RANGE partitioning on operation_time, MySQL requires every
    -- unique key (including the primary key) to contain the partitioning
    -- column (otherwise error 1503). The PK is therefore (id, operation_time)
    -- and the former single-column UNIQUE on operation_id becomes a composite
    -- unique key.
    id BIGINT AUTO_INCREMENT,
    operation_id VARCHAR(64) NOT NULL COMMENT '操作ID',
    trace_id VARCHAR(64) COMMENT '链路ID',
    session_id VARCHAR(128) NOT NULL COMMENT '会话ID',
    user_id VARCHAR(64) NOT NULL COMMENT '用户ID',
    username VARCHAR(128) COMMENT '用户名',
    user_ip VARCHAR(64) COMMENT '用户IP',
    page_url VARCHAR(500) COMMENT '页面URL',
    page_title VARCHAR(200) COMMENT '页面标题',
    referrer VARCHAR(500) COMMENT '来源',
    event_type VARCHAR(32) NOT NULL COMMENT '事件类型',
    element_type VARCHAR(32) COMMENT '元素类型',
    element_id VARCHAR(200) COMMENT '元素ID',
    element_path VARCHAR(1000) COMMENT '元素路径',
    element_text VARCHAR(500) COMMENT '元素文本',
    element_class VARCHAR(500) COMMENT '元素类名',
    business_key VARCHAR(64) COMMENT '业务主键',
    operation_type VARCHAR(32) COMMENT '操作类型',
    module_name VARCHAR(64) COMMENT '模块名',
    operation_name VARCHAR(64) COMMENT '操作名称',
    -- NOTE(review): the JPA entity maps request_data/extra_data with
    -- columnDefinition = "TEXT"; align the entity and this JSON DDL.
    request_data JSON COMMENT '请求数据',
    extra_data JSON COMMENT '扩展数据',
    user_agent VARCHAR(500) COMMENT '用户代理',
    device_type VARCHAR(32) COMMENT '设备类型',
    screen_resolution VARCHAR(32) COMMENT '屏幕分辨率',
    operation_time DATETIME(3) NOT NULL COMMENT '操作时间',
    server_receive_time DATETIME(3) NOT NULL COMMENT '接收时间',
    -- Deduplication fields
    dedupe_fingerprint VARCHAR(500) COMMENT '去重指纹',
    is_duplicate TINYINT DEFAULT 0 COMMENT '是否重复',
    duplicate_type VARCHAR(32) COMMENT '重复类型',
    duplicate_count INT DEFAULT 1 COMMENT '重复次数',
    original_operation_id VARCHAR(64) COMMENT '原始操作ID',
    dedupe_reason VARCHAR(200) COMMENT '去重原因',
    success TINYINT DEFAULT 1 COMMENT '是否成功',
    error_code VARCHAR(32) COMMENT '错误码',
    error_msg VARCHAR(500) COMMENT '错误信息',
    duration BIGINT COMMENT '耗时ms',
    response_size BIGINT COMMENT '响应大小',
    search_key VARCHAR(100) COMMENT '搜索关键字',
    create_time DATETIME(3) DEFAULT CURRENT_TIMESTAMP(3),
    update_time DATETIME(3) DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3),
    PRIMARY KEY (id, operation_time),
    UNIQUE KEY uk_operation (operation_id, operation_time),
    INDEX idx_user_time (user_id, operation_time),
    INDEX idx_session (session_id, operation_time),
    INDEX idx_business (business_key, operation_time),
    INDEX idx_module (module_name, operation_time),
    INDEX idx_operation_type (operation_type, operation_time),
    INDEX idx_dedupe (is_duplicate, duplicate_type),
    INDEX idx_search (search_key),
    INDEX idx_operation_time (operation_time)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='用户操作日志表'
PARTITION BY RANGE (TO_DAYS(operation_time)) (
    PARTITION p202401 VALUES LESS THAN (TO_DAYS('2024-02-01')),
    PARTITION p202402 VALUES LESS THAN (TO_DAYS('2024-03-01')),
    PARTITION p202403 VALUES LESS THAN (TO_DAYS('2024-04-01')),
    PARTITION p_future VALUES LESS THAN MAXVALUE
);
-- 重复操作摘要表
CREATE TABLE operation_duplicate_summary (
    id BIGINT PRIMARY KEY AUTO_INCREMENT,
    dedupe_key VARCHAR(500) NOT NULL COMMENT '去重键',
    user_id VARCHAR(64) NOT NULL COMMENT '用户ID',
    session_id VARCHAR(128) NOT NULL COMMENT '会话ID',
    page_url VARCHAR(500) COMMENT '页面URL',
    element_text VARCHAR(500) COMMENT '元素文本',
    first_operation_id VARCHAR(64) NOT NULL COMMENT '首次操作ID',
    first_operation_time DATETIME(3) NOT NULL COMMENT '首次操作时间',
    last_operation_id VARCHAR(64) NOT NULL COMMENT '最后操作ID',
    last_operation_time DATETIME(3) NOT NULL COMMENT '最后操作时间',
    total_count INT NOT NULL COMMENT '总次数',
    duplicate_type VARCHAR(32) NOT NULL COMMENT '重复类型',
    reason VARCHAR(200) COMMENT '原因',
    is_reasonable TINYINT DEFAULT 0 COMMENT '是否合理',
    create_time DATETIME(3) DEFAULT CURRENT_TIMESTAMP(3),
    INDEX idx_dedupe_key (dedupe_key),
    INDEX idx_user_time (user_id, first_operation_time),
    INDEX idx_reasonable (is_reasonable)
    -- FIX: engine and charset were omitted; made explicit for consistency with
    -- user_operation_log (utf8mb4 is required for the text columns here too).
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='重复操作摘要表';
2. Elasticsearch索引设计
// Elasticsearch document mapped from UserOperationLog for search/aggregation.
@Document(indexName = "user_operation_log")
@Setting(settingPath = "/elasticsearch/operation-log-settings.json")
@Mapping(mappingPath = "/elasticsearch/operation-log-mapping.json")
@Data
public class OperationLogES {
    @Id
    private String id;
    // Exact-match identity fields (Keyword).
    @Field(type = FieldType.Keyword)
    private String operationId;
    @Field(type = FieldType.Keyword)
    private String userId;
    @Field(type = FieldType.Keyword)
    private String sessionId;
    @Field(type = FieldType.Keyword)
    private String eventType;
    @Field(type = FieldType.Keyword)
    private String elementType;
    // Full-text fields analyzed with the IK Chinese analyzer.
    @Field(type = FieldType.Text, analyzer = "ik_max_word")
    private String elementText;
    @Field(type = FieldType.Text)
    private String elementPath;
    @Field(type = FieldType.Keyword)
    private String pageUrl;
    @Field(type = FieldType.Text, analyzer = "ik_max_word")
    private String pageTitle;
    @Field(type = FieldType.Keyword)
    private String moduleName;
    @Field(type = FieldType.Keyword)
    private String operationType;
    @Field(type = FieldType.Keyword)
    private String businessKey;
    // Deduplication fields (used by the duplicate-analysis aggregations).
    @Field(type = FieldType.Boolean)
    private Boolean isDuplicate;
    @Field(type = FieldType.Keyword)
    private String duplicateType;
    @Field(type = FieldType.Integer)
    private Integer duplicateCount;
    @Field(type = FieldType.Date, format = DateFormat.custom, pattern = "yyyy-MM-dd HH:mm:ss.SSS")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSS")
    private Date operationTime;
    @Field(type = FieldType.Long)
    private Long duration;
    @Field(type = FieldType.Boolean)
    private Boolean success;
    @Field(type = FieldType.Keyword)
    private String errorCode;
    @Field(type = FieldType.Object)
    private Map<String, Object> requestData;
    @Field(type = FieldType.Object)
    private Map<String, Object> extraData;
    // Pre-computed bucketing fields for aggregations.
    @Field(type = FieldType.Keyword)
    private String hourOfDay; // format: 2024-01-16T14
    @Field(type = FieldType.Keyword)
    private String dayOfWeek; // Monday, Tuesday, ... (周一、周二等)
}
五、查询与分析接口
1. 操作流水查询
@RestController
@RequestMapping("/api/operation/logs")
@Slf4j
public class OperationLogQueryController {
@Autowired
private OperationLogQueryService queryService;
/**
* 查询用户操作流水(自动过滤无效重复)
*/
@GetMapping("/user/{userId}/stream")
public PageResult<OperationLogVO> getUserOperationStream(
@PathVariable String userId,
@RequestParam(defaultValue = "1") int page,
@RequestParam(defaultValue = "50") int size,
@RequestParam(required = false) String startTime,
@RequestParam(required = false) String endTime,
@RequestParam(required = false) String eventType,
@RequestParam(required = false) Boolean includeDuplicates) {
OperationLogQuery query = OperationLogQuery.builder()
.userId(userId)
.page(page)
.size(size)
.startTime(startTime)
.endTime(endTime)
.eventType(eventType)
.includeDuplicates(includeDuplicates != null ? includeDuplicates : false)
.build();
return queryService.queryUserOperations(query);
}
/**
* 根据会话ID查询完整操作链
*/
@GetMapping("/session/{sessionId}")
public SessionOperationChain getSessionOperations(@PathVariable String sessionId) {
return queryService.getSessionOperationChain(sessionId);
}
/**
* 根据业务ID查询相关操作
*/
@GetMapping("/business/{businessKey}")
public List<OperationLogVO> getBusinessOperations(
@PathVariable String businessKey,
@RequestParam(required = false) String operationType) {
return queryService.getBusinessOperations(businessKey, operationType);
}
/**
* 搜索操作日志
*/
@PostMapping("/search")
public PageResult<OperationLogVO> searchOperations(@RequestBody OperationSearchRequest request) {
return queryService.searchOperations(request);
}
/**
* 获取操作统计
*/
@GetMapping("/stats")
public OperationStats getStats(
@RequestParam String startTime,
@RequestParam String endTime,
@RequestParam(required = false) String userId,
@RequestParam(required = false) String moduleName) {
return queryService.getOperationStats(startTime, endTime, userId, moduleName);
}
/**
* 获取重复操作分析
*/
@GetMapping("/duplicate/analysis")
public DuplicateAnalysis getDuplicateAnalysis(
@RequestParam String startTime,
@RequestParam String endTime) {
return queryService.analyzeDuplicateOperations(startTime, endTime);
}
}
// Query-service implementation backed by Elasticsearch.
// NOTE(review): convertToVO and parseAggregation are referenced below but not
// defined in this excerpt.
@Service
@Slf4j
public class OperationLogQueryServiceImpl implements OperationLogQueryService {
    @Autowired
    private OperationLogRepository logRepository;
    @Autowired
    private ElasticsearchRestTemplate elasticsearchTemplate;

    /**
     * Page through a user's operations, newest first.
     * FIX 1: page index is clamped to >= 0 — PageRequest.of throws
     * IllegalArgumentException for negative indices (e.g. when page = 0 is
     * passed by a caller bypassing the controller default).
     * FIX 2: the dedupe filter emits "LOCAL_INVALID_DUPLICATE" (and
     * "BUSINESS_DUPLICATE"), never the bare "INVALID_DUPLICATE" previously
     * excluded here, so the exclusion never matched; a wildcard on
     * *INVALID_DUPLICATE covers all invalid-duplicate variants.
     */
    @Override
    public PageResult<OperationLogVO> queryUserOperations(OperationLogQuery query) {
        // Build the boolean query.
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        // User filter.
        boolQuery.must(QueryBuilders.termQuery("userId", query.getUserId()));
        // Time-range filter.
        if (query.getStartTime() != null && query.getEndTime() != null) {
            boolQuery.must(QueryBuilders.rangeQuery("operationTime")
                .gte(query.getStartTime())
                .lte(query.getEndTime()));
        }
        // Event-type filter.
        if (query.getEventType() != null) {
            boolQuery.must(QueryBuilders.termQuery("eventType", query.getEventType()));
        }
        // Exclude invalid duplicates unless explicitly requested.
        if (!query.isIncludeDuplicates()) {
            boolQuery.mustNot(QueryBuilders.wildcardQuery("duplicateType", "*INVALID_DUPLICATE"));
        }
        // Assemble the search request (1-based page -> 0-based index, clamped).
        int pageIndex = Math.max(0, query.getPage() - 1);
        NativeSearchQuery searchQuery = new NativeSearchQueryBuilder()
            .withQuery(boolQuery)
            .withPageable(PageRequest.of(pageIndex, query.getSize()))
            .withSort(Sort.by(Sort.Direction.DESC, "operationTime"))
            .build();
        // Execute.
        SearchHits<OperationLogES> searchHits = elasticsearchTemplate.search(
            searchQuery, OperationLogES.class);
        // Map to view objects.
        List<OperationLogVO> logs = searchHits.getSearchHits().stream()
            .map(hit -> convertToVO(hit.getContent()))
            .collect(Collectors.toList());
        return new PageResult<>(
            query.getPage(),
            query.getSize(),
            searchHits.getTotalHits(),
            logs
        );
    }

    /**
     * Aggregate duplicate-operation statistics over a time range:
     * distribution by duplicate type, top duplicating users, top pages.
     */
    @Override
    public DuplicateAnalysis analyzeDuplicateOperations(String startTime, String endTime) {
        NativeSearchQuery query = new NativeSearchQueryBuilder()
            .withQuery(QueryBuilders.rangeQuery("operationTime")
                .gte(startTime)
                .lte(endTime))
            .addAggregation(AggregationBuilders.terms("by_duplicate_type")
                .field("duplicateType")
                .size(10))
            .addAggregation(AggregationBuilders.terms("top_duplicate_users")
                .field("userId")
                .size(10)
                .subAggregation(AggregationBuilders.sum("duplicate_count").field("duplicateCount")))
            .addAggregation(AggregationBuilders.terms("top_duplicate_pages")
                .field("pageUrl")
                .size(10))
            .build();
        SearchHits<OperationLogES> hits = elasticsearchTemplate.search(query, OperationLogES.class);
        DuplicateAnalysis analysis = new DuplicateAnalysis();
        analysis.setTotalOperations(hits.getTotalHits());
        // Parse aggregation results.
        ParsedStringTerms duplicateTypeAgg = hits.getAggregations().get("by_duplicate_type");
        analysis.setDuplicateTypeDistribution(
            parseAggregation(duplicateTypeAgg)
        );
        // Further aggregation parsing...
        return analysis;
    }
}
2. 实时监控面板
<!-- Operation monitoring panel (Vue template) -->
<div class="operation-monitor">
<!-- Real-time summary cards: totals, duplicate rate, average latency -->
<div class="realtime-stats">
<div class="stat-card">
<h3>今日操作总数</h3>
<div class="stat-value">{{ stats.todayTotal }}</div>
</div>
<div class="stat-card">
<h3>重复操作率</h3>
<div class="stat-value">{{ (stats.duplicateRate * 100).toFixed(2) }}%</div>
</div>
<div class="stat-card">
<h3>平均响应时间</h3>
<div class="stat-value">{{ stats.avgDuration }}ms</div>
</div>
</div>
<!-- Live operation stream with per-user and duplicate filters -->
<div class="operation-stream">
<div class="stream-header">
<h3>实时操作流水</h3>
<div class="filters">
<select v-model="filter.userId">
<option value="">所有用户</option>
<!-- NOTE(review): this v-for has no :key — add :key="user.id" so Vue can track options when the user list changes -->
<option v-for="user in users" :value="user.id">{{ user.name }}</option>
</select>
<label>
<input type="checkbox" v-model="filter.showDuplicates"> 显示重复操作
</label>
</div>
</div>
<div class="stream-list">
<div v-for="log in filteredLogs" :key="log.id"
:class="['log-item', { duplicate: log.isDuplicate }]">
<div class="log-time">{{ formatTime(log.operationTime) }}</div>
<div class="log-user">
<span class="username">{{ log.username }}</span>
<span class="user-id">({{ log.userId }})</span>
</div>
<div class="log-action">
<span class="module">{{ log.moduleName }}</span>
<span class="operation">{{ log.operationName }}</span>
<span v-if="log.elementText" class="element">- {{ log.elementText }}</span>
</div>
<div v-if="log.isDuplicate" class="duplicate-badge">
重复{{ log.duplicateCount }}次
</div>
<div class="log-details">
<button @click="showDetails(log)">详情</button>
</div>
</div>
</div>
</div>
<!-- Duplicate-operation analysis: chart plus top-offenders table -->
<div class="duplicate-analysis">
<h3>重复操作分析</h3>
<div class="chart-container">
<canvas id="duplicateChart"></canvas>
</div>
<div class="top-duplicates">
<h4>高频重复操作</h4>
<table>
<thead>
<tr>
<th>用户</th>
<th>页面</th>
<th>操作</th>
<th>重复次数</th>
<th>原因</th>
</tr>
</thead>
<tbody>
<!-- NOTE(review): this v-for has no :key — add a stable :key if rows can reorder -->
<tr v-for="item in topDuplicates">
<td>{{ item.username }}</td>
<td>{{ item.pageTitle }}</td>
<td>{{ item.operationName }}</td>
<td>{{ item.count }}</td>
<td>{{ item.reason }}</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
六、配置与监控
1. 应用配置
# application.yml — operation-log tracking configuration
operation:
track:
enabled: true
frontend:
dedupe-enabled: true
dedupe-window: 1000 # Frontend dedupe time window (ms)
batch-size: 10
batch-interval: 3000
max-queue-size: 1000
backend:
dedupe:
enabled: true
redis-ttl: 30 # Redis dedupe-cache TTL (seconds)
local-cache-size: 5000
time-windows:
CLICK: 1000
FORM_SUBMIT: 3000
QUERY: 500
INPUT_CHANGE: 0 # 0 disables dedupe for this event type
frequency:
enabled: true
limits:
CLICK: { max: 10, window: 1000 }
FORM_SUBMIT: { max: 1, window: 3000 }
QUERY: { max: 5, window: 1000 }
storage:
batch-size: 100
async-threads: 3
fallback-enabled: true
query:
default-page-size: 50
max-page-size: 200
cache-enabled: true
cache-ttl: 300 # Query-result cache TTL (seconds)
monitor:
enabled: true
metrics:
duplicate-rate-threshold: 0.3 # Alert threshold for duplicate-operation rate
error-rate-threshold: 0.05 # Alert threshold for error rate
slow-operation-threshold: 3000 # Slow-operation threshold (ms)
2. 监控指标
// Metric definitions for operation tracking (Micrometer)
@Component
public class OperationMetrics {

    private final MeterRegistry meterRegistry;

    // Counters
    private final Counter totalOperations;
    private final Counter duplicateOperations;
    private final Counter failedOperations;

    // Timer
    private final Timer processingTimer;

    public OperationMetrics(MeterRegistry meterRegistry) {
        this.meterRegistry = meterRegistry;
        this.totalOperations = Counter.builder("operation.total")
                .description("总操作数量")
                .register(meterRegistry);
        this.duplicateOperations = Counter.builder("operation.duplicate")
                .description("重复操作数量")
                .tag("type", "duplicate")
                .register(meterRegistry);
        this.failedOperations = Counter.builder("operation.failed")
                .description("失败操作数量")
                .register(meterRegistry);
        this.processingTimer = Timer.builder("operation.processing.time")
                .description("操作处理时间")
                .publishPercentiles(0.5, 0.95, 0.99)
                .register(meterRegistry);
        // Fix: register the duplicate-rate gauge ONCE, backed by a function.
        // The original called meterRegistry.gauge(name, value) inside
        // recordOperation(); Micrometer registers that gauge only on the
        // first call and holds a weak reference to the boxed value, so the
        // metric stays frozen at its first reading (and can disappear after
        // GC). A function-backed gauge is re-evaluated on every scrape.
        meterRegistry.gauge("operation.duplicate.rate", this,
                m -> m.duplicateOperations.count() / Math.max(1.0, m.totalOperations.count()));
    }

    /**
     * Records one processed operation into the counters and the timer.
     *
     * @param duplicate      whether the operation was flagged as a duplicate
     * @param success        whether processing succeeded
     * @param processingTime processing duration in milliseconds
     */
    public void recordOperation(boolean duplicate, boolean success, long processingTime) {
        totalOperations.increment();
        if (duplicate) {
            duplicateOperations.increment();
        }
        if (!success) {
            failedOperations.increment();
        }
        processingTimer.record(processingTime, TimeUnit.MILLISECONDS);
        // The duplicate rate is exposed by the function-backed gauge
        // registered in the constructor; nothing to update here.
    }
}
七、部署与运维
1. 部署架构
负载均衡器
│
├── Web服务器1 (系统A) ── 操作日志API
│ │
│ ├── Redis集群 (去重缓存)
│ │
│ └── MySQL集群 (主存储)
│
├── Web服务器2 (系统A)
│
└── 日志处理集群
│
├── RocketMQ (消息队列)
│
├── Elasticsearch集群 (搜索分析)
│
└── Kibana (可视化)
2. 运维脚本
#!/bin/bash
# Log cleanup script: prunes MySQL detail/summary tables and old ES documents.
#
# NOTE(review): the MySQL password is passed on the command line
# (-ppassword); it is visible to other users via `ps` and may land in
# shell history. Prefer --defaults-extra-file or a login path instead.
#
# NOTE(review): these DELETEs run in one statement; on large tables
# consider deleting in LIMIT-ed batches to avoid long locks.
# Keep 30 days of detailed logs
mysql -uusername -ppassword -e "
USE operation_log;
DELETE FROM user_operation_log
WHERE operation_time < DATE_SUB(NOW(), INTERVAL 30 DAY);
"
# Keep 90 days of summary logs
mysql -uusername -ppassword -e "
USE operation_log;
DELETE FROM operation_duplicate_summary
WHERE create_time < DATE_SUB(NOW(), INTERVAL 90 DAY);
"
# Purge Elasticsearch documents older than 30 days
curl -X POST "es-server:9200/user_operation_log/_delete_by_query" -H 'Content-Type: application/json' -d'
{
"query": {
"range": {
"operationTime": {
"lt": "now-30d"
}
}
}
}'
八、优势总结
✅ 智能去重
- 前端本地去重减少无效请求
- 后端分布式去重确保一致性
- 业务语义识别区分有效/无效重复
✅ 完整追溯
- 记录用户从点击到响应的完整链路
- 支持跨会话、跨设备操作追踪
- 业务上下文自动解析
✅ 高性能
- 异步批量处理,不影响主业务
- 多级缓存减少数据库压力
- 分区表设计支持海量数据
✅ 易查询
- 多维度查询接口
- 实时监控面板
- 智能分析报告
✅ 可扩展
- 模块化设计,易于扩展新功能
- 配置化管理,支持动态调整
- 支持集群部署,水平扩展
