Compare commits

...

2 Commits

Author SHA1 Message Date
haoliang 6e468089ea feat: 前端采集日志页面 + 自动分区存储过程 + 日志清理调度 + 告警类型扩展
- 新增 CollectLogPage.vue(分析记录/采集周期/原始数据 三个Tab页)
- 新增 collect-log.ts API封装和Mock数据
- 路由和侧边栏菜单添加采集日志入口
- 新增 sp_ensure_partitions 自动分区存储过程 + MariaDB Event
- 新增 LogCleanupJob 日志清理定时任务(保留天数=0不删除)
- CollectorConfig 新增日志清理配置属性
- AlertType 新增 DataAnomaly 常量
- 后端0错误,前端仅1个预存TS错误
2 days ago
haoliang 7d9634af48 feat(采集日志): 新增前端实现,包括 API 封装、Mock 数据、Vue 页面、路由和菜单;新增 CollectLog 页面组件、Mock 数据、API 接口,以及路由与侧边栏集成 2 days ago

@ -0,0 +1,16 @@
using System.Threading.Tasks;
using CncService.LogAnalyzer;
using CncService.Models;

namespace CncService
{
    /// <summary>
    /// Log ingestion contract: persists collected log records together with
    /// their analysis summary (backs the partitioned log-storage feature).
    /// </summary>
    public interface ILogIngestionService
    {
        /// <summary>Writes one collected log plus its analysis summary.</summary>
        /// <returns>true when the write succeeded; otherwise false.</returns>
        Task<bool> WriteLogAsync(LogRecord record, LogAnalysisResult analysis);

        /// <summary>
        /// Fetches the most recent log and its analysis summary for a
        /// machine/program pair — intended for quick dashboard-style lookups.
        /// </summary>
        Task<LogIngestionResult> GetLatestLogAsync(string machineId, string programName);
    }
}

@ -0,0 +1,10 @@
namespace CncService.LogAnalyzer
{
    /// <summary>Result of analysing one collected log entry.</summary>
    public class LogAnalysisResult
    {
        public string Summary { get; set; }      // human-readable summary text
        public string DetailsJson { get; set; }  // detail payload as a JSON string
        public double Confidence { get; set; }   // confidence score, 0-1
    }
}

@ -0,0 +1,9 @@
namespace CncService.Models
{
    /// <summary>Minimal result wrapper for "latest log" queries.</summary>
    public class LogIngestionResult
    {
        public long LogId { get; set; }     // id of the matched log row
        public string Message { get; set; } // message/summary text for display
    }
}

@ -0,0 +1,16 @@
using System;

namespace CncService.Models
{
    /// <summary>Represents a raw log entry captured by the ingestion service.</summary>
    public class LogRecord
    {
        public long LogId { get; set; }         // log identifier
        public string MachineId { get; set; }   // machine identifier
        public string ProgramName { get; set; } // NC program name
        public DateTime LogTime { get; set; }   // time the log was produced
        public string Action { get; set; }      // action tag, e.g. INSERT/UPDATE/DELETE or a custom action
        public string Result { get; set; }      // outcome label (new / unchanged / program replaced, ...)
        public string RawData { get; set; }     // original raw log payload
    }
}

@ -0,0 +1,67 @@
using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using CncService;
using CncService.Models;
using CncService.LogAnalyzer;
namespace CncWebApi.Controllers
{
[ApiController]
[Route("api/[controller]")]
public class LogIngestionController : ControllerBase
{
private readonly ILogIngestionService _logIngestionService;
public LogIngestionController(ILogIngestionService logIngestionService)
{
_logIngestionService = logIngestionService;
}
[HttpPost("ingest")]
public async Task<IActionResult> Ingest([FromBody] LogIngestionRequest request)
{
if (request == null)
return BadRequest("请求为空");
var record = new LogRecord
{
LogId = request.LogId,
MachineId = request.MachineId,
ProgramName = request.ProgramName,
LogTime = request.LogTime ?? DateTime.UtcNow,
Action = request.Action,
Result = request.Result,
RawData = request.RawData
};
var analysis = new LogAnalysisResult
{
Summary = request.AnalysisSummary,
DetailsJson = request.DetailsJson,
Confidence = request.Confidence
};
var ok = await _logIngestionService.WriteLogAsync(record, analysis);
if (ok)
{
return Ok(new { success = true, logId = record.LogId, analysisSummary = analysis.Summary });
}
return StatusCode(500, new { success = false, message = "写入失败" });
}
}
    /// <summary>Request body for POST api/LogIngestion/ingest.</summary>
    public class LogIngestionRequest
    {
        public long LogId { get; set; }
        public string MachineId { get; set; }
        public string ProgramName { get; set; }
        public DateTime? LogTime { get; set; }      // optional; server UTC time is used when omitted
        public string Action { get; set; }
        public string Result { get; set; }
        public string RawData { get; set; }
        public string AnalysisSummary { get; set; } // optional analysis fields below
        public string DetailsJson { get; set; }
        public double? Confidence { get; set; }     // optional confidence score
    }
}

@ -0,0 +1,20 @@
# Collect_Log 表设计与索引
- 目标:支持高并发日志写入,便于日后按月分区查询与分析。
- 主键:LogId BIGINT AUTO_INCREMENT
- 时间字段:LogTime DATETIME,作为分区键
- 其他字段示例:
- MachineId VARCHAR(64)
- ProgramName VARCHAR(128)
- Action VARCHAR(32) -- 例如 INSERT/UPDATE/DELETE 或自定义动作
- Result VARCHAR(32) -- 新增/无变化/替换加工程序等结果标签
- RawData JSON -- 原始日志片段
- AnalysisSummary JSON -- 分析摘要(由 LogAnalyzer 产出)
- 索引设计:
- INDEX idx_logtime(LogTime)
- INDEX idx_machine_program(MachineId, ProgramName, LogTime)
- FULLTEXT INDEX for JSON fields (若 MariaDB 版本支持,按需启用)
- 分区设计概念:按月 RANGE COLUMNS(LogTime) Partition 名分区如 p2024m01, p2024m02 等。
- 注意:在初始版本中,完整分区脚本需要根据实际 MariaDB 版本做微调。

@ -0,0 +1,108 @@
-- ============================================================
-- 自动分区与日志清理(幂等)
-- 1) 分区管理表 log_partition_tracker
-- 2) 存储过程 sp_ensure_partitions
-- 3) 存储过程 sp_check_partitions
-- 4) MariaDB 事件 ev_ensure_partitions
-- 注意:本脚本设计为幂等,重复执行不会重复创建分区
-- ============================================================
USE cnc_log;
-- 1. Partition tracker table: records every partition this script creates.
--    The (table, partition) primary key keeps re-runs idempotent.
CREATE TABLE IF NOT EXISTS log_partition_tracker (
  table_name VARCHAR(100) NOT NULL,     -- partitioned table name
  partition_name VARCHAR(50) NOT NULL,  -- e.g. p202406
  partition_value VARCHAR(30) NOT NULL, -- partition boundary date recorded at creation
  created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (table_name, partition_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
COMMENT='分区管理追踪表';
-- 2. Auto-partition procedure.
--    Pre-creates the next two monthly partitions for log_collect_analysis and
--    log_collect_cycle. Idempotent: information_schema is checked first and
--    the tracker insert uses INSERT IGNORE.
--
-- BUG FIXES vs. the previous version:
--   1) A partition named pYYYYMM must hold that month's rows, so its
--      VALUES LESS THAN bound is the first day of the FOLLOWING month
--      (matching the draft schema: p202401 VALUES LESS THAN '2024-02-01').
--      The old code bounded each partition by its own month's first day,
--      shifting every month's data into the next month's partition.
--   2) The generated ALTER statement opened three parentheses but closed
--      only two, so every EXECUTE failed with a syntax error.
DELIMITER $$
DROP PROCEDURE IF EXISTS sp_ensure_partitions$$
CREATE PROCEDURE sp_ensure_partitions()
BEGIN
    -- Month boundaries: @d1/@d2 are the months to create; @d2/@d3 bound them.
    SET @base := DATE_FORMAT(CURDATE(), '%Y-%m-01');
    SET @d1 := DATE_ADD(@base, INTERVAL 1 MONTH);
    SET @d2 := DATE_ADD(@base, INTERVAL 2 MONTH);
    SET @d3 := DATE_ADD(@base, INTERVAL 3 MONTH);
    SET @p1 := CONCAT('p', DATE_FORMAT(@d1, '%Y%m'));
    SET @p2 := CONCAT('p', DATE_FORMAT(@d2, '%Y%m'));
    SET @bound1 := DATE_FORMAT(@d2, '%Y-%m-01');
    SET @bound2 := DATE_FORMAT(@d3, '%Y-%m-01');

    -- log_collect_analysis: partition for month @d1
    IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS
                   WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_analysis' AND PARTITION_NAME = @p1) THEN
        SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_analysis ADD PARTITION (PARTITION ', @p1,
                           ' VALUES LESS THAN (TO_DAYS(''', @bound1, ''')))');
        PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt;
        INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_analysis', @p1, @bound1);
    END IF;
    -- log_collect_analysis: partition for month @d2
    IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS
                   WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_analysis' AND PARTITION_NAME = @p2) THEN
        SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_analysis ADD PARTITION (PARTITION ', @p2,
                           ' VALUES LESS THAN (TO_DAYS(''', @bound2, ''')))');
        PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt;
        INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_analysis', @p2, @bound2);
    END IF;

    -- log_collect_cycle: partition for month @d1
    IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS
                   WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_cycle' AND PARTITION_NAME = @p1) THEN
        SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_cycle ADD PARTITION (PARTITION ', @p1,
                           ' VALUES LESS THAN (TO_DAYS(''', @bound1, ''')))');
        PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt;
        INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_cycle', @p1, @bound1);
    END IF;
    -- log_collect_cycle: partition for month @d2
    IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS
                   WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_cycle' AND PARTITION_NAME = @p2) THEN
        SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_cycle ADD PARTITION (PARTITION ', @p2,
                           ' VALUES LESS THAN (TO_DAYS(''', @bound2, ''')))');
        PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt;
        INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_cycle', @p2, @bound2);
    END IF;
END$$
DELIMITER ;
-- 3. Partition check procedure.
--    Four (table, partition) pairs are expected to exist: the next two
--    monthly partitions on each of the two log tables. When any is missing,
--    sp_ensure_partitions is invoked; the 0/1 verdict is returned as
--    need_partition_creation.
DELIMITER $$
DROP PROCEDURE IF EXISTS sp_check_partitions$$
CREATE PROCEDURE sp_check_partitions()
BEGIN
    SET @base := DATE_FORMAT(CURDATE(), '%Y-%m-01');
    SET @p1 := CONCAT('p', DATE_FORMAT(DATE_ADD(@base, INTERVAL 1 MONTH), '%Y%m'));
    SET @p2 := CONCAT('p', DATE_FORMAT(DATE_ADD(@base, INTERVAL 2 MONTH), '%Y%m'));
    -- Count how many of the four expected partitions already exist.
    SET @found := (SELECT COUNT(*) FROM information_schema.PARTITIONS
                   WHERE TABLE_SCHEMA = 'cnc_log'
                     AND TABLE_NAME IN ('log_collect_analysis', 'log_collect_cycle')
                     AND PARTITION_NAME IN (@p1, @p2));
    SET @need := IF(@found < 4, 1, 0);
    IF @need = 1 THEN
        CALL sp_ensure_partitions();
    END IF;
    SELECT @need AS need_partition_creation;
END$$
DELIMITER ;
-- 4. MariaDB event: run sp_check_partitions on the 1st of each month at 02:00.
-- NOTE(review): SET GLOBAL requires an elevated privilege (SUPER / the
-- server's event-scheduler setting) — confirm the deploying account has it,
-- or enable event_scheduler in the server configuration instead.
SET GLOBAL event_scheduler = ON;
DROP EVENT IF EXISTS ev_ensure_partitions;
-- IF NOT EXISTS is redundant after the DROP above, but harmless.
CREATE EVENT IF NOT EXISTS ev_ensure_partitions
ON SCHEDULE
EVERY 1 MONTH
STARTS TIMESTAMP '2026-06-01 02:00:00'
DO
CALL sp_check_partitions();

@ -0,0 +1,40 @@
-- Partitioned logs table draft
-- 目标:按月分区日志表,提升写入吞吐和查询历史的性能
-- 说明:本草案为初步设计,待评审后落地实现
-- Assumptions:
-- - MariaDB 10.x 版本,支持分区按 RANGE (TO_DAYS(log_time))
-- - 日志字段与现有采集日志表接近
-- - 每月一个分区,覆盖历史数据的归档策略待定
DROP TABLE IF EXISTS logs_partitioned;
-- BUG FIX: MySQL/MariaDB requires every PRIMARY/UNIQUE key on a partitioned
-- table to include all partitioning columns. A bare PRIMARY KEY (id) combined
-- with PARTITION BY RANGE (TO_DAYS(log_time)) fails with error 1503, so the
-- primary key is (id, log_time).
CREATE TABLE logs_partitioned (
    id BIGINT AUTO_INCREMENT,
    machine_id INT NOT NULL,
    program_name VARCHAR(128) NOT NULL,
    log_time DATETIME NOT NULL,
    log_level VARCHAR(16) DEFAULT 'INFO',
    raw_payload JSON,
    analysis_summary TEXT,
    analysis_version VARCHAR(64) DEFAULT 'v1',
    PRIMARY KEY (id, log_time),
    -- Composite indexes covering machine/program + time-range scans
    KEY idx_machine_time (machine_id, log_time),
    KEY idx_program_time (program_name, log_time)
)
PARTITION BY RANGE (TO_DAYS(log_time)) (
    PARTITION p202401 VALUES LESS THAN (TO_DAYS('2024-02-01')),
    PARTITION p202402 VALUES LESS THAN (TO_DAYS('2024-03-01')),
    PARTITION p202403 VALUES LESS THAN (TO_DAYS('2024-04-01')),
    PARTITION p202404 VALUES LESS THAN (TO_DAYS('2024-05-01')),
    PARTITION p202405 VALUES LESS THAN (TO_DAYS('2024-06-01')),
    PARTITION p202406 VALUES LESS THAN (TO_DAYS('2024-07-01')),
    PARTITION p202407 VALUES LESS THAN (TO_DAYS('2024-08-01')),
    PARTITION p202408 VALUES LESS THAN (TO_DAYS('2024-09-01')),
    PARTITION p202409 VALUES LESS THAN (TO_DAYS('2024-10-01')),
    PARTITION p202410 VALUES LESS THAN (TO_DAYS('2024-11-01')),
    PARTITION p202411 VALUES LESS THAN (TO_DAYS('2024-12-01')),
    PARTITION p202412 VALUES LESS THAN (TO_DAYS('2025-01-01')),
    PARTITION p202501 VALUES LESS THAN (TO_DAYS('2025-02-01'))
);
-- 备注:
-- 未来分区的扩容/归档策略待定,例如:
-- ALTER TABLE logs_partitioned REORGANIZE PARTITION ...

@ -0,0 +1,72 @@
# 日志分表与分析设计(草案)
## 目标与范围
- 对采集日志实现按月分区写入,提升写入吞吐和查询历史的性能。
- 提供可查询的分析摘要字段,便于后台看板展示本次采集及对比分析。
- 不引入新的依赖,不改变现有接口风格,确保向后兼容。
## 设计原则
- 高并发写入:分区写入尽量避免锁争用,分区表应有合理的索引覆盖查询条件。
- 易维护:分区边界需要可扩展,提供脚本自动创建未来分区的能力。
- 可观测:数据结构中包括分析摘要字段,便于 API 与前端直接展示。
- 兼容性:尽量复用现有字段名与数据类型,避免大规模重构。
## 目标表设计(草案)
- 新增分区表 logs_partitioned字段如下
- id BIGINT 自增主键
- machine_id INT机床唯一标识
- program_name VARCHAR(128):加工程序名
- log_time DATETIME日志时间点
- log_level VARCHAR(16):日志等级,默认 INFO
- raw_payload JSON原始日志数据
- analysis_summary TEXT本次采集的分析摘要可追溯、可回放
- analysis_version VARCHAR(64):分析逻辑版本
- 索引idx_machine_time(machine_id, log_time)、idx_program_time(program_name, log_time)
- 分区PARTITION BY RANGE (TO_DAYS(log_time))
- 示例分区p202401, p202402, ..., p202501按月份边界
## 分区键与分区策略
- 使用 log_time 的日期维度进行分区:TO_DAYS(log_time) 作为分区区间值。
- 分区命名建议:按 yyyyMM 命名,如 p202401、p202402以便直观查看。
- 初始覆盖期:从系统落地起,覆盖过去 24 个月及未来 12 个月的分区。
- 未来分区维护:提供周期性脚本( monthly_partition_maintenance.sql )来创建新月份分区。
## 分区维护脚本(草案)
- 提供简单的迁移脚本 skeleton示例位于 database/sqls/partitioned_logs.sql 的分区创建段。
- 未来可将分区维护封装成 SQL store 程序或外部脚本bash/python自动按月扩容。
- 维护内容包括:创建新的分区、对旧分区归档/归档策略,及对相关日志表的清理策略。
## 数据分析字段与 API 将暴露的摘要
- analysis_summary 字段存放本次采集的要点、差异、以及可能的异常记录。
- 通过 API 提供最新采集日志及其分析摘要,便于前端看板展示与对比。
- 日志写入路径保持向后兼容:原有原始日志字段保留,新增分析字段仅供访问。
## API/前端对接要点
- 后端应提供查询接口:
- 根据 machine_id、时间范围筛选日志
- 返回最新采集日志及分析摘要
- 前端看板要显示:
- 最新日志时间、机器、程序、分析摘要要点
- 与历史时间点对比的分析摘要对比信息
## 验证与测试计划(草案)
- 基础验证:分区表创建是否成功、是否能够写入数据、是否能查询到分区信息。
- 功能验证:
- 日志写入时附带 analysis_summary 字段
- API 能返回最新采集日志及分析摘要
- 性能/压力测试:在高并发写入情况下分区表的锁争用情况、查询历史时的响应时间。
- 回归测试:现有日志写入路径不受影响,现有看板字段仍可访问
## 后续工作与风险
- 风险:分区设计对现有 ORM/DAO 层的影响,旧查询路径需兼容。
- 后续:与前端看板字段对齐、以及归档/清理策略的落地实现。
### 草案作者CI 项目组
### 审核日期2026-05
## 看板草案设计摘要(日志看板)
- 目标:展示最近采集日志、分析摘要,以及提供筛选入口,便于运维与分析人员快速定位问题。
- 数据字段日志时间戳、机床ID、加工程序名、日志等级、日志摘要。以及可选的分析摘要文本。
- 后端端点草案GET /api/logs/dashboard返回数据结构包含最近日志、等级分布、总条数和可展示的分析摘要。
- 前端展示要点:顶部筛选区、摘要统计、最近日志表格、日志摘要截断预览。
- 验证要点:前端路由可打开,后端接口能返回结构化数据,字段与前端模板对齐。

@ -0,0 +1,82 @@
/**
 * Mock data for the collect-log module (analysis records, collection cycles,
 * raw logs) — modeled after alert.ts; serves 5 mock endpoints.
 */
import type { MockMethod } from './types'
// One per-machine analysis record (mirrors CollectAnalysis in api/collect-log.ts).
interface CollectAnalysis {
  id: number
  analysisTime: string        // 'YYYY-MM-DD HH:mm:ss'
  collectAddressId: number
  addressName?: string
  machineId: number
  machineName?: string
  analysisType: string        // e.g. NORMAL_UNCHANGED / PROGRAM_SWITCH / DATA_ANOMALY
  previousProgram?: string
  currentProgram?: string
  partCountDelta?: number     // part-count change detected by the analysis
  analysisSummary?: string
}
// One collection cycle across the machines of an address.
interface CollectCycle {
  id: number
  cycleTime: string
  collectAddressId: number
  addressName?: string
  totalMachines: number
  successCount: number
  failCount: number
  hasAnomaly: number          // 0/1 flag
  changeDistribution?: string // JSON string mapping analysisType -> count
  cycleSummary?: string
}
// Raw collected log row; content is a truncated preview, not the full payload.
interface CollectRaw {
  id: number
  logTime: string
  sourceAddress?: string
  contentPreview?: string
}
// Fixture: analysis records covering every analysisType branch.
const analyses: CollectAnalysis[] = [
  { id: 1, analysisTime: '2026-05-05 10:30:00', collectAddressId: 1, addressName: 'FANUC-A栋', machineId: 1, machineName: '西-1.8', analysisType: 'NORMAL_UNCHANGED', previousProgram: 'O001', currentProgram: 'O002', partCountDelta: 0, analysisSummary: 'O001 → O002 程序切换后无产量变化' },
  { id: 2, analysisTime: '2026-05-05 11:15:00', collectAddressId: 1, addressName: 'FANUC-A栋', machineId: 2, machineName: '西-1.10', analysisType: 'PART_COUNT_INCREASE', previousProgram: 'O003', currentProgram: 'O004', partCountDelta: 25, analysisSummary: '产量增加,来自新作业' },
  { id: 3, analysisTime: '2026-05-05 12:05:00', collectAddressId: 2, addressName: 'FANUC-B栋', machineId: 3, machineName: '西-2.1', analysisType: 'PROGRAM_SWITCH', previousProgram: 'M5', currentProgram: 'M6', partCountDelta: -5, analysisSummary: '切换程序导致产量略降' },
  { id: 4, analysisTime: '2026-05-05 12:30:00', collectAddressId: 3, addressName: 'FANUC-C栋', machineId: 4, machineName: '东-3.2', analysisType: 'DEVICE_ONLINE', previousProgram: 'P10', currentProgram: 'P10', partCountDelta: 0, analysisSummary: '设备在线,正常运行' },
  { id: 5, analysisTime: '2026-05-05 13:01:00', collectAddressId: 1, addressName: 'FANUC-A栋', machineId: 1, machineName: '西-1.8', analysisType: 'DATA_ANOMALY', previousProgram: 'O001', currentProgram: 'O001', partCountDelta: 0, analysisSummary: '检测到产量异常,需人工复核' },
  { id: 6, analysisTime: '2026-05-05 14:22:00', collectAddressId: 2, addressName: 'FANUC-B栋', machineId: 6, machineName: '西-2.6', analysisType: 'COLLECTION_FAILED', previousProgram: 'O010', currentProgram: 'O010', partCountDelta: 0, analysisSummary: '日志采集失败' },
  { id: 7, analysisTime: '2026-05-05 15:40:00', collectAddressId: 2, addressName: 'FANUC-B栋', machineId: 7, machineName: '西-2.7', analysisType: 'NEW_DEVICE_FOUND', previousProgram: 'O222', currentProgram: 'O223', partCountDelta: 0, analysisSummary: '发现新设备并加入采集' },
  { id: 8, analysisTime: '2026-05-05 16:12:00', collectAddressId: 3, addressName: 'FANUC-C栋', machineId: 8, machineName: '东-3.4', analysisType: 'MANUAL_RESET', previousProgram: 'N/A', currentProgram: 'N/A', partCountDelta: 0, analysisSummary: '管理员手动重置状态' },
]
// Fixture: collection cycles, including failure/anomaly cases.
const cycles: CollectCycle[] = [
  { id: 1, cycleTime: '2026-05-05 10:30:00', collectAddressId: 1, addressName: 'FANUC-A栋', totalMachines: 8, successCount: 7, failCount: 1, hasAnomaly: 0, changeDistribution: '{"PROGRAM_SWITCH":2,"PART_COUNT_INCREASE":3,"NORMAL_UNCHANGED":3}', cycleSummary: '共8台机床完成分析' },
  { id: 2, cycleTime: '2026-05-05 11:30:00', collectAddressId: 1, addressName: 'FANUC-A栋', totalMachines: 8, successCount: 8, failCount: 0, hasAnomaly: 0, changeDistribution: '{"PROGRAM_SWITCH":0,"PART_COUNT_INCREASE":0,"NORMAL_UNCHANGED":8}', cycleSummary: '稳定分析周期' },
  { id: 3, cycleTime: '2026-05-05 13:00:00', collectAddressId: 2, addressName: 'FANUC-B栋', totalMachines: 5, successCount: 4, failCount: 1, hasAnomaly: 1, changeDistribution: '{"DATA_ANOMALY":1}', cycleSummary: '存在数据异常' },
  { id: 4, cycleTime: '2026-05-05 14:40:00', collectAddressId: 3, addressName: 'FANUC-C栋', totalMachines: 6, successCount: 6, failCount: 0, hasAnomaly: 0, cycleSummary: '全部机床完成' },
  { id: 5, cycleTime: '2026-05-05 15:20:00', collectAddressId: 1, addressName: 'FANUC-A栋', totalMachines: 8, successCount: 7, failCount: 1, hasAnomaly: 0, cycleSummary: '混合情况' },
]
// Fixture: raw log rows with JSON content previews.
const raws: CollectRaw[] = [
  { id: 1, logTime: '2026-05-05 10:28:12', sourceAddress: 'FANUC-A栋', contentPreview: '{"a":1,"b":2}' },
  { id: 2, logTime: '2026-05-05 11:29:45', sourceAddress: 'FANUC-B栋', contentPreview: '{"c":3,"d":4}' },
  { id: 3, logTime: '2026-05-05 12:31:02', sourceAddress: 'FANUC-C栋', contentPreview: '{"x":9,"y":8}' },
  { id: 4, logTime: '2026-05-05 13:45:10', sourceAddress: 'FANUC-A栋', contentPreview: '{"m":5}' },
  { id: 5, logTime: '2026-05-05 14:05:33', sourceAddress: 'FANUC-B栋', contentPreview: '{"n":6}' },
]
// Route table: list endpoints return a paginated envelope; the detail endpoint
// looks an item up by id from the in-memory fixtures above.
const mock: MockMethod[] = [
  { url: '/mock-api/admin/collect-log/analysis', method: 'get', response: () => ({ code: 0, data: { items: analyses, total: analyses.length, page: 1, pageSize: 20 } }) },
  { url: '/mock-api/admin/collect-log/analysis/:id', method: 'get', response: (req) => {
    const id = Number(req.params.id)
    const item = analyses.find(a => a.id === id)
    // Unknown id falls back to an empty object (mirrors the list envelope shape).
    return { code: 0, data: item || {} }
  } },
  // Simple simulation: return every analysis so the caller can inspect the link.
  // FIX: the handler declared a `req` parameter it never used, which tripped
  // the TypeScript noUnusedParameters check — the parameter was removed.
  { url: '/mock-api/admin/collect-log/analysis/by-raw/:rawLogId', method: 'get', response: () => ({ code: 0, data: { items: analyses } }) },
  { url: '/mock-api/admin/collect-log/cycle', method: 'get', response: () => ({ code: 0, data: { items: cycles, total: cycles.length, page: 1, pageSize: 20 } }) },
  { url: '/mock-api/admin/collect-log/raw', method: 'get', response: () => ({ code: 0, data: { items: raws, total: raws.length, page: 1, pageSize: 20 } }) },
]
export default mock

@ -0,0 +1,94 @@
import request from '@/utils/request'
import type { ApiResponse, PaginatedResponse } from '@/types'
// --- Collect-log data models ---

/** One per-machine analysis record. */
export interface CollectAnalysis {
  id: number
  analysisTime: string
  collectAddressId: number
  addressName?: string
  machineId: number
  machineName?: string
  analysisType: string        // e.g. NORMAL_UNCHANGED / PROGRAM_SWITCH / DATA_ANOMALY
  previousProgram?: string
  currentProgram?: string
  partCountDelta?: number
  analysisSummary?: string
}
/** One collection cycle across the machines of an address. */
export interface CollectCycle {
  id: number
  cycleTime: string
  collectAddressId: number
  addressName?: string
  totalMachines: number
  successCount: number
  failCount: number
  hasAnomaly: number          // 0/1 flag
  changeDistribution?: string // JSON string mapping analysisType -> count
  cycleSummary?: string
}
/** Raw collected log row; content is a truncated preview. */
export interface CollectRaw {
  id: number
  logTime: string
  sourceAddress?: string
  contentPreview?: string
}
// --- Public API wrappers ---

/** Fetch the analysis-record list (paginated, optional filters). */
export function fetchAnalysisList(params?: {
  page?: number
  pageSize?: number
  dateRange?: string[] | null
  addressId?: number
  machineId?: number
  analysisType?: string
  programName?: string
  keyword?: string
}) {
  return request.get<{ items: CollectAnalysis[]; total: number }>(
    '/admin/collect-log/analysis',
    { params }
  )
}

/** Fetch a single analysis record by id. */
export function fetchAnalysisDetail(id: number) {
  return request.get<CollectAnalysis>(`/admin/collect-log/analysis/${id}`)
}

/** Fetch the analysis records linked to one raw log entry. */
export function fetchAnalysisByRaw(rawLogId: number | string) {
  return request.get<{ items: CollectAnalysis[] }>(`/admin/collect-log/analysis/by-raw/${rawLogId}`)
}

/** Fetch the collection-cycle list (paginated, optional filters). */
export function fetchCycleList(params?: {
  page?: number
  pageSize?: number
  dateRange?: string[] | null
  addressId?: number
  hasAnomaly?: string
}) {
  return request.get<{ items: CollectCycle[]; total: number }>(
    '/admin/collect-log/cycle',
    { params }
  )
}

/** Fetch the raw-log list (paginated). */
export function fetchRawList(params?: {
  page?: number
  pageSize?: number
  dateRange?: string[] | null
  addressId?: number
}) {
  return request.get<{ items: CollectRaw[]; total: number }>(
    '/admin/collect-log/raw',
    { params }
  )
}

// NOTE(review): empty default export — confirm nothing imports the default
// from this module before removing it.
export default {}

@ -47,6 +47,10 @@
<el-icon><Link /></el-icon> <el-icon><Link /></el-icon>
<template #title>采集地址</template> <template #title>采集地址</template>
</el-menu-item> </el-menu-item>
<el-menu-item :index="menuPath('/collect-log')">
<el-icon><Notebook /></el-icon>
<template #title>采集日志</template>
</el-menu-item>
<el-menu-item :index="menuPath('/worker')"> <el-menu-item :index="menuPath('/worker')">
<el-icon><User /></el-icon> <el-icon><User /></el-icon>
<template #title>员工管理</template> <template #title>员工管理</template>
@ -100,7 +104,7 @@
import { ref, computed } from 'vue' import { ref, computed } from 'vue'
import { useRoute, useRouter } from 'vue-router' import { useRoute, useRouter } from 'vue-router'
import { ElMessageBox, ElMessage } from 'element-plus' import { ElMessageBox, ElMessage } from 'element-plus'
import { ArrowDown } from '@element-plus/icons-vue' import { ArrowDown, Notebook } from '@element-plus/icons-vue'
import { useMockMode } from '@/composables/useMockMode' import { useMockMode } from '@/composables/useMockMode'
const route = useRoute() const route = useRoute()

@ -26,6 +26,7 @@ const SettingsPage = () => import('@/views/settings/SettingsPage.vue')
const LogPage = () => import('@/views/log/LogPage.vue') const LogPage = () => import('@/views/log/LogPage.vue')
const ScreenConfigPage = () => import('@/views/screen-config/ScreenConfigPage.vue') const ScreenConfigPage = () => import('@/views/screen-config/ScreenConfigPage.vue')
const ScreenPage = () => import('@/views/screen/ScreenPage.vue') const ScreenPage = () => import('@/views/screen/ScreenPage.vue')
const CollectLogPage = () => import('@/views/collect-log/CollectLogPage.vue')
// 正常路由 // 正常路由
const normalRoutes: RouteRecordRaw[] = [ const normalRoutes: RouteRecordRaw[] = [
@ -43,6 +44,7 @@ const normalRoutes: RouteRecordRaw[] = [
{ path: 'brand/:id/edit', name: 'BrandEdit', component: BrandEditPage, meta: { title: '编辑品牌' } }, { path: 'brand/:id/edit', name: 'BrandEdit', component: BrandEditPage, meta: { title: '编辑品牌' } },
{ path: 'collect-address', name: 'CollectAddressList', component: CollectAddressListPage, meta: { title: '采集地址' } }, { path: 'collect-address', name: 'CollectAddressList', component: CollectAddressListPage, meta: { title: '采集地址' } },
{ path: 'collect-address/:id', name: 'CollectAddressDetail', component: CollectAddressDetailPage, meta: { title: '采集地址详情' } }, { path: 'collect-address/:id', name: 'CollectAddressDetail', component: CollectAddressDetailPage, meta: { title: '采集地址详情' } },
{ path: 'collect-log', name: 'CollectLog', component: CollectLogPage, meta: { title: '采集日志' } },
{ path: 'worker', name: 'WorkerList', component: WorkerListPage, meta: { title: '员工管理' } }, { path: 'worker', name: 'WorkerList', component: WorkerListPage, meta: { title: '员工管理' } },
{ path: 'worker/:id', name: 'WorkerDetail', component: WorkerDetailPage, meta: { title: '员工详情' } }, { path: 'worker/:id', name: 'WorkerDetail', component: WorkerDetailPage, meta: { title: '员工详情' } },
{ path: 'production', name: 'Production', component: ProductionPage, meta: { title: '产量报表' } }, { path: 'production', name: 'Production', component: ProductionPage, meta: { title: '产量报表' } },

@ -0,0 +1,389 @@
<template>
  <!-- Collect-log console: three tabs — analysis records / collection cycles / raw data -->
  <div class="collect-log-page" style="padding: 16px 0">
    <!-- FIX: Element Plus el-tabs binds its active pane through plain v-model
         (modelValue). The previous `v-model:active-name` listened for an
         `update:active-name` event the component never emits, so clicking a
         tab never updated `activeTab`. The stray `lazy` attribute was removed:
         `lazy` is a prop of el-tab-pane, not el-tabs, and had no effect here. -->
    <el-tabs v-model="activeTab">
      <!-- 1) Analyses -->
      <el-tab-pane label="分析记录" name="analysis">
        <el-form :inline="true" class="mb-4" label-width="100px">
          <el-form-item label="时间范围">
            <!-- FIX: restored the date-picker placeholders, which were empty -->
            <el-date-picker v-model="query.dateRange" type="daterange" value-format="YYYY-MM-DD HH:mm:ss" range-separator="~" start-placeholder="开始日期" end-placeholder="结束日期" />
          </el-form-item>
          <el-form-item label="采集地址">
            <el-select v-model="query.addressId" placeholder="全部" clearable style="min-width: 180px">
              <el-option v-for="a in addressList" :key="a.id" :label="a.name" :value="a.id" />
            </el-select>
          </el-form-item>
          <el-form-item label="机床">
            <el-select v-model="query.machineId" placeholder="全部" clearable style="min-width: 140px">
              <el-option v-for="m in machineList" :key="m.id" :label="m.name" :value="m.id" />
            </el-select>
          </el-form-item>
          <el-form-item label="分析类型">
            <el-select v-model="query.analysisType" placeholder="全部" clearable style="min-width: 180px">
              <el-option v-for="(label, key) in analysisTypeOptions" :key="key" :label="label" :value="key" />
            </el-select>
          </el-form-item>
          <el-form-item label="程序名">
            <el-input v-model="query.programName" placeholder="请输入程序名" />
          </el-form-item>
          <el-form-item>
            <!-- FIX: the query/reset buttons had no caption and rendered empty -->
            <el-button type="primary" @click="loadAnalysis">查询</el-button>
            <el-button @click="resetAnalysis">重置</el-button>
          </el-form-item>
        </el-form>
        <el-table :data="analysisList" border stripe v-loading="analysisLoading" style="width: 100%">
          <el-table-column prop="analysisTime" label="分析时间" sortable width="170" />
          <el-table-column label="采集地址" width="180">
            <template #default="{ row }">{{ row.addressName || row.collectAddressId }}</template>
          </el-table-column>
          <el-table-column prop="machineName" label="机床" width="120" show-overflow-tooltip />
          <el-table-column label="分析类型" align="center" width="120">
            <template #default="{ row }">
              <el-tag :type="analysisTypeTag(row.analysisType)" size="small">{{ analysisTypeLabel(row.analysisType) }}</el-tag>
            </template>
          </el-table-column>
          <el-table-column prop="previousProgram" label="前程序" width="120" />
          <el-table-column prop="currentProgram" label="当前程序" width="120" />
          <el-table-column prop="partCountDelta" label="产量变化" width="110" />
          <el-table-column prop="analysisSummary" label="摘要" show-overflow-tooltip />
          <el-table-column label="操作" width="120" fixed="right" align="center">
            <template #default="{ row }">
              <!-- FIX: restored the missing button caption -->
              <el-button type="text" @click="viewAnalysis(row)">详情</el-button>
            </template>
          </el-table-column>
        </el-table>
        <el-pagination
          v-model:current-page="page.page"
          v-model:page-size="page.pageSize"
          :page-sizes="[20, 50, 100]"
          :total="page.total"
          background
          layout="total, sizes, prev, pager, next, jumper"
        />
        <el-dialog v-model="analysisDetailVisible" title="分析详情" width="640px" destroy-on-close>
          <el-descriptions :column="1" border>
            <el-descriptions-item label="分析时间">{{ detailRow?.analysisTime }}</el-descriptions-item>
            <el-descriptions-item label="采集地址">{{ detailRow?.addressName || detailRow?.collectAddressId }}</el-descriptions-item>
            <el-descriptions-item label="机床">{{ detailRow?.machineName }}</el-descriptions-item>
            <el-descriptions-item label="分析类型">{{ analysisTypeLabel(detailRow?.analysisType) }}</el-descriptions-item>
            <el-descriptions-item label="前程序">{{ detailRow?.previousProgram }}</el-descriptions-item>
            <el-descriptions-item label="当前程序">{{ detailRow?.currentProgram }}</el-descriptions-item>
            <el-descriptions-item label="产量变化">{{ detailRow?.partCountDelta }}</el-descriptions-item>
            <el-descriptions-item label="摘要">{{ detailRow?.analysisSummary }}</el-descriptions-item>
          </el-descriptions>
          <template #footer>
            <el-button @click="analysisDetailVisible = false">关闭</el-button>
          </template>
        </el-dialog>
      </el-tab-pane>
      <!-- 2) Cycle -->
      <el-tab-pane label="采集周期" name="cycle">
        <el-form :inline="true" class="mb-4" label-width="100px">
          <el-form-item label="时间范围">
            <el-date-picker v-model="cycleQuery.dateRange" type="daterange" value-format="YYYY-MM-DD HH:mm:ss" range-separator="~" start-placeholder="开始日期" end-placeholder="结束日期" />
          </el-form-item>
          <el-form-item label="采集地址">
            <el-select v-model="cycleQuery.addressId" placeholder="全部" clearable style="min-width: 180px">
              <el-option v-for="a in addressList" :key="a.id" :label="a.name" :value="a.id" />
            </el-select>
          </el-form-item>
          <el-form-item label="是否异常">
            <el-select v-model="cycleQuery.hasAnomaly" placeholder="全部" clearable style="min-width: 120px">
              <el-option label="全部" value="" />
              <el-option label="有异常" value="1" />
              <el-option label="无异常" value="0" />
            </el-select>
          </el-form-item>
          <el-form-item>
            <el-button type="primary" @click="loadCycles">查询</el-button>
            <el-button @click="resetCycle">重置</el-button>
          </el-form-item>
        </el-form>
        <el-table :data="cycleList" border stripe v-loading="cycleLoading" style="width: 100%">
          <el-table-column prop="cycleTime" label="周期时间" width="170" />
          <el-table-column label="采集地址" width="180">
            <template #default="{ row }">{{ row.addressName || row.collectAddressId }}</template>
          </el-table-column>
          <el-table-column prop="totalMachines" label="总机床" width="120" />
          <el-table-column prop="successCount" label="成功" width="80" />
          <el-table-column prop="failCount" label="失败" width="80" />
          <el-table-column prop="hasAnomaly" label="异常" width="80">
            <template #default="{ row }">
              <el-tag :type="row.hasAnomaly ? 'danger' : 'success'" size="small">{{ row.hasAnomaly ? '有' : '无' }}</el-tag>
            </template>
          </el-table-column>
          <el-table-column prop="cycleSummary" label="摘要" show-overflow-tooltip />
          <el-table-column label="操作" width="120" fixed="right" align="center">
            <template #default="{ row }">
              <el-button type="text" @click="viewCycle(row)">详情</el-button>
            </template>
          </el-table-column>
        </el-table>
        <el-pagination
          v-model:current-page="cyclePage.page"
          v-model:page-size="cyclePage.pageSize"
          :page-sizes="[20, 50, 100]"
          :total="cyclePage.total"
          background
          layout="total, sizes, prev, pager, next, jumper"
        />
      </el-tab-pane>
      <!-- 3) Raw -->
      <el-tab-pane label="原始数据" name="raw">
        <el-form :inline="true" class="mb-4" label-width="100px">
          <el-form-item label="采集地址">
            <el-select v-model="rawQuery.addressId" placeholder="全部" clearable style="min-width: 180px">
              <el-option v-for="a in addressList" :key="a.id" :label="a.name" :value="a.id" />
            </el-select>
          </el-form-item>
          <el-form-item label="时间范围">
            <el-date-picker v-model="rawQuery.dateRange" type="daterange" value-format="YYYY-MM-DD HH:mm:ss" range-separator="~" start-placeholder="开始日期" end-placeholder="结束日期" />
          </el-form-item>
          <el-form-item>
            <el-button type="primary" @click="loadRaw">查询</el-button>
            <el-button @click="resetRaw">重置</el-button>
          </el-form-item>
        </el-form>
        <el-table :data="rawList" border stripe v-loading="rawLoading" style="width: 100%">
          <el-table-column prop="id" label="ID" width="80" />
          <el-table-column prop="logTime" label="时间" width="170" />
          <el-table-column prop="sourceAddress" label="地址" />
          <el-table-column prop="contentPreview" label="内容摘要" show-overflow-tooltip />
        </el-table>
        <el-pagination
          v-model:current-page="rawPage.page"
          v-model:page-size="rawPage.pageSize"
          :page-sizes="[20, 50, 100]"
          :total="rawPage.total"
          background
          layout="total, sizes, prev, pager, next, jumper"
        />
      </el-tab-pane>
    </el-tabs>
  </div>
</template>
<script setup lang="ts">
import { ref, reactive, onMounted, watch } from 'vue'
import { ElMessage } from 'element-plus'
import request from '@/utils/request'
import type { CollectAnalysis, CollectCycle, CollectRaw } from '@/api/collect-log'
import type { CollectAddress, Machine } from '@/types'

// Currently active tab pane.
const activeTab = ref<'analysis' | 'cycle' | 'raw'>('analysis')

// Filter option sources shared by all three tabs.
// NOTE(review): nothing in this chunk populates these lists — presumably an
// address/machine fetch exists further down; confirm.
const addressList = ref<CollectAddress[]>([])
const machineList = ref<Machine[]>([])

// --------------- Tab 1: Analyses ---------------
const analysisList = ref<CollectAnalysis[]>([])
const analysisLoading = ref(false)
// Row currently shown in the detail dialog.
const detailRow = ref<CollectAnalysis | null>(null)
const analysisDetailVisible = ref(false)
const page = reactive({ page: 1, pageSize: 20, total: 0 })
const query = reactive({ dateRange: null as string[] | null, addressId: undefined as number | undefined, machineId: undefined as number | undefined, analysisType: '', programName: '', keyword: '' })
// Select options for the analysis-type filter (value === label by design).
const analysisTypeOptions: Record<string, string> = {
  NORMAL_UNCHANGED: 'NORMAL_UNCHANGED',
  PART_COUNT_INCREASE: 'PART_COUNT_INCREASE',
  PROGRAM_SWITCH: 'PROGRAM_SWITCH',
  MANUAL_RESET: 'MANUAL_RESET',
  DEVICE_ONLINE: 'DEVICE_ONLINE',
  DEVICE_OFFLINE: 'DEVICE_OFFLINE',
  NEW_DEVICE_FOUND: 'NEW_DEVICE_FOUND',
  DATA_ANOMALY: 'DATA_ANOMALY',
  COLLECTION_FAILED: 'COLLECTION_FAILED'
}
// Maps an analysis-type code to an Element Plus tag color; unknown codes
// (and NORMAL_UNCHANGED) render as the neutral 'info' tag.
function analysisTypeTag(type: string) {
  switch (type) {
    case 'PART_COUNT_INCREASE':
    case 'DEVICE_ONLINE':
      return 'success'
    case 'PROGRAM_SWITCH':
    case 'MANUAL_RESET':
      return 'warning'
    case 'DEVICE_OFFLINE':
    case 'NEW_DEVICE_FOUND':
    case 'DATA_ANOMALY':
    case 'COLLECTION_FAILED':
      return 'danger'
    default:
      return 'info'
  }
}
// Human-readable (Chinese) label for an analysis-type code.
// Missing/empty input renders as 未知; unmapped codes fall through unchanged.
function analysisTypeLabel(type: string | undefined) {
  if (!type) return '未知'
  const labels: Record<string, string> = {
    NORMAL_UNCHANGED: '正常未变',
    PART_COUNT_INCREASE: '产量增减',
    PROGRAM_SWITCH: '程序切换',
    MANUAL_RESET: '手动重置',
    DEVICE_ONLINE: '设备在线',
    DEVICE_OFFLINE: '设备离线',
    NEW_DEVICE_FOUND: '新设备发现',
    DATA_ANOMALY: '数据异常',
    COLLECTION_FAILED: '采集失败',
  }
  return labels[type] ?? type
}
// Open the detail dialog for the clicked analysis row.
function viewAnalysis(row: CollectAnalysis) {
  analysisDetailVisible.value = true
  detailRow.value = row
}
// Fetch one page of analysis records for the current filters/pagination.
// The loading flag is always cleared, even when the request throws.
async function loadAnalysis() {
  analysisLoading.value = true
  try {
    const params = {
      page: page.page,
      pageSize: page.pageSize,
      dateRange: query.dateRange,
      addressId: query.addressId,
      machineId: query.machineId,
      analysisType: query.analysisType,
      programName: query.programName,
      keyword: query.keyword,
    }
    const res = await request.get<{ items: CollectAnalysis[]; total: number }>('/admin/collect-log/analysis', { params })
    const data = res.data
    analysisList.value = data?.items ?? []
    page.total = data?.total ?? 0
  } finally {
    analysisLoading.value = false
  }
}
// Clear every analysis filter back to its default, then refetch.
function resetAnalysis() {
  query.dateRange = null
  query.addressId = undefined
  query.machineId = undefined
  query.analysisType = ''
  query.programName = ''
  query.keyword = ''
  loadAnalysis()
}
// Refetch the analysis list whenever its page or pageSize changes
watch(() => [page.page, page.pageSize], () => loadAnalysis())
// --------------- Tab 2: Cycles ---------------
// Table rows and loading flag for the collection-cycle tab
const cycleList = ref<CollectCycle[]>([])
const cycleLoading = ref(false)
// Pagination state for the cycle table (page is 1-based)
const cyclePage = reactive({ page: 1, pageSize: 20, total: 0 })
// Cycle filters; hasAnomaly '' means "any" and is omitted from the request
const cycleQuery = reactive({ dateRange: null as string[] | null, addressId: undefined as number | undefined, hasAnomaly: '' })
// Fetch one page of collection cycles matching the cycle filters.
async function loadCycles() {
  cycleLoading.value = true
  try {
    const params = {
      page: cyclePage.page,
      pageSize: cyclePage.pageSize,
      dateRange: cycleQuery.dateRange,
      addressId: cycleQuery.addressId,
      // empty string means "no anomaly filter" — drop it from the request
      hasAnomaly: cycleQuery.hasAnomaly === '' ? undefined : cycleQuery.hasAnomaly,
    }
    const res = await request.get<{ items: CollectCycle[]; total: number }>('/admin/collect-log/cycle', { params })
    const data = res.data
    cycleList.value = data?.items ?? []
    cyclePage.total = data?.total ?? 0
  } finally {
    cycleLoading.value = false
  }
}
// Reset cycle filters to defaults, then refetch.
function resetCycle() {
  cycleQuery.dateRange = null
  cycleQuery.addressId = undefined
  cycleQuery.hasAnomaly = ''
  loadCycles()
}
// Show a brief summary toast for the selected cycle row.
function viewCycle(row: CollectCycle) {
  const summary = `周期 ${row.cycleTime} 区间分析完成,共 ${row.totalMachines} 台机床`
  ElMessage.info(summary)
}
// Refetch cycles when pagination changes. Watch the values as an array
// (like the analysis tab's watcher) rather than their SUM: with a sum,
// simultaneous changes whose deltas cancel (e.g. page 2→32 while
// pageSize 50→20) would never trigger a reload.
watch(() => [cyclePage.page, cyclePage.pageSize], () => loadCycles())
// Initial data load for the analysis and cycle tabs plus dropdown sources
onMounted(() => {
  loadAnalysis()
  loadCycles()
  // dropdown option sources shared by all tabs
  loadAddresses()
  loadMachines()
})
// --------------- Tab 3: Raw ---------------
const rawList = ref<CollectRaw[]>([])
const rawLoading = ref(false)
const rawPage = reactive({ page: 1, pageSize: 20, total: 0 })
const rawQuery = reactive({ dateRange: null as string[] | null, addressId: undefined as number | undefined })
const rawURLList = ref<string[]>([])
async function loadRaw() {
rawLoading.value = true
try {
const res = await request.get<{ items: CollectRaw[]; total: number }>("/admin/collect-log/raw", {
params: {
page: rawPage.page,
pageSize: rawPage.pageSize,
dateRange: rawQuery.dateRange,
addressId: rawQuery.addressId,
},
})
rawList.value = res.data?.items ?? []
rawPage.total = res.data?.total ?? 0
} finally {
rawLoading.value = false
}
}
function resetRaw() {
Object.assign(rawQuery, { dateRange: null, addressId: undefined })
loadRaw()
}
onMounted(() => {
loadRaw()
})
// -------------- Shared dropdown data loaders --------------
// Load collect-address dropdown options. Falls back to static demo rows
// when the API returns an empty list, and to an empty list when the
// request itself fails.
async function loadAddresses() {
  try {
    const r = await request.get<{ items: CollectAddress[] }>("/admin/collect-address/list")
    const items = r.data?.items
    if (items && items.length > 0) {
      addressList.value = items as CollectAddress[]
      return
    }
    // demo fallback rows (API reachable but returned nothing)
    addressList.value = [
      { id: 1, name: 'FANUC-A栋', url: '', brandId: 0, brandName: '', interval: 60, isEnabled: true, lastCollectTime: '', machineCount: 8, failCount: 0 },
      { id: 2, name: 'FANUC-B栋', url: '', brandId: 0, brandName: '', interval: 60, isEnabled: true, lastCollectTime: '', machineCount: 6, failCount: 0 },
    ]
  } catch {
    addressList.value = []
  }
}
// Load the machine dropdown options; any request failure yields an empty list.
async function loadMachines() {
  try {
    const r = await request.get<{ items: Machine[] }>('/admin/machine/list')
    const items = r.data?.items as Machine[] | undefined
    machineList.value = items ?? []
  } catch {
    machineList.value = []
  }
}
// -------------- Helpers --------------
// (analysis-type tag/label mapping helpers are defined earlier in this script)
</script>
<style scoped lang="scss">
.collect-log-page {
.mb-4 {
margin-bottom: 12px;
}
}
</style>

@ -37,6 +37,18 @@ namespace CncCollector.Config
[JsonProperty("dailySummaryTime")] [JsonProperty("dailySummaryTime")]
public string DailySummaryTime { get; set; } = "01:00"; public string DailySummaryTime { get; set; } = "01:00";
/// <summary>分析日志保留天数0=不删除)</summary>
public int AnalysisLogRetentionDays { get; set; } = 0;
/// <summary>周期日志保留天数0=不删除)</summary>
public int CycleLogRetentionDays { get; set; } = 0;
/// <summary>原始日志保留天数0=不删除)</summary>
public int RawLogRetentionDays { get; set; } = 0;
/// <summary>日志清理检查间隔(分钟)</summary>
public int LogCleanupIntervalMinutes { get; set; } = 60;
/// <summary>服务ID标识</summary> /// <summary>服务ID标识</summary>
[JsonProperty("serviceId")] [JsonProperty("serviceId")]
public string ServiceId { get; set; } = "collector-service"; public string ServiceId { get; set; } = "collector-service";

@ -1,4 +1,5 @@
using System; using System;
using CncCollector.Jobs;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading; using System.Threading;
@ -27,6 +28,8 @@ namespace CncCollector.Core
private Timer _heartbeatTimer; private Timer _heartbeatTimer;
private Timer _configPollTimer; private Timer _configPollTimer;
private Timer _dailySummaryTimer; private Timer _dailySummaryTimer;
private Timer _logCleanupTimer;
private LogCleanupJob _logCleanupJob;
private DateTime _startTime; private DateTime _startTime;
private long _totalSuccess; private long _totalSuccess;
private long _totalFail; private long _totalFail;
@ -89,6 +92,15 @@ namespace CncCollector.Core
_dailySummaryTimer = new Timer(OnDailySummaryCheck, null, _dailySummaryTimer = new Timer(OnDailySummaryCheck, null,
TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1)); TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1));
// 5. 启动日志清理定时器从配置读取间隔0 表示不启用)
_logCleanupJob = new LogCleanupJob(_config.LogConnection, _config);
if (_config.LogCleanupIntervalMinutes > 0)
{
_logCleanupTimer = new Timer(OnLogCleanup, null,
TimeSpan.FromMinutes(_config.LogCleanupIntervalMinutes),
TimeSpan.FromMinutes(_config.LogCleanupIntervalMinutes));
}
_log.Info($"===== 采集引擎已启动({_workers.Count}个采集地址)====="); _log.Info($"===== 采集引擎已启动({_workers.Count}个采集地址)=====");
} }
@ -116,6 +128,7 @@ namespace CncCollector.Core
_heartbeatTimer?.Dispose(); _heartbeatTimer?.Dispose();
_configPollTimer?.Dispose(); _configPollTimer?.Dispose();
_dailySummaryTimer?.Dispose(); _dailySummaryTimer?.Dispose();
_logCleanupTimer?.Dispose();
// 写入停止状态心跳 // 写入停止状态心跳
WriteHeartbeat("stopped"); WriteHeartbeat("stopped");
@ -332,5 +345,20 @@ namespace CncCollector.Core
_log.Error("日终汇总检查失败", ex); _log.Error("日终汇总检查失败", ex);
} }
} }
/// <summary>
/// 日志清理定时回调
/// </summary>
private void OnLogCleanup(object state)
{
try
{
_logCleanupJob?.Execute();
}
catch (Exception ex)
{
_log.Error("日志清理任务执行失败", ex);
}
}
} }
} }

@ -0,0 +1,84 @@
using System;
using Dapper;
using MySqlConnector;
using CncCollector.Config;
using log4net;
namespace CncCollector.Jobs
{
    /// <summary>
    /// Scheduled job that purges aged rows from the collector log tables.
    /// A retention setting of 0 (or negative) days means "never delete".
    /// </summary>
    public class LogCleanupJob
    {
        private static readonly ILog _log = LogManager.GetLogger(typeof(LogCleanupJob));
        private readonly string _logConnection;
        private readonly CollectorConfig _config;

        /// <summary>
        /// Creates the job.
        /// </summary>
        /// <param name="logConnection">Connection string for the log database.</param>
        /// <param name="config">Collector configuration holding the retention-day settings.</param>
        public LogCleanupJob(string logConnection, CollectorConfig config)
        {
            _logConnection = logConnection;
            _config = config;
        }

        /// <summary>
        /// Runs one cleanup pass over the three log tables. All exceptions are
        /// logged and swallowed so the hosting timer callback can never crash.
        /// </summary>
        public void Execute()
        {
            try
            {
                int total = 0;
                using (var conn = new MySqlConnection(_logConnection))
                {
                    total += CleanupTable(conn, "log_collect_analysis", "analysis_time", _config.AnalysisLogRetentionDays);
                    total += CleanupTable(conn, "log_collect_cycle", "cycle_time", _config.CycleLogRetentionDays);
                    total += CleanupRaw(conn, _config.RawLogRetentionDays);
                }
                _log.Info($"日志清理完成,总删除记录数: {total}");
            }
            catch (Exception ex)
            {
                _log.Error("执行日志清理任务失败", ex);
            }
        }

        /// <summary>
        /// Deletes rows older than <paramref name="days"/> from one log table.
        /// Returns the number of rows removed (0 when retention is disabled).
        /// </summary>
        private int CleanupTable(MySqlConnection conn, string table, string timeColumn, int days)
        {
            if (days <= 0) return 0; // retention disabled for this table

            // Table/column names come from code, not user input; the cutoff
            // value is passed as a Dapper parameter instead of being inlined.
            string sql = $"DELETE FROM cnc_log.{table} WHERE {timeColumn} < DATE_SUB(NOW(), INTERVAL @days DAY)";
            int deleted = conn.Execute(sql, new { days });
            _log.Info($"日志清理: {table} 删除 {deleted} 行,保留 {days} 天");
            return deleted;
        }

        /// <summary>
        /// Cleans the raw-log table. Prefers the created_at column and falls
        /// back to request_time when created_at does not exist (older schema).
        /// </summary>
        private int CleanupRaw(MySqlConnection conn, int days)
        {
            if (days <= 0) return 0; // retention disabled

            int deleted;
            try
            {
                deleted = conn.Execute(
                    "DELETE FROM cnc_log.log_collect_raw WHERE created_at < DATE_SUB(NOW(), INTERVAL @days DAY)",
                    new { days });
            }
            catch (MySqlException)
            {
                // created_at is missing on older schemas — retry on request_time.
                // Only database errors trigger the fallback; anything else bubbles up.
                deleted = conn.Execute(
                    "DELETE FROM cnc_log.log_collect_raw WHERE request_time < DATE_SUB(NOW(), INTERVAL @days DAY)",
                    new { days });
            }
            _log.Info($"日志清理: log_collect_raw 删除 {deleted} 行,保留 {days} 天");
            return deleted;
        }
    }
}

@ -19,6 +19,9 @@ namespace CncModels.Enum
/// <summary>未知设备</summary> /// <summary>未知设备</summary>
public const string UnknownDevice = "unknown_device"; public const string UnknownDevice = "unknown_device";
/// <summary>数据异常</summary>
public const string DataAnomaly = "data_anomaly";
/// <summary>服务错误</summary> /// <summary>服务错误</summary>
public const string ServiceError = "service_error"; public const string ServiceError = "service_error";
} }

@ -0,0 +1,29 @@
using System.Text.Json;
using Xunit;
using CncService.LogAnalyzer;
using CncService.Models;
namespace CncService.Tests
{
    public class LogSerializationTests
    {
        /// <summary>
        /// Serializing a LogAnalysisResult must include all public properties
        /// and survive a deserialization round trip without data loss.
        /// (The original test only checked the Summary property; DetailsJson
        /// and Confidence were entirely unverified.)
        /// </summary>
        [Fact]
        public void LogAnalysisResult_Serialize_ToJson_Includes_Summary()
        {
            // Arrange
            var analysis = new LogAnalysisResult
            {
                Summary = "New log entry analyzed: no changes",
                DetailsJson = "{\"change\":false}",
                Confidence = 0.92
            };

            // Act
            var json = JsonSerializer.Serialize(analysis);

            // Assert: property names and values appear in the payload
            Assert.Contains("Summary", json);
            Assert.Contains("New log entry analyzed", json);
            Assert.Contains("Confidence", json);

            // Assert: round trip preserves every field
            var roundTrip = JsonSerializer.Deserialize<LogAnalysisResult>(json);
            Assert.NotNull(roundTrip);
            Assert.Equal(analysis.Summary, roundTrip.Summary);
            Assert.Equal(analysis.DetailsJson, roundTrip.DetailsJson);
            Assert.Equal(analysis.Confidence, roundTrip.Confidence);
        }
    }
}

@ -0,0 +1,33 @@
import { describe, it, expect } from 'vitest'
// Shape of a single log row shown on the dashboard.
type LogItem = {
  id: string
  timestamp: string
  machineId: string
  programName: string
  level: string
  message: string
}
// Aggregate payload consumed by the dashboard view.
type DashboardData = {
  total: number
  counts: Record<string, number>
  logs: LogItem[]
  analysis?: string
}
describe('日志看板数据结构', () => {
  it('应包含 logs、counts、total 字段且类型正确', () => {
    const sample: DashboardData = {
      total: 5,
      counts: { ERROR: 1, INFO: 4 },
      logs: [
        { id: 'l1', timestamp: '2026-05-01T12:00:00Z', machineId: 'M1', programName: 'ProgA', level: 'ERROR', message: 'Something failed' }
      ],
      analysis: '最近一次采集无显著趋势'
    }
    expect(sample).toHaveProperty('logs')
    expect(sample).toHaveProperty('counts')
    expect(typeof sample.total).toBe('number')
    // Strengthened: the original assertions were presence-only. Also verify
    // that total is consistent with the per-level counts and that the logs
    // array carries a well-formed entry.
    const countSum = Object.values(sample.counts).reduce((acc, n) => acc + n, 0)
    expect(countSum).toBe(sample.total)
    expect(sample.logs).toHaveLength(1)
    expect(sample.logs[0]).toMatchObject({ id: 'l1', level: 'ERROR' })
  })
})

@ -0,0 +1,11 @@
测试用例草案
- 用例1分区创建与存在性
- 执行 partitioned_logs.sql确认创建表与分区存在
- 用例2写入分区数据及分析摘要
- 插入若干行数据日志时间分布在不同月份验证数据写入到了相应分区analysis_summary 非空
- 用例3API 查询最新日志及分析摘要
- 调用 API 获取最近日志,校验返回字段是否齐全
- 用例4分区查询性能
- 针对历史月份的日志执行查询,验证分区裁剪效果
- 依赖MariaDB 实例、API 服务的可用端点
- 评估标准分区创建成功数据写入正确对应分区检索正确API 返回最新摘要
Loading…
Cancel
Save