From 7d9634af4869f58965515ec9c57517183f1fb888 Mon Sep 17 00:00:00 2001 From: haoliang <821644@qq.com> Date: Tue, 5 May 2026 17:14:16 +0800 Subject: [PATCH] =?UTF-8?q?feat(=E9=87=87=E9=9B=86=E6=97=A5=E5=BF=97):=20?= =?UTF-8?q?=E6=96=B0=E5=A2=9E=E5=89=8D=E7=AB=AF=E5=AE=9E=E7=8E=B0=EF=BC=8C?= =?UTF-8?q?=E5=8C=85=E6=8B=AC=20API=20=E5=B0=81=E8=A3=85=E3=80=81Mock=20?= =?UTF-8?q?=E6=95=B0=E6=8D=AE=E3=80=81Vue=20=E9=A1=B5=E9=9D=A2=E3=80=81?= =?UTF-8?q?=E8=B7=AF=E7=94=B1=E5=92=8C=E8=8F=9C=E5=8D=95=EF=BC=9B=E6=96=B0?= =?UTF-8?q?=E5=A2=9E=20CollectLog=20=E9=A1=B5=E9=9D=A2=E7=BB=84=E4=BB=B6?= =?UTF-8?q?=E3=80=81Mock=20=E6=95=B0=E6=8D=AE=E3=80=81API=20=E6=8E=A5?= =?UTF-8?q?=E5=8F=A3=EF=BC=8C=E4=BB=A5=E5=8F=8A=E8=B7=AF=E7=94=B1=E4=B8=8E?= =?UTF-8?q?=E4=BE=A7=E8=BE=B9=E6=A0=8F=E9=9B=86=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CncService/ILogIngestionService.cs | 16 + CncService/LogAnalyzer/LogAnalysisResult.cs | 10 + CncService/Models/LogIngestionResult.cs | 9 + CncService/Models/LogRecord.cs | 16 + .../Controllers/LogIngestionController.cs | 67 +++ database/DDL/Collect_Log.md | 20 + .../sqls/04-auto-partition-and-cleanup.sql | 108 +++++ database/sqls/partitioned_logs.sql | 40 ++ .../01-采集日志/日志分表与分析设计.md | 72 ++++ frontend/mock/collect-log.ts | 82 ++++ frontend/src/api/collect-log.ts | 94 +++++ frontend/src/layouts/AdminLayout.vue | 6 +- frontend/src/router/index.ts | 2 + .../src/views/collect-log/CollectLogPage.vue | 389 ++++++++++++++++++ src/CncCollector/Config/CollectorConfig.cs | 12 + src/CncCollector/Core/CollectorEngine.cs | 28 ++ src/CncCollector/Jobs/LogCleanupJob.cs | 84 ++++ src/CncModels/Enum/AlertType.cs | 3 + .../CncService.Tests/LogSerializationTests.cs | 29 ++ tests/LogsDashboard.test.ts | 33 ++ tests/partitioned_logs_tests.md | 11 + 21 files changed, 1130 insertions(+), 1 deletion(-) create mode 100644 CncService/ILogIngestionService.cs create mode 100644 CncService/LogAnalyzer/LogAnalysisResult.cs 
create mode 100644 CncService/Models/LogIngestionResult.cs create mode 100644 CncService/Models/LogRecord.cs create mode 100644 CncWebApi/Controllers/LogIngestionController.cs create mode 100644 database/DDL/Collect_Log.md create mode 100644 database/sqls/04-auto-partition-and-cleanup.sql create mode 100644 database/sqls/partitioned_logs.sql create mode 100644 docs/02-功能清单/01-采集日志/日志分表与分析设计.md create mode 100644 frontend/mock/collect-log.ts create mode 100644 frontend/src/api/collect-log.ts create mode 100644 frontend/src/views/collect-log/CollectLogPage.vue create mode 100644 src/CncCollector/Jobs/LogCleanupJob.cs create mode 100644 tests/CncService.Tests/LogSerializationTests.cs create mode 100644 tests/LogsDashboard.test.ts create mode 100644 tests/partitioned_logs_tests.md diff --git a/CncService/ILogIngestionService.cs b/CncService/ILogIngestionService.cs new file mode 100644 index 0000000..abd7403 --- /dev/null +++ b/CncService/ILogIngestionService.cs @@ -0,0 +1,16 @@ +using System.Threading.Tasks; +using CncService.LogAnalyzer; +using CncService.Models; + +namespace CncService +{ + // 扩展日志写入与分析结果传回接口,供分区日志写入及分析摘要能力使用 + public interface ILogIngestionService + { + // 写入采集日志及其分析摘要,返回写入是否成功 + Task WriteLogAsync(LogRecord record, LogAnalysisResult analysis); + + // 读取最新一条日志及其分析摘要(用于后台看板等场景的快速查询示例) + Task GetLatestLogAsync(string machineId, string programName); + } +} diff --git a/CncService/LogAnalyzer/LogAnalysisResult.cs b/CncService/LogAnalyzer/LogAnalysisResult.cs new file mode 100644 index 0000000..bede13a --- /dev/null +++ b/CncService/LogAnalyzer/LogAnalysisResult.cs @@ -0,0 +1,10 @@ +namespace CncService.LogAnalyzer +{ + // 解析结果模型,供日志分析摘要使用 + public class LogAnalysisResult + { + public string Summary { get; set; } // 摘要文本 + public string DetailsJson { get; set; } // 详细信息(JSON 字符串) + public double Confidence { get; set; } // 可信度(0-1) + } +} diff --git a/CncService/Models/LogIngestionResult.cs b/CncService/Models/LogIngestionResult.cs new file mode 100644 
index 0000000..7499c82 --- /dev/null +++ b/CncService/Models/LogIngestionResult.cs @@ -0,0 +1,9 @@ +namespace CncService.Models +{ + // Minimal result wrapper for latest log fetch + public class LogIngestionResult + { + public long LogId { get; set; } + public string Message { get; set; } + } +} diff --git a/CncService/Models/LogRecord.cs b/CncService/Models/LogRecord.cs new file mode 100644 index 0000000..1bef61c --- /dev/null +++ b/CncService/Models/LogRecord.cs @@ -0,0 +1,16 @@ +using System; + +namespace CncService.Models +{ + // Represents a raw log entry captured by the ingestion service + public class LogRecord + { + public long LogId { get; set; } + public string MachineId { get; set; } + public string ProgramName { get; set; } + public DateTime LogTime { get; set; } + public string Action { get; set; } + public string Result { get; set; } + public string RawData { get; set; } + } +} diff --git a/CncWebApi/Controllers/LogIngestionController.cs b/CncWebApi/Controllers/LogIngestionController.cs new file mode 100644 index 0000000..db31df3 --- /dev/null +++ b/CncWebApi/Controllers/LogIngestionController.cs @@ -0,0 +1,67 @@ +using System; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Mvc; +using CncService; +using CncService.Models; +using CncService.LogAnalyzer; + +namespace CncWebApi.Controllers +{ + [ApiController] + [Route("api/[controller]")] + public class LogIngestionController : ControllerBase + { + private readonly ILogIngestionService _logIngestionService; + + public LogIngestionController(ILogIngestionService logIngestionService) + { + _logIngestionService = logIngestionService; + } + + [HttpPost("ingest")] + public async Task Ingest([FromBody] LogIngestionRequest request) + { + if (request == null) + return BadRequest("请求为空"); + + var record = new LogRecord + { + LogId = request.LogId, + MachineId = request.MachineId, + ProgramName = request.ProgramName, + LogTime = request.LogTime ?? 
DateTime.UtcNow, + Action = request.Action, + Result = request.Result, + RawData = request.RawData + }; + + var analysis = new LogAnalysisResult + { + Summary = request.AnalysisSummary, + DetailsJson = request.DetailsJson, + Confidence = request.Confidence ?? 0 + }; + + var ok = await _logIngestionService.WriteLogAsync(record, analysis); + if (ok) + { + return Ok(new { success = true, logId = record.LogId, analysisSummary = analysis.Summary }); + } + return StatusCode(500, new { success = false, message = "写入失败" }); + } + } + + public class LogIngestionRequest + { + public long LogId { get; set; } + public string MachineId { get; set; } + public string ProgramName { get; set; } + public DateTime? LogTime { get; set; } + public string Action { get; set; } + public string Result { get; set; } + public string RawData { get; set; } + public string AnalysisSummary { get; set; } + public string DetailsJson { get; set; } + public double? Confidence { get; set; } + } +} diff --git a/database/DDL/Collect_Log.md b/database/DDL/Collect_Log.md new file mode 100644 index 0000000..ad0bde3 --- /dev/null +++ b/database/DDL/Collect_Log.md @@ -0,0 +1,20 @@ +# Collect_Log 表设计与索引 + +- 目标:支持高并发日志写入,便于日后按月分区查询与分析。 +- 主键:LogId BIGINT AUTO_INCREMENT +- 时间字段:LogTime DATETIME,作为分区键 +- 其他字段示例: + - MachineId VARCHAR(64) + - ProgramName VARCHAR(128) + - Action VARCHAR(32) -- 例如 INSERT/UPDATE/DELETE 或自定义动作 + - Result VARCHAR(32) -- 新增/无变化/替换加工程序等结果标签 + - RawData JSON -- 原始日志片段 + - AnalysisSummary JSON -- 分析摘要(由 LogAnalyzer 产出) + +- 索引设计: + - INDEX idx_logtime(LogTime) + - INDEX idx_machine_program(MachineId, ProgramName, LogTime) + - FULLTEXT INDEX for JSON fields (若 MariaDB 版本支持,按需启用) + +- 分区设计概念:按月 RANGE COLUMNS(LogTime) Partition 名分区如 p2024m01, p2024m02 等。 +- 注意:在初始版本中,完整分区脚本需要根据实际 MariaDB 版本做微调。 diff --git a/database/sqls/04-auto-partition-and-cleanup.sql b/database/sqls/04-auto-partition-and-cleanup.sql new file mode 100644 index 0000000..3aae5a1 --- /dev/null +++ 
b/database/sqls/04-auto-partition-and-cleanup.sql @@ -0,0 +1,108 @@ +-- ============================================================ +-- 自动分区与日志清理(幂等) +-- 1) 分区管理表 log_partition_tracker +-- 2) 存储过程 sp_ensure_partitions +-- 3) 存储过程 sp_check_partitions +-- 4) MariaDB 事件 ev_ensure_partitions +-- 注意:本脚本设计为幂等,重复执行不会重复创建分区 +-- ============================================================ + +USE cnc_log; + +-- 1. 分区追踪表 +CREATE TABLE IF NOT EXISTS log_partition_tracker ( + table_name VARCHAR(100) NOT NULL, + partition_name VARCHAR(50) NOT NULL, + partition_value VARCHAR(30) NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (table_name, partition_name) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci + COMMENT='分区管理追踪表'; + +-- 2. 自动分区存储过程 +DELIMITER $$ +DROP PROCEDURE IF EXISTS sp_ensure_partitions$$ +CREATE PROCEDURE sp_ensure_partitions() +BEGIN + -- 当前月的第一天 + SET @base := DATE_FORMAT(CURDATE(), '%Y-%m-01'); + SET @d1 := DATE_ADD(@base, INTERVAL 1 MONTH); + SET @d2 := DATE_ADD(@base, INTERVAL 2 MONTH); + SET @p1 := CONCAT('p', DATE_FORMAT(@d1, '%Y%m')); + SET @p2 := CONCAT('p', DATE_FORMAT(@d2, '%Y%m')); + + -- 对 log_collect_analysis 表分区 + IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS + WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_analysis' AND PARTITION_NAME = @p1) THEN + SET @dead1 := DATE_FORMAT(@d1, '%Y-%m-01'); + SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_analysis ADD PARTITION (PARTITION ', @p1, + ' VALUES LESS THAN (TO_DAYS(', '''', @dead1, '''', ')))'); + PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; + INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_analysis', @p1, @dead1); + END IF; + + IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS + WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_analysis' AND PARTITION_NAME = @p2) THEN + SET @dead2 := DATE_FORMAT(@d2, '%Y-%m-01'); + SET @sql 
:= CONCAT('ALTER TABLE cnc_log.log_collect_analysis ADD PARTITION (PARTITION ', @p2, + ' VALUES LESS THAN (TO_DAYS(', '''', @dead2, '''', ')))'); + PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; + INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_analysis', @p2, @dead2); + END IF; + + -- 对 log_collect_cycle 表分区 + IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS + WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_cycle' AND PARTITION_NAME = @p1) THEN + SET @dead1 := DATE_FORMAT(@d1, '%Y-%m-01'); + SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_cycle ADD PARTITION (PARTITION ', @p1, + ' VALUES LESS THAN (TO_DAYS(', '''', @dead1, '''', ')))'); + PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; + INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_cycle', @p1, @dead1); + END IF; + + IF NOT EXISTS (SELECT 1 FROM information_schema.PARTITIONS + WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_cycle' AND PARTITION_NAME = @p2) THEN + SET @dead2 := DATE_FORMAT(@d2, '%Y-%m-01'); + SET @sql := CONCAT('ALTER TABLE cnc_log.log_collect_cycle ADD PARTITION (PARTITION ', @p2, + ' VALUES LESS THAN (TO_DAYS(', '''', @dead2, '''', ')))'); + PREPARE stmt FROM @sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; + INSERT IGNORE INTO log_partition_tracker(table_name, partition_name, partition_value) VALUES ('log_collect_cycle', @p2, @dead2); + END IF; +END$$ +DELIMITER ; + +-- 3. 
分区检查存储过程 +DELIMITER $$ +DROP PROCEDURE IF EXISTS sp_check_partitions$$ +CREATE PROCEDURE sp_check_partitions() +BEGIN + -- 计算未来两月分区名是否存在 + SET @base := DATE_FORMAT(CURDATE(), '%Y-%m-01'); + SET @d1 := DATE_ADD(@base, INTERVAL 1 MONTH); + SET @d2 := DATE_ADD(@base, INTERVAL 2 MONTH); + SET @p1 := CONCAT('p', DATE_FORMAT(@d1, '%Y%m')); + SET @p2 := CONCAT('p', DATE_FORMAT(@d2, '%Y%m')); + + SET @need := 0; + IF (SELECT COUNT(*) FROM information_schema.PARTITIONS WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_analysis' AND PARTITION_NAME = @p1) = 0 THEN SET @need = 1; END IF; + IF (SELECT COUNT(*) FROM information_schema.PARTITIONS WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_cycle' AND PARTITION_NAME = @p1) = 0 THEN SET @need = 1; END IF; + IF (SELECT COUNT(*) FROM information_schema.PARTITIONS WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_analysis' AND PARTITION_NAME = @p2) = 0 THEN SET @need = 1; END IF; + IF (SELECT COUNT(*) FROM information_schema.PARTITIONS WHERE TABLE_SCHEMA = 'cnc_log' AND TABLE_NAME = 'log_collect_cycle' AND PARTITION_NAME = @p2) = 0 THEN SET @need = 1; END IF; + + IF @need = 1 THEN + CALL sp_ensure_partitions(); + END IF; + + SELECT @need AS need_partition_creation; +END$$ +DELIMITER ; + +-- 4. 
MariaDB 事件:每月1日凌晨2:00执行 sp_check_partitions +SET GLOBAL event_scheduler = ON; +DROP EVENT IF EXISTS ev_ensure_partitions; +CREATE EVENT IF NOT EXISTS ev_ensure_partitions +ON SCHEDULE + EVERY 1 MONTH +STARTS TIMESTAMP '2026-06-01 02:00:00' +DO + CALL sp_check_partitions(); diff --git a/database/sqls/partitioned_logs.sql b/database/sqls/partitioned_logs.sql new file mode 100644 index 0000000..d3e408f --- /dev/null +++ b/database/sqls/partitioned_logs.sql @@ -0,0 +1,40 @@ +-- Partitioned logs table draft +-- 目标:按月分区日志表,提升写入吞吐和查询历史的性能 +-- 说明:本草案为初步设计,待评审后落地实现 +-- Assumptions: +-- - MariaDB 10.x 版本,支持分区按 RANGE (TO_DAYS(log_time)) +-- - 日志字段与现有采集日志表接近 +-- - 每月一个分区,覆盖历史数据的归档策略待定 +DROP TABLE IF EXISTS logs_partitioned; +CREATE TABLE logs_partitioned ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + machine_id INT NOT NULL, + program_name VARCHAR(128) NOT NULL, + log_time DATETIME NOT NULL, + log_level VARCHAR(16) DEFAULT 'INFO', + raw_payload JSON, + analysis_summary TEXT, + analysis_version VARCHAR(64) DEFAULT 'v1', + -- 便于按机床与时间筛选的组合索引 + KEY idx_machine_time (machine_id, log_time), + KEY idx_program_time (program_name, log_time) +) +PARTITION BY RANGE (TO_DAYS(log_time)) ( + PARTITION p202401 VALUES LESS THAN (TO_DAYS('2024-02-01')), + PARTITION p202402 VALUES LESS THAN (TO_DAYS('2024-03-01')), + PARTITION p202403 VALUES LESS THAN (TO_DAYS('2024-04-01')), + PARTITION p202404 VALUES LESS THAN (TO_DAYS('2024-05-01')), + PARTITION p202405 VALUES LESS THAN (TO_DAYS('2024-06-01')), + PARTITION p202406 VALUES LESS THAN (TO_DAYS('2024-07-01')), + PARTITION p202407 VALUES LESS THAN (TO_DAYS('2024-08-01')), + PARTITION p202408 VALUES LESS THAN (TO_DAYS('2024-09-01')), + PARTITION p202409 VALUES LESS THAN (TO_DAYS('2024-10-01')), + PARTITION p202410 VALUES LESS THAN (TO_DAYS('2024-11-01')), + PARTITION p202411 VALUES LESS THAN (TO_DAYS('2024-12-01')), + PARTITION p202412 VALUES LESS THAN (TO_DAYS('2025-01-01')), + PARTITION p202501 VALUES LESS THAN (TO_DAYS('2025-02-01')) +); + +-- 备注: 
+- 未来月份的分区建议通过定期执行脚本自动追加分区 +- 可以通过 ALTER TABLE logs_partitioned REORGANIZE PARTITION ...? 进行滚动归档 diff --git a/docs/02-功能清单/01-采集日志/日志分表与分析设计.md b/docs/02-功能清单/01-采集日志/日志分表与分析设计.md new file mode 100644 index 0000000..d695e81 --- /dev/null +++ b/docs/02-功能清单/01-采集日志/日志分表与分析设计.md @@ -0,0 +1,72 @@ +# 日志分表与分析设计(草案) + +## 目标与范围 +- 对采集日志实现按月分区写入,提升写入吞吐和查询历史的性能。 +- 提供可查询的分析摘要字段,便于后台看板展示本次采集及对比分析。 +- 不引入新的依赖,不改变现有接口接口风格,确保向后兼容。 + +## 设计原则 +- 高并发写入:分区写入尽量避免锁争用,分区表应有合理的索引覆盖查询条件。 +- 易维护:分区边界需要可扩展,提供脚本自动创建未来分区的能力。 +- 可观测:数据结构中包括分析摘要字段,便于 API 与前端直接展示。 +- 兼容性:尽量复用现有字段名与数据类型,避免大规模重构。 + +## 目标表设计(草案) +- 新增分区表 logs_partitioned,字段如下: + - id BIGINT 自增主键 + - machine_id INT:机床唯一标识 + - program_name VARCHAR(128):加工程序名 + - log_time DATETIME:日志时间点 + - log_level VARCHAR(16):日志等级,默认 INFO + - raw_payload JSON:原始日志数据 + - analysis_summary TEXT:本次采集的分析摘要(可追溯、可回放) + - analysis_version VARCHAR(64):分析逻辑版本 + - 索引:idx_machine_time(machine_id, log_time)、idx_program_time(program_name, log_time) +- 分区:PARTITION BY RANGE (TO_DAYS(log_time)) +- 示例分区:p202401, p202402, ..., p202501(按月份边界) + +## 分区键与分区策略 +- 使用 LOG_TIME 的日期维度进行分区:TO_DAYS(log_time) 作为分区区间值。 +- 分区命名建议:按 yyyyMM 命名,如 p202401、p202402,以便直观查看。 +- 初始覆盖期:从系统落地起,覆盖过去 24 个月及未来 12 个月的分区。 +- 未来分区维护:提供周期性脚本( monthly_partition_maintenance.sql )来创建新月份分区。 + +## 分区维护脚本(草案) +- 提供简单的迁移脚本 skeleton,示例位于 database/sqls/partitioned_logs.sql 的分区创建段。 +- 未来可将分区维护封装成 SQL store 程序或外部脚本(bash/python),自动按月扩容。 +- 维护内容包括:创建新的分区、对旧分区归档/归档策略,及对相关日志表的清理策略。 + +## 数据分析字段与 API 将暴露的摘要 +- analysis_summary 字段存放本次采集的要点、差异、以及可能的异常记录。 +- 通过 API 提供最新采集日志及其分析摘要,便于前端看板展示与对比。 +- 日志写入路径保持向后兼容:原有原始日志字段保留,新增分析字段仅供访问。 + +## API/前端对接要点 +- 后端应提供查询接口: + - 根据 machine_id、时间范围筛选日志 + - 返回最新采集日志及分析摘要 +- 前端看板要显示: + - 最新日志时间、机器、程序、分析摘要要点 + - 与历史时间点对比的分析摘要对比信息 + +## 验证与测试计划(草案) +- 基础验证:分区表创建是否成功、是否能够写入数据、是否能查询到分区信息。 +- 功能验证: + - 日志写入时附带 analysis_summary 字段 + - API 能返回最新采集日志及分析摘要 +- 性能/压力测试:在高并发写入情况下分区表的锁争用情况、查询历史时的响应时间。 +- 回归测试:现有日志写入路径不受影响,现有看板字段仍可访问 + +## 后续工作与风险 +- 风险:分区设计对现有 ORM/DAO 层的影响,旧查询路径需兼容。 +- 
后续:与前端看板字段对齐、以及归档/清理策略的落地实现。 + +### 草案作者:CI 项目组 +### 审核日期:2026-05 + +## 看板草案设计摘要(日志看板) +- 目标:展示最近采集日志、分析摘要,以及提供筛选入口,便于运维与分析人员快速定位问题。 +- 数据字段:日志时间戳、机床ID、加工程序名、日志等级、日志摘要。以及可选的分析摘要文本。 +- 后端端点草案:GET /api/logs/dashboard,返回数据结构包含最近日志、等级分布、总条数和可展示的分析摘要。 +- 前端展示要点:顶部筛选区、摘要统计、最近日志表格、日志摘要截断预览。 +- 验证要点:前端路由可打开,后端接口能返回结构化数据,字段与前端模板对齐。 diff --git a/frontend/mock/collect-log.ts b/frontend/mock/collect-log.ts new file mode 100644 index 0000000..845dd8b --- /dev/null +++ b/frontend/mock/collect-log.ts @@ -0,0 +1,82 @@ +/** Mock 数据:采集日志模块 + * 参考 alert.ts 的结构,提供 5 种端点的 Mock 数据 + */ +import type { MockMethod } from './types' + +interface CollectAnalysis { + id: number + analysisTime: string + collectAddressId: number + addressName?: string + machineId: number + machineName?: string + analysisType: string + previousProgram?: string + currentProgram?: string + partCountDelta?: number + analysisSummary?: string +} + +interface CollectCycle { + id: number + cycleTime: string + collectAddressId: number + addressName?: string + totalMachines: number + successCount: number + failCount: number + hasAnomaly: number + changeDistribution?: string + cycleSummary?: string +} + +interface CollectRaw { + id: number + logTime: string + sourceAddress?: string + contentPreview?: string +} + +const analyses: CollectAnalysis[] = [ + { id: 1, analysisTime: '2026-05-05 10:30:00', collectAddressId: 1, addressName: 'FANUC-A栋', machineId: 1, machineName: '西-1.8', analysisType: 'NORMAL_UNCHANGED', previousProgram: 'O001', currentProgram: 'O002', partCountDelta: 0, analysisSummary: 'O001 → O002 程序切换后无产量变化' }, + { id: 2, analysisTime: '2026-05-05 11:15:00', collectAddressId: 1, addressName: 'FANUC-A栋', machineId: 2, machineName: '西-1.10', analysisType: 'PART_COUNT_INCREASE', previousProgram: 'O003', currentProgram: 'O004', partCountDelta: 25, analysisSummary: '产量增加,来自新作业' }, + { id: 3, analysisTime: '2026-05-05 12:05:00', collectAddressId: 2, addressName: 'FANUC-B栋', machineId: 3, machineName: '西-2.1', 
analysisType: 'PROGRAM_SWITCH', previousProgram: 'M5', currentProgram: 'M6', partCountDelta: -5, analysisSummary: '切换程序导致产量略降' }, + { id: 4, analysisTime: '2026-05-05 12:30:00', collectAddressId: 3, addressName: 'FANUC-C栋', machineId: 4, machineName: '东-3.2', analysisType: 'DEVICE_ONLINE', previousProgram: 'P10', currentProgram: 'P10', partCountDelta: 0, analysisSummary: '设备在线,正常运行' }, + { id: 5, analysisTime: '2026-05-05 13:01:00', collectAddressId: 1, addressName: 'FANUC-A栋', machineId: 1, machineName: '西-1.8', analysisType: 'DATA_ANOMALY', previousProgram: 'O001', currentProgram: 'O001', partCountDelta: 0, analysisSummary: '检测到产量异常,需人工复核' }, + { id: 6, analysisTime: '2026-05-05 14:22:00', collectAddressId: 2, addressName: 'FANUC-B栋', machineId: 6, machineName: '西-2.6', analysisType: 'COLLECTION_FAILED', previousProgram: 'O010', currentProgram: 'O010', partCountDelta: 0, analysisSummary: '日志采集失败' }, + { id: 7, analysisTime: '2026-05-05 15:40:00', collectAddressId: 2, addressName: 'FANUC-B栋', machineId: 7, machineName: '西-2.7', analysisType: 'NEW_DEVICE_FOUND', previousProgram: 'O222', currentProgram: 'O223', partCountDelta: 0, analysisSummary: '发现新设备并加入采集' }, + { id: 8, analysisTime: '2026-05-05 16:12:00', collectAddressId: 3, addressName: 'FANUC-C栋', machineId: 8, machineName: '东-3.4', analysisType: 'MANUAL_RESET', previousProgram: 'N/A', currentProgram: 'N/A', partCountDelta: 0, analysisSummary: '管理员手动重置状态' }, +] + +const cycles: CollectCycle[] = [ + { id: 1, cycleTime: '2026-05-05 10:30:00', collectAddressId: 1, addressName: 'FANUC-A栋', totalMachines: 8, successCount: 7, failCount: 1, hasAnomaly: 0, changeDistribution: '{"PROGRAM_SWITCH":2,"PART_COUNT_INCREASE":3,"NORMAL_UNCHANGED":3}', cycleSummary: '共8台机床完成分析' }, + { id: 2, cycleTime: '2026-05-05 11:30:00', collectAddressId: 1, addressName: 'FANUC-A栋', totalMachines: 8, successCount: 8, failCount: 0, hasAnomaly: 0, changeDistribution: '{"PROGRAM_SWITCH":0,"PART_COUNT_INCREASE":0,"NORMAL_UNCHANGED":8}', 
cycleSummary: '稳定分析周期' }, + { id: 3, cycleTime: '2026-05-05 13:00:00', collectAddressId: 2, addressName: 'FANUC-B栋', totalMachines: 5, successCount: 4, failCount: 1, hasAnomaly: 1, changeDistribution: '{"DATA_ANOMALY":1}', cycleSummary: '存在数据异常' }, + { id: 4, cycleTime: '2026-05-05 14:40:00', collectAddressId: 3, addressName: 'FANUC-C栋', totalMachines: 6, successCount: 6, failCount: 0, hasAnomaly: 0, cycleSummary: '全部机床完成' }, + { id: 5, cycleTime: '2026-05-05 15:20:00', collectAddressId: 1, addressName: 'FANUC-A栋', totalMachines: 8, successCount: 7, failCount: 1, hasAnomaly: 0, cycleSummary: '混合情况' }, +] + +const raws: CollectRaw[] = [ + { id: 1, logTime: '2026-05-05 10:28:12', sourceAddress: 'FANUC-A栋', contentPreview: '{"a":1,"b":2}' }, + { id: 2, logTime: '2026-05-05 11:29:45', sourceAddress: 'FANUC-B栋', contentPreview: '{"c":3,"d":4}' }, + { id: 3, logTime: '2026-05-05 12:31:02', sourceAddress: 'FANUC-C栋', contentPreview: '{"x":9,"y":8}' }, + { id: 4, logTime: '2026-05-05 13:45:10', sourceAddress: 'FANUC-A栋', contentPreview: '{"m":5}' }, + { id: 5, logTime: '2026-05-05 14:05:33', sourceAddress: 'FANUC-B栋', contentPreview: '{"n":6}' }, +] + +const mock: MockMethod[] = [ + { url: '/mock-api/admin/collect-log/analysis', method: 'get', response: () => ({ code: 0, data: { items: analyses, total: analyses.length, page: 1, pageSize: 20 } }) }, + { url: '/mock-api/admin/collect-log/analysis/:id', method: 'get', response: (req) => { + const id = Number(req.params.id) + const item = analyses.find(a => a.id === id) + return { code: 0, data: item || {} } + } }, + { url: '/mock-api/admin/collect-log/analysis/by-raw/:rawLogId', method: 'get', response: (req) => { + // 简单模拟:返回全部分析供查看关联 + return { code: 0, data: { items: analyses } } + } }, + { url: '/mock-api/admin/collect-log/cycle', method: 'get', response: () => ({ code: 0, data: { items: cycles, total: cycles.length, page: 1, pageSize: 20 } }) }, + { url: '/mock-api/admin/collect-log/raw', method: 'get', response: () => 
({ code: 0, data: { items: raws, total: raws.length, page: 1, pageSize: 20 } }) }, +] + +export default mock diff --git a/frontend/src/api/collect-log.ts b/frontend/src/api/collect-log.ts new file mode 100644 index 0000000..a68a03e --- /dev/null +++ b/frontend/src/api/collect-log.ts @@ -0,0 +1,94 @@ +import request from '@/utils/request' +import type { ApiResponse, PaginatedResponse } from '@/types' + +// --- 采集日志数据模型 --- +export interface CollectAnalysis { + id: number + analysisTime: string + collectAddressId: number + addressName?: string + machineId: number + machineName?: string + analysisType: string + previousProgram?: string + currentProgram?: string + partCountDelta?: number + analysisSummary?: string +} + +export interface CollectCycle { + id: number + cycleTime: string + collectAddressId: number + addressName?: string + totalMachines: number + successCount: number + failCount: number + hasAnomaly: number + changeDistribution?: string + cycleSummary?: string +} + +export interface CollectRaw { + id: number + logTime: string + sourceAddress?: string + contentPreview?: string +} + +// --- 公开的 API 封装 --- +// 获取分析记录列表 +export function fetchAnalysisList(params?: { + page?: number + pageSize?: number + dateRange?: string[] | null + addressId?: number + machineId?: number + analysisType?: string + programName?: string + keyword?: string +}) { + return request.get<{ items: CollectAnalysis[]; total: number }>( + '/admin/collect-log/analysis', + { params } + ) +} + +// 获取分析详情 +export function fetchAnalysisDetail(id: number) { + return request.get(`/admin/collect-log/analysis/${id}`) +} + +// 根据原始日志检索分析记录 +export function fetchAnalysisByRaw(rawLogId: number | string) { + return request.get<{ items: CollectAnalysis[] }>(`/admin/collect-log/analysis/by-raw/${rawLogId}`) +} + +// 获取采集周期列表 +export function fetchCycleList(params?: { + page?: number + pageSize?: number + dateRange?: string[] | null + addressId?: number + hasAnomaly?: string +}) { + return request.get<{ 
items: CollectCycle[]; total: number }>( + '/admin/collect-log/cycle', + { params } + ) +} + +// 获取原始日志列表 +export function fetchRawList(params?: { + page?: number + pageSize?: number + dateRange?: string[] | null + addressId?: number +}) { + return request.get<{ items: CollectRaw[]; total: number }>( + '/admin/collect-log/raw', + { params } + ) +} + +export default {} diff --git a/frontend/src/layouts/AdminLayout.vue b/frontend/src/layouts/AdminLayout.vue index 7c85e3b..584f775 100644 --- a/frontend/src/layouts/AdminLayout.vue +++ b/frontend/src/layouts/AdminLayout.vue @@ -47,6 +47,10 @@ + + + + @@ -100,7 +104,7 @@ import { ref, computed } from 'vue' import { useRoute, useRouter } from 'vue-router' import { ElMessageBox, ElMessage } from 'element-plus' -import { ArrowDown } from '@element-plus/icons-vue' +import { ArrowDown, Notebook } from '@element-plus/icons-vue' import { useMockMode } from '@/composables/useMockMode' const route = useRoute() diff --git a/frontend/src/router/index.ts b/frontend/src/router/index.ts index 33fca33..615dbf4 100644 --- a/frontend/src/router/index.ts +++ b/frontend/src/router/index.ts @@ -26,6 +26,7 @@ const SettingsPage = () => import('@/views/settings/SettingsPage.vue') const LogPage = () => import('@/views/log/LogPage.vue') const ScreenConfigPage = () => import('@/views/screen-config/ScreenConfigPage.vue') const ScreenPage = () => import('@/views/screen/ScreenPage.vue') +const CollectLogPage = () => import('@/views/collect-log/CollectLogPage.vue') // 正常路由 const normalRoutes: RouteRecordRaw[] = [ @@ -43,6 +44,7 @@ const normalRoutes: RouteRecordRaw[] = [ { path: 'brand/:id/edit', name: 'BrandEdit', component: BrandEditPage, meta: { title: '编辑品牌' } }, { path: 'collect-address', name: 'CollectAddressList', component: CollectAddressListPage, meta: { title: '采集地址' } }, { path: 'collect-address/:id', name: 'CollectAddressDetail', component: CollectAddressDetailPage, meta: { title: '采集地址详情' } }, + { path: 'collect-log', name: 
'CollectLog', component: CollectLogPage, meta: { title: '采集日志' } }, { path: 'worker', name: 'WorkerList', component: WorkerListPage, meta: { title: '员工管理' } }, { path: 'worker/:id', name: 'WorkerDetail', component: WorkerDetailPage, meta: { title: '员工详情' } }, { path: 'production', name: 'Production', component: ProductionPage, meta: { title: '产量报表' } }, diff --git a/frontend/src/views/collect-log/CollectLogPage.vue b/frontend/src/views/collect-log/CollectLogPage.vue new file mode 100644 index 0000000..54fd490 --- /dev/null +++ b/frontend/src/views/collect-log/CollectLogPage.vue @@ -0,0 +1,389 @@ + + + + + diff --git a/src/CncCollector/Config/CollectorConfig.cs b/src/CncCollector/Config/CollectorConfig.cs index 1c23bed..4e89ba4 100644 --- a/src/CncCollector/Config/CollectorConfig.cs +++ b/src/CncCollector/Config/CollectorConfig.cs @@ -37,6 +37,18 @@ namespace CncCollector.Config [JsonProperty("dailySummaryTime")] public string DailySummaryTime { get; set; } = "01:00"; + /// 分析日志保留天数(0=不删除) + public int AnalysisLogRetentionDays { get; set; } = 0; + + /// 周期日志保留天数(0=不删除) + public int CycleLogRetentionDays { get; set; } = 0; + + /// 原始日志保留天数(0=不删除) + public int RawLogRetentionDays { get; set; } = 0; + + /// 日志清理检查间隔(分钟) + public int LogCleanupIntervalMinutes { get; set; } = 60; + /// 服务ID标识 [JsonProperty("serviceId")] public string ServiceId { get; set; } = "collector-service"; diff --git a/src/CncCollector/Core/CollectorEngine.cs b/src/CncCollector/Core/CollectorEngine.cs index 48e9602..fbf3027 100644 --- a/src/CncCollector/Core/CollectorEngine.cs +++ b/src/CncCollector/Core/CollectorEngine.cs @@ -1,4 +1,5 @@ using System; +using CncCollector.Jobs; using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; @@ -27,6 +28,8 @@ namespace CncCollector.Core private Timer _heartbeatTimer; private Timer _configPollTimer; private Timer _dailySummaryTimer; + private Timer _logCleanupTimer; + private LogCleanupJob _logCleanupJob; private 
DateTime _startTime; private long _totalSuccess; private long _totalFail; @@ -89,6 +92,15 @@ namespace CncCollector.Core _dailySummaryTimer = new Timer(OnDailySummaryCheck, null, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1)); + // 5. 启动日志清理定时器(从配置读取间隔,0 表示不启用) + _logCleanupJob = new LogCleanupJob(_config.LogConnection, _config); + if (_config.LogCleanupIntervalMinutes > 0) + { + _logCleanupTimer = new Timer(OnLogCleanup, null, + TimeSpan.FromMinutes(_config.LogCleanupIntervalMinutes), + TimeSpan.FromMinutes(_config.LogCleanupIntervalMinutes)); + } + _log.Info($"===== 采集引擎已启动({_workers.Count}个采集地址)====="); } @@ -116,6 +128,7 @@ namespace CncCollector.Core _heartbeatTimer?.Dispose(); _configPollTimer?.Dispose(); _dailySummaryTimer?.Dispose(); + _logCleanupTimer?.Dispose(); // 写入停止状态心跳 WriteHeartbeat("stopped"); @@ -332,5 +345,20 @@ namespace CncCollector.Core _log.Error("日终汇总检查失败", ex); } } + + /// + /// 日志清理定时回调 + /// + private void OnLogCleanup(object state) + { + try + { + _logCleanupJob?.Execute(); + } + catch (Exception ex) + { + _log.Error("日志清理任务执行失败", ex); + } + } } } diff --git a/src/CncCollector/Jobs/LogCleanupJob.cs b/src/CncCollector/Jobs/LogCleanupJob.cs new file mode 100644 index 0000000..76777ba --- /dev/null +++ b/src/CncCollector/Jobs/LogCleanupJob.cs @@ -0,0 +1,84 @@ +using System; +using Dapper; +using MySqlConnector; +using CncCollector.Config; +using log4net; + +namespace CncCollector.Jobs +{ + /// + /// 日志清理定时任务。 + /// 根据配置的保留天数清理日志数据。保留天数=0表示不删除。 + /// + public class LogCleanupJob + { + private static readonly ILog _log = LogManager.GetLogger(typeof(LogCleanupJob)); + + private readonly string _logConnection; + private readonly CollectorConfig _config; + + public LogCleanupJob(string logConnection, CollectorConfig config) + { + _logConnection = logConnection; + _config = config; + } + + /// + /// 执行日志清理 + /// + public void Execute() + { + try + { + int total = 0; + using (var conn = new MySqlConnection(_logConnection)) + { + // 1) 采集分析日志 + 
int daysA = Math.Max(_config.AnalysisLogRetentionDays, 0); + if (daysA > 0) + { + string sqlA = $"DELETE FROM cnc_log.log_collect_analysis WHERE analysis_time < DATE_SUB(NOW(), INTERVAL {daysA} DAY)"; + int del = conn.Execute(sqlA); + total += del; + _log.Info($"日志清理: log_collect_analysis 删除 {del} 行,保留 {daysA} 天"); + } + + // 2) 采集周期日志 + int daysC = Math.Max(_config.CycleLogRetentionDays, 0); + if (daysC > 0) + { + string sqlC = $"DELETE FROM cnc_log.log_collect_cycle WHERE cycle_time < DATE_SUB(NOW(), INTERVAL {daysC} DAY)"; + int del = conn.Execute(sqlC); + total += del; + _log.Info($"日志清理: log_collect_cycle 删除 {del} 行,保留 {daysC} 天"); + } + + // 3) 原始日志 + int daysR = Math.Max(_config.RawLogRetentionDays, 0); + if (daysR > 0) + { + // 尝试使用 created_at 字段,如不存在再回退到 request_time + string sqlR = $"DELETE FROM cnc_log.log_collect_raw WHERE created_at < DATE_SUB(NOW(), INTERVAL {daysR} DAY)"; + int del = 0; + try + { + del = conn.Execute(sqlR); + } + catch + { + string sqlR2 = $"DELETE FROM cnc_log.log_collect_raw WHERE request_time < DATE_SUB(NOW(), INTERVAL {daysR} DAY)"; + del = conn.Execute(sqlR2); + } + total += del; + _log.Info($"日志清理: log_collect_raw 删除 {del} 行,保留 {daysR} 天"); + } + } + _log.Info($"日志清理完成,总删除记录数: {total}"); + } + catch (Exception ex) + { + _log.Error("执行日志清理任务失败", ex); + } + } + } +} diff --git a/src/CncModels/Enum/AlertType.cs b/src/CncModels/Enum/AlertType.cs index b63d7a5..de0c1b8 100644 --- a/src/CncModels/Enum/AlertType.cs +++ b/src/CncModels/Enum/AlertType.cs @@ -19,6 +19,9 @@ namespace CncModels.Enum /// 未知设备 public const string UnknownDevice = "unknown_device"; + /// 数据异常 + public const string DataAnomaly = "data_anomaly"; + /// 服务错误 public const string ServiceError = "service_error"; } diff --git a/tests/CncService.Tests/LogSerializationTests.cs b/tests/CncService.Tests/LogSerializationTests.cs new file mode 100644 index 0000000..f417ca1 --- /dev/null +++ b/tests/CncService.Tests/LogSerializationTests.cs @@ -0,0 +1,29 @@ +using 
System.Text.Json; +using Xunit; +using CncService.LogAnalyzer; +using CncService.Models; + +namespace CncService.Tests +{ + public class LogSerializationTests + { + [Fact] + public void LogAnalysisResult_Serialize_ToJson_Includes_Summary() + { + // Arrange + var analysis = new LogAnalysisResult + { + Summary = "New log entry analyzed: no changes", + DetailsJson = "{\"change\":false}", + Confidence = 0.92 + }; + + // Act + var json = JsonSerializer.Serialize(analysis); + + // Assert + Assert.Contains("Summary", json); + Assert.Contains("New log entry analyzed", json); + } + } +} diff --git a/tests/LogsDashboard.test.ts b/tests/LogsDashboard.test.ts new file mode 100644 index 0000000..44fce2b --- /dev/null +++ b/tests/LogsDashboard.test.ts @@ -0,0 +1,33 @@ +import { describe, it, expect } from 'vitest' + +type LogItem = { + id: string + timestamp: string + machineId: string + programName: string + level: string + message: string +} + +type DashboardData = { + total: number + counts: Record + logs: LogItem[] + analysis?: string +} + +describe('日志看板数据结构', () => { + it('应包含 logs、counts、total 字段且类型正确', () => { + const sample: DashboardData = { + total: 5, + counts: { ERROR: 1, INFO: 4 }, + logs: [ + { id: 'l1', timestamp: '2026-05-01T12:00:00Z', machineId: 'M1', programName: 'ProgA', level: 'ERROR', message: 'Something failed' } + ], + analysis: '最近一次采集无显著趋势' + } + expect(sample).toHaveProperty('logs') + expect(sample).toHaveProperty('counts') + expect(typeof sample.total).toBe('number') + }) +}) diff --git a/tests/partitioned_logs_tests.md b/tests/partitioned_logs_tests.md new file mode 100644 index 0000000..8df68ec --- /dev/null +++ b/tests/partitioned_logs_tests.md @@ -0,0 +1,11 @@ +测试用例草案 +- 用例1:分区创建与存在性 + - 执行 partitioned_logs.sql,确认创建表与分区存在 +- 用例2:写入分区数据及分析摘要 + - 插入若干行数据,日志时间分布在不同月份,验证数据写入到了相应分区,analysis_summary 非空 +- 用例3:API 查询最新日志及分析摘要 + - 调用 API 获取最近日志,校验字段 presence +- 用例4:分区查询性能 + - 针对历史月份的日志执行查询,验证分区裁剪效果 +- 依赖:MariaDB 实例、API 服务的可用端点 +- 
评估标准:分区创建成功,数据写入正确,对应分区检索正确,API 返回最新摘要