diff --git a/haobang-security-xdr/logs/syslog-client.log b/haobang-security-xdr/logs/syslog-client.log index 83b468e..a49d649 100644 --- a/haobang-security-xdr/logs/syslog-client.log +++ b/haobang-security-xdr/logs/syslog-client.log @@ -1,4 +1,4 @@ -2025-12-17 23:59:44.041 [main] INFO c.h.syslog.syslogClientApplication - Starting syslogClientApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 17184 (E:\GIT_GOSAME\haobang-security-xdr\syslog-client\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2025-12-17 23:59:44.061 [main] INFO c.h.syslog.syslogClientApplication - No active profile set, falling back to 1 default profile: "default" -2025-12-17 23:59:45.393 [main] INFO c.h.syslog.syslogClientApplication - Started syslogClientApplication in 2.275 seconds (JVM running for 9.275) -2025-12-17 23:59:45.440 [TCP] INFO com.haobang.syslog.ClientClass - 设置syslog端口:514 +2026-02-05 14:48:19.665 [main] INFO c.h.syslog.syslogClientApplication - Starting syslogClientApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 24728 (E:\GIT_GOSAME\ai-security-xdr\haobang-security-xdr\syslog-client\target\classes started by chenc in E:\GIT_GOSAME\ai-security-xdr\haobang-security-xdr) +2026-02-05 14:48:19.670 [main] INFO c.h.syslog.syslogClientApplication - No active profile set, falling back to 1 default profile: "default" +2026-02-05 14:48:20.191 [main] INFO c.h.syslog.syslogClientApplication - Started syslogClientApplication in 0.904 seconds (JVM running for 5.256) +2026-02-05 14:48:20.207 [TCP] INFO com.haobang.syslog.ClientClass - syslog˿ڣ514 diff --git a/haobang-security-xdr/logs/syslog-consumer.log b/haobang-security-xdr/logs/syslog-consumer.log index 3ec2a32..d23c8d0 100644 --- a/haobang-security-xdr/logs/syslog-consumer.log +++ b/haobang-security-xdr/logs/syslog-consumer.log @@ -1,41 +1,45 @@ -2026-01-10 13:27:26.100 [main] INFO com.syslogApplication - Starting syslogApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 12944 
(E:\GIT_GOSAME\haobang-security-xdr\syslog-consumer\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-10 13:27:26.104 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-10 13:27:26.108 [main] INFO com.syslogApplication - No active profile set, falling back to 1 default profile: "default" -2026-01-10 13:27:28.693 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:27:28.693 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Elasticsearch repositories in DEFAULT mode. -2026-01-10 13:27:29.287 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 588 ms. Found 1 Elasticsearch repository interfaces. -2026-01-10 13:27:29.294 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:27:29.294 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Reactive Elasticsearch repositories in DEFAULT mode. -2026-01-10 13:27:29.457 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Reactive Elasticsearch - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Reactive Elasticsearch repository, consider annotating your entities with one of these annotations: org.springframework.data.elasticsearch.annotations.Document (preferred), or consider extending one of the following types with your repository: org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository -2026-01-10 13:27:29.457 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 161 ms. Found 0 Reactive Elasticsearch repository interfaces. 
-2026-01-10 13:27:29.479 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:27:29.479 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. -2026-01-10 13:27:29.655 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository -2026-01-10 13:27:29.655 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 163 ms. Found 0 Redis repository interfaces. 
-2026-01-10 13:27:30.440 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8089 (http) -2026-01-10 13:27:30.451 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8089"] -2026-01-10 13:27:30.451 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-10 13:27:30.451 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-10 13:27:30.727 [main] INFO o.a.c.c.C.[.[.[/xdrservice] - Initializing Spring embedded WebApplicationContext -2026-01-10 13:27:30.727 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 4552 ms -2026-01-10 13:27:30.777 [main] INFO o.s.b.f.a.AutowiredAnnotationBeanPostProcessor - Autowired annotation is not supported on static fields: private static com.common.service.DmColumnService com.syslogApplication.dmColumnService -2026-01-10 13:27:33.588 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 13:27:33.860 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.insert] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Insert] -2026-01-10 13:27:33.875 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.update] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Update] -2026-01-10 13:27:33.907 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.deleteById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.DeleteById] -2026-01-10 13:27:33.910 [main] WARN 
c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.selectById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.SelectById] -2026-01-10 13:27:33.960 [main] ERROR c.b.m.core.MybatisConfiguration - mapper[com.common.mapper.SecExceptionAlgorithmMapper.findById] is ignored, because it exists, maybe from xml file -2026-01-10 13:27:38.866 [main] INFO c.c.service.AccessLogAlertService - 初始化AccessLogAlertService,上次处理时间: 2026-01-10T13:25:38.866 -2026-01-10 13:27:38.882 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-10 13:27:39.100 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. -2026-01-10 13:27:39.131 [main] INFO c.c.service.AccessLogAlertService - 加载了 2 个启用的算法配置 -2026-01-10 13:27:39.940 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 13:27:40.248 [main] INFO com.common.util.MyBatisUtil - MyBatis 初始化成功 -2026-01-10 13:27:41.102 [main] INFO org.quartz.impl.StdSchedulerFactory - Using default implementation for ThreadExecutor -2026-01-10 13:27:41.112 [main] INFO o.quartz.core.SchedulerSignalerImpl - Initialized Scheduler Signaller of type: class org.quartz.core.SchedulerSignalerImpl -2026-01-10 13:27:41.112 [main] INFO org.quartz.core.QuartzScheduler - Quartz Scheduler v.2.3.2 created. -2026-01-10 13:27:41.112 [main] INFO org.quartz.simpl.RAMJobStore - RAMJobStore initialized. 
-2026-01-10 13:27:41.112 [main] INFO org.quartz.core.QuartzScheduler - Scheduler meta-data: Quartz Scheduler (v2.3.2) 'quartzScheduler' with instanceId 'NON_CLUSTERED' +2026-03-09 18:20:29.258 [main] INFO com.syslogApplication - Starting syslogApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 31516 (E:\GIT_GOSAME\ai-security-xdr\haobang-security-xdr\syslog-consumer\target\classes started by chenc in E:\GIT_GOSAME\ai-security-xdr\haobang-security-xdr) +2026-03-09 18:20:29.258 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final +2026-03-09 18:20:29.264 [main] INFO com.syslogApplication - No active profile set, falling back to 1 default profile: "default" +2026-03-09 18:20:32.501 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode +2026-03-09 18:20:32.504 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Elasticsearch repositories in DEFAULT mode. +2026-03-09 18:20:33.247 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 735 ms. Found 1 Elasticsearch repository interfaces. +2026-03-09 18:20:33.255 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode +2026-03-09 18:20:33.256 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Reactive Elasticsearch repositories in DEFAULT mode. 
+2026-03-09 18:20:33.435 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Reactive Elasticsearch - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Reactive Elasticsearch repository, consider annotating your entities with one of these annotations: org.springframework.data.elasticsearch.annotations.Document (preferred), or consider extending one of the following types with your repository: org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository +2026-03-09 18:20:33.435 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 179 ms. Found 0 Reactive Elasticsearch repository interfaces. +2026-03-09 18:20:33.460 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode +2026-03-09 18:20:33.461 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. +2026-03-09 18:20:33.643 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository +2026-03-09 18:20:33.643 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 167 ms. Found 0 Redis repository interfaces. 
+2026-03-09 18:20:34.518 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8089 (http) +2026-03-09 18:20:34.530 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8089"] +2026-03-09 18:20:34.531 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] +2026-03-09 18:20:34.531 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] +2026-03-09 18:20:34.885 [main] INFO o.a.c.c.C.[.[.[/xdrservice] - Initializing Spring embedded WebApplicationContext +2026-03-09 18:20:34.885 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 5554 ms +2026-03-09 18:20:34.950 [main] INFO o.s.b.f.a.AutowiredAnnotationBeanPostProcessor - Autowired annotation is not supported on static fields: private static com.common.service.DmColumnService com.syslogApplication.dmColumnService +2026-03-09 18:20:37.863 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes +2026-03-09 18:20:38.381 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.insert] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Insert] +2026-03-09 18:20:38.394 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.update] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Update] +2026-03-09 18:20:38.410 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.deleteById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.DeleteById] +2026-03-09 18:20:38.414 [main] WARN 
c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.selectById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.SelectById] +2026-03-09 18:20:38.469 [main] ERROR c.b.m.core.MybatisConfiguration - mapper[com.common.mapper.SecExceptionAlgorithmMapper.findById] is ignored, because it exists, maybe from xml file +2026-03-09 18:20:44.376 [main] INFO c.c.s.RealtimeAnalysisScheduler - ========== ʼʵʱ ========== +2026-03-09 18:20:44.398 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-SyslogConsumer - Starting... +2026-03-09 18:20:45.062 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-SyslogConsumer - Start completed. +2026-03-09 18:20:45.249 [main] INFO c.c.s.RealtimeAnalysisScheduler - ѯ 0 ʵʱ +2026-03-09 18:20:45.250 [main] INFO c.c.s.RealtimeAnalysisScheduler - ========== ʵʱʼ ========== +2026-03-09 18:20:45.256 [main] INFO o.s.b.f.a.AutowiredAnnotationBeanPostProcessor - Autowired annotation is not supported on static fields: public static com.common.service.DeviceDeviceService com.common.service.AccessLogAlertService.deviceDeviceService +2026-03-09 18:20:45.296 [main] INFO c.c.service.AccessLogAlertService - ʼAccessLogAlertServiceϴδʱ: 2026-03-09T18:19:45.296 +2026-03-09 18:20:45.457 [main] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:20:46.497 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes +2026-03-09 18:20:46.694 [main] INFO com.common.util.MyBatisUtil - MyBatis ʼɹ +2026-03-09 18:20:47.630 [main] INFO org.quartz.impl.StdSchedulerFactory - Using default implementation for ThreadExecutor +2026-03-09 18:20:47.642 [main] INFO o.quartz.core.SchedulerSignalerImpl - Initialized Scheduler Signaller of type: class org.quartz.core.SchedulerSignalerImpl +2026-03-09 18:20:47.643 [main] INFO org.quartz.core.QuartzScheduler - Quartz Scheduler v.2.3.2 created. 
+2026-03-09 18:20:47.644 [main] INFO org.quartz.simpl.RAMJobStore - RAMJobStore initialized. +2026-03-09 18:20:47.644 [main] INFO org.quartz.core.QuartzScheduler - Scheduler meta-data: Quartz Scheduler (v2.3.2) 'quartzScheduler' with instanceId 'NON_CLUSTERED' Scheduler class: 'org.quartz.core.QuartzScheduler' - running locally. NOT STARTED. Currently in standby mode. @@ -43,1714 +47,329 @@ Using thread pool 'org.quartz.simpl.SimpleThreadPool' - with 10 threads. Using job-store 'org.quartz.simpl.RAMJobStore' - which does not support persistence. and is not clustered. -2026-01-10 13:27:41.112 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler 'quartzScheduler' initialized from an externally provided properties instance. -2026-01-10 13:27:41.112 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler version: 2.3.2 -2026-01-10 13:27:41.112 [main] INFO org.quartz.core.QuartzScheduler - JobFactory set to: org.springframework.scheduling.quartz.SpringBeanJobFactory@1de78f97 -2026-01-10 13:27:41.285 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 13:27:41.285 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 13:27:41.285 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768022861284 -2026-01-10 13:27:41.303 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 13:27:41.303 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 13:27:41.304 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768022861303 -2026-01-10 13:27:41.304 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8089"] -2026-01-10 13:27:41.320 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8089 (http) with context path '/xdrservice' -2026-01-10 13:27:41.320 [main] INFO o.s.s.quartz.SchedulerFactoryBean - Starting Quartz Scheduler 
now -2026-01-10 13:27:41.320 [main] INFO org.quartz.core.QuartzScheduler - Scheduler quartzScheduler_$_NON_CLUSTERED started. -2026-01-10 13:27:41.338 [main] INFO com.syslogApplication - Started syslogApplication in 15.567 seconds (JVM running for 20.706) -2026-01-10 13:27:43.685 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - agent-syslog-group: partitions assigned: [] -2026-01-10 13:27:43.685 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - agent-syslog-group: partitions assigned: [] -2026-01-10 13:28:00.008 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:28:00.008 [scheduling-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:28:00.027 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 13:28:00.027 [scheduling-3] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:28:00.003晚于默认保留时间2026-01-03T13:28:00.027,使用默认时间 -2026-01-10 13:28:00.038 [scheduling-1] INFO c.c.service.AccessLogAlertService - 加载了 2 个启用的算法配置 -2026-01-10 13:28:00.039 [log-processor-1] INFO c.c.service.AccessLogAlertService - 加载了 2 个启用的算法配置 -2026-01-10 13:28:00.044 [scheduling-3] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:28:00.027天前的日志,共删除0条 -2026-01-10 13:28:00.057 [scheduling-3] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:28:00.367 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:2,耗时:340ms -2026-01-10 13:28:00.367 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:28:00.367 -2026-01-10 13:28:00.369 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:28:00.369 -2026-01-10 13:28:00.396 [scheduling-1] INFO c.c.service.AccessLogAlertService - 获取到 938 条新的日志数据,时间范围: 2026-01-10T13:25:38.866 到 
2026-01-10T13:28:00.039 -2026-01-10 13:28:00.396 [scheduling-1] INFO c.c.service.AccessLogAlertService - 开始处理算法: Webshell 算法 (ID: 2004037120094425090) -2026-01-10 13:28:00.475 [log-processor-1] INFO c.c.service.AccessLogAlertService - 获取到 942 条新的日志数据,时间范围: 2026-01-10T13:25:38.866 到 2026-01-10T13:28:00.039 -2026-01-10 13:28:00.475 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始处理算法: Webshell 算法 (ID: 2004037120094425090) -2026-01-10 13:28:02.619 [log-processor-1] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/Webshell]: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:793) - at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) - at 
org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:763) - at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) - at java.util.concurrent.FutureTask.run(FutureTask.java:266) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - 
at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 
15 common frames omitted -2026-01-10 13:28:02.619 [scheduling-1] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/Webshell]: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService.safeProcessTask(AccessLogAlertService.java:387) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy.invokeMethod(CglibAopProxy.java:386) - at org.springframework.aop.framework.CglibAopProxy.access$000(CglibAopProxy.java:85) - at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:704) - at com.common.service.AccessLogAlertService$$EnhancerBySpringCGLIB$$b85131f2.safeProcessTask() - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at 
java.lang.reflect.Method.invoke(Method.java:498) - at org.springframework.scheduling.support.ScheduledMethodRunnable.run(ScheduledMethodRunnable.java:84) - at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54) - at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:95) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) - at java.util.concurrent.FutureTask.run(FutureTask.java:266) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at 
sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 
26 common frames omitted -2026-01-10 13:28:02.623 [log-processor-1] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/Webshell - 无响应 -2026-01-10 13:28:02.623 [scheduling-1] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/Webshell - 无响应 -2026-01-10 13:28:02.623 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始处理算法: 测试算法1111 (ID: 2004133377664204801) -2026-01-10 13:28:02.623 [scheduling-1] INFO c.c.service.AccessLogAlertService - 开始处理算法: 测试算法1111 (ID: 2004133377664204801) -2026-01-10 13:28:03.611 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 4,已更新: 1 -2026-01-10 13:28:03.611 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 3244ms -2026-01-10 13:28:04.729 [scheduling-1] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/outtoin_php]: I/O error on POST request for "http://192.168.4.33:5001/outtoin_php": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/outtoin_php": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService.safeProcessTask(AccessLogAlertService.java:387) - at 
com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy.invokeMethod(CglibAopProxy.java:386) - at org.springframework.aop.framework.CglibAopProxy.access$000(CglibAopProxy.java:85) - at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:704) - at com.common.service.AccessLogAlertService$$EnhancerBySpringCGLIB$$b85131f2.safeProcessTask() - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:498) - at org.springframework.scheduling.support.ScheduledMethodRunnable.run(ScheduledMethodRunnable.java:84) - at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54) - at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:95) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) - at java.util.concurrent.FutureTask.run(FutureTask.java:266) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at 
java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at 
org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 26 common frames omitted -2026-01-10 13:28:04.739 [scheduling-1] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/outtoin_php - 无响应 -2026-01-10 13:28:04.739 [scheduling-1] INFO c.c.service.AccessLogAlertService - 访问日志告警处理任务完成,下次将从 2026-01-10T13:28:00.039 开始处理 -2026-01-10 13:28:04.746 [log-processor-1] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/outtoin_php]: I/O error on POST request for "http://192.168.4.33:5001/outtoin_php": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/outtoin_php": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at 
org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:793) - at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) - at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:763) - at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) - at java.util.concurrent.FutureTask.run(FutureTask.java:266) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at 
sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 15 common frames omitted -2026-01-10 13:28:04.754 [log-processor-1] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/outtoin_php - 无响应 -2026-01-10 13:28:04.754 [log-processor-1] INFO c.c.service.AccessLogAlertService - 访问日志告警处理任务完成,下次将从 2026-01-10T13:28:00.039 开始处理 -2026-01-10 13:29:00.009 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:29:00.013 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:29:00.003晚于默认保留时间2026-01-03T13:29:00.013,使用默认时间 -2026-01-10 13:29:00.026 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:29:00.013天前的日志,共删除0条 -2026-01-10 13:29:00.035 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:29:00.318 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:2,耗时:309ms -2026-01-10 13:29:00.318 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:29:00.318 -2026-01-10 13:29:00.318 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:29:00.318 -2026-01-10 13:29:03.556 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 4,已更新: 1 -2026-01-10 13:29:03.556 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 3238ms -2026-01-10 13:34:08.884 [main] INFO com.syslogApplication - Starting syslogApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 22388 (E:\GIT_GOSAME\haobang-security-xdr\syslog-consumer\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-10 13:34:08.887 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-10 13:34:08.889 [main] INFO com.syslogApplication - No active profile set, falling back to 1 default profile: "default" -2026-01-10 13:34:11.245 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:34:11.247 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Elasticsearch repositories in DEFAULT mode. -2026-01-10 13:34:11.934 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 681 ms. Found 1 Elasticsearch repository interfaces. 
-2026-01-10 13:34:11.934 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:34:11.934 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Reactive Elasticsearch repositories in DEFAULT mode. -2026-01-10 13:34:12.025 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Reactive Elasticsearch - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Reactive Elasticsearch repository, consider annotating your entities with one of these annotations: org.springframework.data.elasticsearch.annotations.Document (preferred), or consider extending one of the following types with your repository: org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository -2026-01-10 13:34:12.025 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 86 ms. Found 0 Reactive Elasticsearch repository interfaces. -2026-01-10 13:34:12.031 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:34:12.031 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. 
-2026-01-10 13:34:12.152 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository -2026-01-10 13:34:12.152 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 112 ms. Found 0 Redis repository interfaces. -2026-01-10 13:34:12.700 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8089 (http) -2026-01-10 13:34:12.706 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8089"] -2026-01-10 13:34:12.706 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-10 13:34:12.706 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-10 13:34:12.902 [main] INFO o.a.c.c.C.[.[.[/xdrservice] - Initializing Spring embedded WebApplicationContext -2026-01-10 13:34:12.902 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 3923 ms -2026-01-10 13:34:12.945 [main] INFO o.s.b.f.a.AutowiredAnnotationBeanPostProcessor - Autowired annotation is not supported on static fields: private static com.common.service.DmColumnService com.syslogApplication.dmColumnService -2026-01-10 13:34:15.402 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 13:34:15.623 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.insert] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this 
injection for [class com.baomidou.mybatisplus.core.injector.methods.Insert] -2026-01-10 13:34:15.634 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.update] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Update] -2026-01-10 13:34:15.659 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.deleteById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.DeleteById] -2026-01-10 13:34:15.664 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.selectById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.SelectById] -2026-01-10 13:34:15.701 [main] ERROR c.b.m.core.MybatisConfiguration - mapper[com.common.mapper.SecExceptionAlgorithmMapper.findById] is ignored, because it exists, maybe from xml file -2026-01-10 13:34:19.854 [main] INFO c.c.service.AccessLogAlertService - 初始化AccessLogAlertService,上次处理时间: 2026-01-10T13:32:19.854 -2026-01-10 13:34:19.874 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-10 13:34:20.472 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. 
-2026-01-10 13:34:20.572 [main] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:34:21.190 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 13:34:21.459 [main] INFO com.common.util.MyBatisUtil - MyBatis 初始化成功 -2026-01-10 13:34:22.148 [main] INFO org.quartz.impl.StdSchedulerFactory - Using default implementation for ThreadExecutor -2026-01-10 13:34:22.153 [main] INFO o.quartz.core.SchedulerSignalerImpl - Initialized Scheduler Signaller of type: class org.quartz.core.SchedulerSignalerImpl -2026-01-10 13:34:22.153 [main] INFO org.quartz.core.QuartzScheduler - Quartz Scheduler v.2.3.2 created. -2026-01-10 13:34:22.153 [main] INFO org.quartz.simpl.RAMJobStore - RAMJobStore initialized. -2026-01-10 13:34:22.153 [main] INFO org.quartz.core.QuartzScheduler - Scheduler meta-data: Quartz Scheduler (v2.3.2) 'quartzScheduler' with instanceId 'NON_CLUSTERED' - Scheduler class: 'org.quartz.core.QuartzScheduler' - running locally. - NOT STARTED. - Currently in standby mode. - Number of jobs executed: 0 - Using thread pool 'org.quartz.simpl.SimpleThreadPool' - with 10 threads. - Using job-store 'org.quartz.simpl.RAMJobStore' - which does not support persistence. and is not clustered. +2026-03-09 18:20:47.644 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler 'quartzScheduler' initialized from an externally provided properties instance. 
+2026-03-09 18:20:47.644 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler version: 2.3.2 +2026-03-09 18:20:47.645 [main] INFO org.quartz.core.QuartzScheduler - JobFactory set to: org.springframework.scheduling.quartz.SpringBeanJobFactory@25297d52 +2026-03-09 18:20:47.838 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 +2026-03-09 18:20:47.838 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 +2026-03-09 18:20:47.838 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1773051647836 +2026-03-09 18:20:47.859 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 +2026-03-09 18:20:47.859 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 +2026-03-09 18:20:47.859 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1773051647859 +2026-03-09 18:20:47.861 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8089"] +2026-03-09 18:20:47.878 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8089 (http) with context path '/xdrservice' +2026-03-09 18:20:47.879 [main] INFO o.s.s.quartz.SchedulerFactoryBean - Starting Quartz Scheduler now +2026-03-09 18:20:47.880 [main] INFO org.quartz.core.QuartzScheduler - Scheduler quartzScheduler_$_NON_CLUSTERED started. 
+2026-03-09 18:20:47.897 [main] INFO com.syslogApplication - Started syslogApplication in 19.043 seconds (JVM running for 24.576) +2026-03-09 18:20:48.685 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [] +2026-03-09 18:20:48.753 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [test-topic-0] +2026-03-09 18:21:00.012 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - ETLʼִУʼʱ䣺2026-03-09 18:15:00,ʱ䣺2026-03-09 18:20:00 +2026-03-09 18:21:00.017 [scheduling-1] INFO com.common.service.DataExtractor - ʼ澯ָʱ䷶Χݣʱ䷶Χ: 2026-03-09T18:15 - 2026-03-09T18:20 +2026-03-09 18:21:00.017 [log-processor-1] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:21:00.017 [scheduling-5] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:21:00.099 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:21:00.186 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸0ʱ87ms +2026-03-09 18:21:00.186 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:21:00.186 +2026-03-09 18:21:00.191 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:21:00.191 +2026-03-09 18:21:00.243 [log-processor-1] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:21:00.243 [scheduling-5] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:21:00.250 [scheduling-1] INFO com.common.service.DataExtractor - ָʱ䷶Χ: 0 +2026-03-09 18:21:00.250 [scheduling-1] INFO com.common.service.DataExtractor - ûҪ +2026-03-09 18:21:00.250 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - ʱETLִɣʱ: 0 +2026-03-09 18:21:00.250 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - ʼִзʱʱ䣺2026-03-09T18:21:00.250 +2026-03-09 18:21:00.672 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:21:00.672 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 484ms +2026-03-09 18:21:00.833 [scheduling-5] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:19:45.296 +2026-03-09 18:21:00.833 [log-processor-1] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:19:45.296 +2026-03-09 18:21:00.915 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - syslog_normal_data ͳƵ 0 м¼ +2026-03-09 18:21:00.915 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - syslog_normal_alarm ͳƵ 0 м¼ +2026-03-09 18:21:00.915 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - ϲҪµĹ0 +2026-03-09 18:21:01.069 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - ǰ״̬Ĺ173 +2026-03-09 18:21:01.069 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - ʼ£1731 +2026-03-09 18:21:01.070 [scheduling-1] INFO c.c.s.NormalizeRuleHitTimeService - ʱɣ¹0ʱ820ms +2026-03-09 18:21:32.055 [http-nio-8089-exec-1] INFO o.s.web.servlet.DispatcherServlet - Initializing Servlet 
'dispatcherServlet' +2026-03-09 18:21:32.060 [http-nio-8089-exec-1] INFO o.s.web.servlet.DispatcherServlet - Completed initialization in 5 ms +2026-03-09 18:21:32.233 [http-nio-8089-exec-1] INFO com.controllers.SyslogPushController - յsyslog: SyslogRequest{ip='192.168.1.19', port=514, logContent='<0> 2026-01-12T14:37:53+08:00 ubuntu log_forward[3419]: {"flow_id": 1028204815001825, "serial_num": "CJFBT92", "src_ip": "120.238.245.132", "src_port": 60838, "dest_ip": "211.136.192.6", "dest_port": 53, "proto": "UDP", "app_proto": "dns", "direction": "CTS", "attacker_ip": "120.238.245.132", "victim_ip": "211.136.192.6", "rule_id": "0x20001e", "rule_name": "ִDNSΪ", "attack_type": "̽", "severity": "1", "bulletin": "ȷܺԼϢʱ", "detail_info": "DNSLOGַ", "vuln_type": "̽", "vuln_desc": "DNSLOGַ", "vuln_harm": "DNSLOGַ", "tags": "dnslog", "cnnvd_id": null, "cve_id": null, "killchain": "", "enable": "", "attack_result": "ͼ", "attack_method": "Զ", "site_app": null, "code_language": "ͨ", "att_ck": "TA0002", "timestamp": "2026-01-12T14:37:53.588+0800", "custom": "{}", "feature_field": "", "feature_payload": "", "": null, "payload": "SQkBAAABAAAAAAAAB3BvbGxpbmcHb2FzdGlmeQNjb20AAAEAAQ==", "packet_size": 37, "pcap_file": ""}', protocol='TCP', facility='USER', severity='INFO'} +2026-03-09 18:21:32.234 [http-nio-8089-exec-1] INFO com.common.service.SyslogService - ʼsyslogϢ: IP=192.168.1.19, Port=514 +2026-03-09 18:21:32.235 [http-nio-8089-exec-1] INFO com.common.service.SyslogService - TCP SyslogϢͳɹ: 192.168.1.19:514 +2026-03-09 18:21:32.235 [http-nio-8089-exec-1] INFO com.controllers.SyslogPushController - SyslogϢͳɹ: IP=192.168.1.19, Port=514 +2026-03-09 18:21:34.502 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - ʼϢ: 1 +2026-03-09 18:21:34.502 [log-processor-2] INFO c.Modules.NormalData.SysLogProcessor - յsyslogmessage[receive_time=20260309182133303 device_id=103 device_name=˾ڲ̽ vendor=null data_type=json 
device_collect_id=1]<0> 2026-01-12T14:37:53+08:00 ubuntu log_forward[3419]: {"flow_id": 1028204815001825, "serial_num": "CJFBT92", "src_ip": "120.238.245.132", "src_port": 60838, "dest_ip": "211.136.192.6", "dest_port": 53, "proto": "UDP", "app_proto": "dns", "direction": "CTS", "attacker_ip": "120.238.245.132", "victim_ip": "211.136.192.6", "rule_id": "0x20001e", "rule_name": "???????????DNS???????", "attack_type": "???????", "severity": "1", "bulletin": "??????????????????????????????????", "detail_info": "????????????????DNSLOG?????????", "vuln_type": "???????", "vuln_desc": "????????????????DNSLOG?????????", "vuln_harm": "????????????????DNSLOG?????????", "tags": "dnslog", "cnnvd_id": null, "cve_id": null, "killchain": "??????", "enable": "????", "attack_result": "???", "attack_method": "???", "site_app": null, "code_language": "???", "att_ck": "TA0002", "timestamp": "2026-01-12T14:37:53.588+0800", "custom": "{}", "feature_field": "", "feature_payload": "", "": null, "payload": "SQkBAAABAAAAAAAAB3BvbGxpbmcHb2FzdGlmeQNjb20AAAEAAQ==", "packet_size": 37, "pcap_file": ""} +2026-03-09 18:21:40.696 [log-processor-2] ERROR c.M.NormalData.LogNormalProcessor - OrginalColumnMap ȡΪ +2026-03-09 18:21:41.051 [log-processor-2] ERROR c.M.NormalData.LogNormalProcessor - OrginalColumnMap ȡΪ +2026-03-09 18:21:41.062 [log-processor-2] ERROR c.M.NormalData.LogNormalProcessor - OrginalColumnMap ȡΪ +2026-03-09 18:21:41.153 [log-processor-2] WARN c.c.service.LogDataFilterService - -ݹ˹ΪգĬϲ! +2026-03-09 18:21:41.611 [log-processor-2] ERROR c.c.service.LogDataFilterService - ˹ʧܻfilters_paramsΪ: null +2026-03-09 18:21:41.797 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - δɣ: 1 +2026-03-09 18:22:00.006 [scheduling-1] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:22:00.006 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:22:00.007 [log-processor-3] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:22:00.168 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ162ms +2026-03-09 18:22:00.168 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:22:00.168 +2026-03-09 18:22:00.168 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:22:00.168 +2026-03-09 18:22:00.236 [log-processor-3] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:22:00.238 [scheduling-1] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:22:00.602 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:22:00.602 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 434ms +2026-03-09 18:22:00.638 [scheduling-1] INFO c.c.service.AccessLogAlertService - ȡ 1 µ־ݣʱ䷶Χ: 2026-03-09T18:19:45.296 2026-03-09T18:22:00.238 +2026-03-09 18:22:00.638 [scheduling-1] INFO c.c.service.AccessLogAlertService - ʼ㷨: 㷨3 (ID: 2004083121877696514) +2026-03-09 18:22:00.720 [scheduling-1] INFO c.c.service.AccessLogAlertService - 㷨 㷨3 δ⵽澯 +2026-03-09 18:22:00.722 [scheduling-1] INFO c.c.service.AccessLogAlertService - ־澯ɣ´ν 2026-03-09T18:22:00.238 ʼ +2026-03-09 18:22:00.785 [log-processor-3] INFO c.c.service.AccessLogAlertService - ȡ 1 µ־ݣʱ䷶Χ: 2026-03-09T18:22:00.238 2026-03-09T18:22:00.236 +2026-03-09 18:22:00.785 [log-processor-3] INFO c.c.service.AccessLogAlertService - ʼ㷨: 㷨3 (ID: 2004083121877696514) +2026-03-09 18:22:01.137 [log-processor-3] INFO c.c.service.AccessLogAlertService - 㷨 㷨3 δ⵽澯 +2026-03-09 18:22:01.137 [log-processor-3] INFO c.c.service.AccessLogAlertService - ־澯ɣ´ν 2026-03-09T18:22:00.236 ʼ +2026-03-09 18:23:00.003 [scheduling-6] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:23:00.003 [log-processor-4] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:23:00.084 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:23:00.235 [scheduling-6] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:23:00.235 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ151ms +2026-03-09 18:23:00.235 [log-processor-4] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:23:00.235 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:23:00.235 +2026-03-09 18:23:00.236 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:23:00.236 +2026-03-09 18:23:00.444 [log-processor-4] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:23:00.452 [scheduling-6] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:23:00.684 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:23:00.684 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 448ms +2026-03-09 18:23:01.145 [scheduling-2] INFO c.c.s.RealtimeAnalysisScheduler - ִй: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, nextTime=2026-03-05T19:12, now=2026-03-09T18:23:00.971 +2026-03-09 18:23:01.145 [scheduling-2] INFO c.c.s.impl.AnalysisRuleServiceImpl - ִʵʱ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765 +2026-03-09 18:23:01.608 [scheduling-2] INFO c.c.s.impl.RealtimeAnalysisEngine - ڲѯΧ: ڴС=5mѯʱ䷶Χ=[2026-03-09 18:18:00, 2026-03-09 18:23:00] +2026-03-09 18:23:01.608 [scheduling-2] INFO c.c.s.impl.RealtimeAnalysisEngine - ʼִʵʱ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, batchNo=20260309182301302, windowType=tumble, dataStartTime=2026-03-09 18:18:00, dataEndTime=2026-03-09 18:23:00 +2026-03-09 18:23:03.009 [scheduling-2] INFO c.c.s.impl.RealtimeAnalysisEngine - ɵSQL: SELECT src_ip AS attack_ip, +dest_ip AS victim_ip, +origin_event_name AS alarm_name, +ARRAY_AGG(DISTINCT src_port) AS attack_port, +ARRAY_AGG(DISTINCT dest_port) AS victim_port, +MAX(event_level) AS alarm_level, +MODE() WITHIN GROUP (ORDER BY 
dest_domain) AS dns_info, +MODE() WITHIN GROUP (ORDER BY origin_event_type) AS alarm_type, +COUNT(dest_ip) AS log_count, +MAX(attack_result) AS attack_result, +ARRAY_AGG(DISTINCT http_req_header) AS http_req_header, +ARRAY_AGG(DISTINCT http_req_body) AS http_req_body, +ARRAY_AGG(DISTINCT http_resp_header) AS http_resp_header, +ARRAY_AGG(DISTINCT http_resp_body) AS http_resp_body, +ARRAY_AGG(DISTINCT http_url) AS victim_web_url, +ARRAY_AGG(DISTINCT id) AS origin_log_ids, +MIN(log_time) AS log_start_at, +MAX(log_time) AS log_end_at, +ARRAY_AGG(DISTINCT device_id) AS device_id, +ARRAY_AGG(DISTINCT payload) AS payload, +TUMBLE(log_time, INTERVAL '5 MINUTE') AS window_time +FROM syslog_normal_alarm AS t +WHERE log_time >= '2026-03-09 18:18:00' AND log_time < '2026-03-09 18:23:00' AND src_ip != '127.0.0.1' AND event_level >= 1 +GROUP BY src_ip, dest_ip, origin_event_name, TUMBLE(log_time, INTERVAL '5 MINUTE') -2026-01-10 13:34:22.153 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler 'quartzScheduler' initialized from an externally provided properties instance. 
-2026-01-10 13:34:22.153 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler version: 2.3.2 -2026-01-10 13:34:22.153 [main] INFO org.quartz.core.QuartzScheduler - JobFactory set to: org.springframework.scheduling.quartz.SpringBeanJobFactory@4c18b432 -2026-01-10 13:34:22.288 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 13:34:22.288 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 13:34:22.288 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768023262287 -2026-01-10 13:34:22.300 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 13:34:22.300 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 13:34:22.300 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768023262300 -2026-01-10 13:34:22.300 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8089"] -2026-01-10 13:34:22.314 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8089 (http) with context path '/xdrservice' -2026-01-10 13:34:22.315 [main] INFO o.s.s.quartz.SchedulerFactoryBean - Starting Quartz Scheduler now -2026-01-10 13:34:22.315 [main] INFO org.quartz.core.QuartzScheduler - Scheduler quartzScheduler_$_NON_CLUSTERED started. -2026-01-10 13:34:22.325 [main] INFO com.syslogApplication - Started syslogApplication in 13.911 seconds (JVM running for 17.788) -2026-01-10 13:34:24.501 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. 
-2026-01-10 13:34:24.501 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:24.502 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:24.502 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:26.667 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:26.667 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. 
-2026-01-10 13:34:26.667 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:26.668 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:28.841 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:28.841 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:28.841 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:28.841 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:31.078 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. 
Broker may not be available. -2026-01-10 13:34:31.079 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:31.131 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:31.131 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:33.568 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:33.568 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:33.652 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. 
-2026-01-10 13:34:33.652 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:36.327 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:36.327 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:36.596 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:36.596 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:39.418 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. 
-2026-01-10 13:34:39.418 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:39.545 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:39.545 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:42.599 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:42.599 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:42.716 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. 
-2026-01-10 13:34:42.716 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:45.607 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:45.607 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:45.917 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:45.917 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:48.857 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. 
-2026-01-10 13:34:48.857 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-2, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:34:48.933 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Connection to node -1 (/192.168.222.130:9092) could not be established. Broker may not be available. -2026-01-10 13:34:48.933 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN o.apache.kafka.clients.NetworkClient - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Bootstrap broker 192.168.222.130:9092 (id: -1 rack: null) disconnected -2026-01-10 13:37:22.456 [main] INFO com.syslogApplication - Starting syslogApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 28304 (E:\GIT_GOSAME\haobang-security-xdr\syslog-consumer\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-10 13:37:22.456 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-10 13:37:22.462 [main] INFO com.syslogApplication - No active profile set, falling back to 1 default profile: "default" -2026-01-10 13:37:24.921 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:37:24.923 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Elasticsearch repositories in DEFAULT mode. -2026-01-10 13:37:25.427 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 500 ms. Found 1 Elasticsearch repository interfaces. 
-2026-01-10 13:37:25.433 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:37:25.434 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Reactive Elasticsearch repositories in DEFAULT mode. -2026-01-10 13:37:25.530 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Reactive Elasticsearch - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Reactive Elasticsearch repository, consider annotating your entities with one of these annotations: org.springframework.data.elasticsearch.annotations.Document (preferred), or consider extending one of the following types with your repository: org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository -2026-01-10 13:37:25.530 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 96 ms. Found 0 Reactive Elasticsearch repository interfaces. -2026-01-10 13:37:25.543 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:37:25.544 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. 
-2026-01-10 13:37:25.674 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository -2026-01-10 13:37:25.674 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 123 ms. Found 0 Redis repository interfaces. -2026-01-10 13:37:26.366 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8089 (http) -2026-01-10 13:37:26.375 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8089"] -2026-01-10 13:37:26.375 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-10 13:37:26.375 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-10 13:37:26.607 [main] INFO o.a.c.c.C.[.[.[/xdrservice] - Initializing Spring embedded WebApplicationContext -2026-01-10 13:37:26.607 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 4071 ms -2026-01-10 13:37:26.664 [main] INFO o.s.b.f.a.AutowiredAnnotationBeanPostProcessor - Autowired annotation is not supported on static fields: private static com.common.service.DmColumnService com.syslogApplication.dmColumnService -2026-01-10 13:37:29.183 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 13:37:29.471 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.insert] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this 
injection for [class com.baomidou.mybatisplus.core.injector.methods.Insert] -2026-01-10 13:37:29.485 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.update] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Update] -2026-01-10 13:37:29.515 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.deleteById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.DeleteById] -2026-01-10 13:37:29.519 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.selectById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.SelectById] -2026-01-10 13:37:29.559 [main] ERROR c.b.m.core.MybatisConfiguration - mapper[com.common.mapper.SecExceptionAlgorithmMapper.findById] is ignored, because it exists, maybe from xml file -2026-01-10 13:37:33.821 [main] INFO c.c.service.AccessLogAlertService - 初始化AccessLogAlertService,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:37:33.836 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-10 13:37:34.436 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. 
-2026-01-10 13:37:34.537 [main] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:37:35.447 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 13:37:35.554 [main] INFO com.common.util.MyBatisUtil - MyBatis 初始化成功 -2026-01-10 13:37:36.442 [main] INFO org.quartz.impl.StdSchedulerFactory - Using default implementation for ThreadExecutor -2026-01-10 13:37:36.455 [main] INFO o.quartz.core.SchedulerSignalerImpl - Initialized Scheduler Signaller of type: class org.quartz.core.SchedulerSignalerImpl -2026-01-10 13:37:36.455 [main] INFO org.quartz.core.QuartzScheduler - Quartz Scheduler v.2.3.2 created. -2026-01-10 13:37:36.456 [main] INFO org.quartz.simpl.RAMJobStore - RAMJobStore initialized. -2026-01-10 13:37:36.456 [main] INFO org.quartz.core.QuartzScheduler - Scheduler meta-data: Quartz Scheduler (v2.3.2) 'quartzScheduler' with instanceId 'NON_CLUSTERED' - Scheduler class: 'org.quartz.core.QuartzScheduler' - running locally. - NOT STARTED. - Currently in standby mode. - Number of jobs executed: 0 - Using thread pool 'org.quartz.simpl.SimpleThreadPool' - with 10 threads. - Using job-store 'org.quartz.simpl.RAMJobStore' - which does not support persistence. and is not clustered. 
+2026-03-09 18:23:03.655 [scheduling-2] INFO c.c.s.impl.RealtimeAnalysisEngine - ִгɹ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, processedCount=1, alarmCount=1 +2026-03-09 18:23:03.970 [scheduling-2] INFO c.c.s.i.RuleExecutionTimeServiceImpl - ¹´ִʱ䣬ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, windowType=tumble, nextExecuteTime=2026-03-09 18:28:00 +2026-03-09 18:23:03.970 [scheduling-2] INFO c.c.s.RealtimeAnalysisScheduler - εִй: 1, : 0 +2026-03-09 18:24:00.001 [scheduling-5] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:24:00.001 [log-processor-5] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:24:00.077 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... +2026-03-09 18:24:00.226 [scheduling-5] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:24:00.229 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ152ms +2026-03-09 18:24:00.229 [log-processor-5] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:24:00.229 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:24:00.229 +2026-03-09 18:24:00.229 [scheduling-9] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:24:00.229 +2026-03-09 18:24:00.419 [log-processor-5] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:24:00.423 [scheduling-5] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:24:00.673 [scheduling-9] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:24:00.673 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 444ms +2026-03-09 18:25:00.003 [scheduling-3] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:25:00.003 [log-processor-6] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:25:00.079 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:25:00.230 [log-processor-6] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:25:00.230 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ151ms +2026-03-09 18:25:00.230 [scheduling-3] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:25:00.230 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:25:00.230 +2026-03-09 18:25:00.230 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:25:00.230 +2026-03-09 18:25:00.420 [log-processor-6] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:25:00.420 [scheduling-3] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:25:00.667 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:25:00.667 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 437ms +2026-03-09 18:26:00.003 [scheduling-2] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:26:00.003 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - ETLʼִУʼʱ䣺2026-03-09 18:20:00,ʱ䣺2026-03-09 18:25:00 +2026-03-09 18:26:00.003 [scheduling-7] INFO com.common.service.DataExtractor - ʼ澯ָʱ䷶Χݣʱ䷶Χ: 2026-03-09T18:20 - 2026-03-09T18:25 +2026-03-09 18:26:00.003 [log-processor-7] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:26:00.080 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:26:00.229 [log-processor-7] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:26:00.229 [scheduling-7] INFO com.common.service.DataExtractor - ָʱ䷶Χ: 1 +2026-03-09 18:26:00.229 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ149ms +2026-03-09 18:26:00.229 [scheduling-2] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:26:00.229 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:26:00.229 +2026-03-09 18:26:00.229 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:26:00.229 +2026-03-09 18:26:00.420 [scheduling-2] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:26:00.424 [log-processor-7] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:26:00.656 [scheduling-7] INFO com.common.service.DataLoader - 澯ɣɹ: 1 : 1 +2026-03-09 18:26:00.668 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:26:00.668 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 439ms +2026-03-09 18:26:00.737 [scheduling-7] INFO com.common.service.DataExtractor - ݴ: 1/1 (100.00%) +2026-03-09 18:26:00.737 [scheduling-7] INFO com.common.service.DataExtractor - ݴɣ 1 +2026-03-09 18:26:00.737 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - ʱETLִɣʱ: 0 +2026-03-09 18:26:00.737 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - ʼִзʱʱ䣺2026-03-09T18:26:00.737 +2026-03-09 18:26:01.294 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - syslog_normal_data ͳƵ 1 м¼ +2026-03-09 18:26:01.294 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - syslog_normal_alarm ͳƵ 1 м¼ +2026-03-09 18:26:01.294 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - ϲҪµĹ2 +2026-03-09 18:26:01.444 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - ǰ״̬Ĺ173 +2026-03-09 18:26:01.444 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - ʼ£1731 +2026-03-09 
18:26:01.761 [scheduling-7] INFO c.c.s.NormalizeRuleHitTimeService - ʱɣ¹2ʱ1024ms +2026-03-09 18:27:00.005 [scheduling-6] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:27:00.005 [log-processor-8] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:27:00.078 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... +2026-03-09 18:27:00.233 [scheduling-6] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:27:00.234 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ156ms +2026-03-09 18:27:00.234 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:27:00.234 +2026-03-09 18:27:00.234 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:27:00.234 +2026-03-09 18:27:00.480 [scheduling-6] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:27:00.494 [log-processor-8] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:27:00.704 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:27:00.704 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 470ms +2026-03-09 18:27:00.755 [log-processor-8] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:28:00.004 [scheduling-3] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:28:00.004 [log-processor-9] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:28:00.081 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:28:00.231 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ150ms +2026-03-09 18:28:00.231 [log-processor-9] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:28:00.231 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:28:00.231 +2026-03-09 18:28:00.231 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:28:00.231 +2026-03-09 18:28:00.231 [scheduling-3] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:28:00.429 [log-processor-9] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:28:00.529 [scheduling-3] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:28:00.637 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:28:00.638 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 406ms +2026-03-09 18:28:00.865 [scheduling-8] INFO c.c.s.RealtimeAnalysisScheduler - ִй: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, nextTime=2026-03-09T18:28, now=2026-03-09T18:28:00.711 +2026-03-09 18:28:00.865 [scheduling-8] INFO c.c.s.impl.AnalysisRuleServiceImpl - ִʵʱ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765 +2026-03-09 18:28:01.335 [scheduling-8] INFO c.c.s.impl.RealtimeAnalysisEngine - ڲѯΧ: ڴС=5mѯʱ䷶Χ=[2026-03-09 18:23:00, 2026-03-09 18:28:00] +2026-03-09 18:28:01.335 [scheduling-8] INFO c.c.s.impl.RealtimeAnalysisEngine - ʼִʵʱ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, batchNo=20260309182801024, windowType=tumble, dataStartTime=2026-03-09 18:23:00, dataEndTime=2026-03-09 18:28:00 +2026-03-09 18:28:02.580 [scheduling-8] INFO c.c.s.impl.RealtimeAnalysisEngine - ɵSQL: SELECT src_ip AS attack_ip, +dest_ip AS victim_ip, +origin_event_name AS alarm_name, +ARRAY_AGG(DISTINCT src_port) AS attack_port, +ARRAY_AGG(DISTINCT dest_port) AS victim_port, +MAX(event_level) AS alarm_level, +MODE() WITHIN GROUP (ORDER BY 
dest_domain) AS dns_info, +MODE() WITHIN GROUP (ORDER BY origin_event_type) AS alarm_type, +COUNT(dest_ip) AS log_count, +MAX(attack_result) AS attack_result, +ARRAY_AGG(DISTINCT http_req_header) AS http_req_header, +ARRAY_AGG(DISTINCT http_req_body) AS http_req_body, +ARRAY_AGG(DISTINCT http_resp_header) AS http_resp_header, +ARRAY_AGG(DISTINCT http_resp_body) AS http_resp_body, +ARRAY_AGG(DISTINCT http_url) AS victim_web_url, +ARRAY_AGG(DISTINCT id) AS origin_log_ids, +MIN(log_time) AS log_start_at, +MAX(log_time) AS log_end_at, +ARRAY_AGG(DISTINCT device_id) AS device_id, +ARRAY_AGG(DISTINCT payload) AS payload, +TUMBLE(log_time, INTERVAL '5 MINUTE') AS window_time +FROM syslog_normal_alarm AS t +WHERE log_time >= '2026-03-09 18:23:00' AND log_time < '2026-03-09 18:28:00' AND src_ip != '127.0.0.1' AND event_level >= 1 +GROUP BY src_ip, dest_ip, origin_event_name, TUMBLE(log_time, INTERVAL '5 MINUTE') -2026-01-10 13:37:36.456 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler 'quartzScheduler' initialized from an externally provided properties instance. 
-2026-01-10 13:37:36.456 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler version: 2.3.2 -2026-01-10 13:37:36.456 [main] INFO org.quartz.core.QuartzScheduler - JobFactory set to: org.springframework.scheduling.quartz.SpringBeanJobFactory@43f50bfe -2026-01-10 13:37:36.628 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 13:37:36.629 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 13:37:36.629 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768023456627 -2026-01-10 13:37:36.645 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 13:37:36.647 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 13:37:36.647 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768023456645 -2026-01-10 13:37:36.649 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8089"] -2026-01-10 13:37:36.662 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8089 (http) with context path '/xdrservice' -2026-01-10 13:37:36.662 [main] INFO o.s.s.quartz.SchedulerFactoryBean - Starting Quartz Scheduler now -2026-01-10 13:37:36.662 [main] INFO org.quartz.core.QuartzScheduler - Scheduler quartzScheduler_$_NON_CLUSTERED started. 
-2026-01-10 13:37:36.681 [main] INFO com.syslogApplication - Started syslogApplication in 14.581 seconds (JVM running for 19.802) -2026-01-10 13:37:37.161 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [] -2026-01-10 13:37:37.188 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [test-topic-0] -2026-01-10 13:38:00.019 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:38:00.019 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:38:00.108 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:38:00.010晚于默认保留时间2026-01-03T13:38:00.108,使用默认时间 -2026-01-10 13:38:00.194 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:38:00.198 [log-processor-1] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:38:00.202 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:38:00.108天前的日志,共删除0条 -2026-01-10 13:38:00.287 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:38:00.374 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:38:00.463 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:89ms -2026-01-10 13:38:00.463 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:38:00.463 -2026-01-10 13:38:00.469 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:38:00.469 -2026-01-10 13:38:00.637 [scheduling-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:38:00.639 [log-processor-1] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:38:00.974 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:38:00.975 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 510ms -2026-01-10 13:38:58.615 [http-nio-8089-exec-1] INFO o.s.web.servlet.DispatcherServlet - Initializing Servlet 'dispatcherServlet' -2026-01-10 13:38:58.616 [http-nio-8089-exec-1] INFO o.s.web.servlet.DispatcherServlet - Completed initialization in 1 ms -2026-01-10 13:38:58.863 [http-nio-8089-exec-1] INFO com.controllers.SyslogPushController - 收到syslog发送请求: SyslogRequest{ip='192.168.0.103', port=514, logContent='<0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"}', protocol='TCP', facility='USER', severity='INFO'} -2026-01-10 13:38:58.863 [http-nio-8089-exec-1] INFO com.common.service.SyslogService - 开始发送syslog消息: IP=192.168.0.103, Port=514 -2026-01-10 13:38:58.864 [http-nio-8089-exec-1] INFO com.common.service.SyslogService - TCP Syslog消息发送成功: 192.168.0.103:514 -2026-01-10 13:38:58.864 [http-nio-8089-exec-1] INFO com.controllers.SyslogPushController - Syslog消息发送成功: IP=192.168.0.103, Port=514 -2026-01-10 13:39:00.090 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:39:00.093 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:39:00.010晚于默认保留时间2026-01-03T13:39:00.093,使用默认时间 -2026-01-10 13:39:00.173 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:83ms -2026-01-10 13:39:00.173 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:39:00.173 -2026-01-10 13:39:00.173 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:39:00.173 -2026-01-10 13:39:00.178 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:39:00.093天前的日志,共删除0条 -2026-01-10 13:39:00.272 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:39:00.633 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:39:00.633 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 460ms -2026-01-10 13:40:00.010 [scheduling-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:40:00.011 [log-processor-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:40:00.092 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:40:00.010晚于默认保留时间2026-01-03T13:40:00.092,使用默认时间 -2026-01-10 13:40:00.095 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:40:00.173 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:40:00.092天前的日志,共删除0条 -2026-01-10 13:40:00.179 [log-processor-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:40:00.182 [scheduling-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:40:00.184 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:89ms -2026-01-10 13:40:00.185 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:40:00.185 -2026-01-10 13:40:00.185 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:40:00.185 -2026-01-10 13:40:00.256 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:40:00.300 [log-processor-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:40:00.307 [scheduling-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:40:00.709 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:40:00.710 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 525ms -2026-01-10 13:41:00.004 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 13:35:00,结束时间:2026-01-10 13:40:00 -2026-01-10 13:41:00.010 [scheduling-2] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T13:35 - 2026-01-10T13:40 -2026-01-10 13:41:00.086 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:41:00.003晚于默认保留时间2026-01-03T13:41:00.086,使用默认时间 -2026-01-10 13:41:00.089 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:41:00.168 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:41:00.086天前的日志,共删除0条 -2026-01-10 13:41:00.178 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:89ms -2026-01-10 13:41:00.178 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:41:00.178 -2026-01-10 13:41:00.178 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:41:00.178 -2026-01-10 13:41:00.184 [scheduling-2] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 13:41:00.184 [scheduling-2] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 13:41:00.185 [scheduling-2] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T13:35 - 2026-01-10T13:40 -2026-01-10 13:41:00.250 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:41:00.277 [scheduling-2] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 13:41:00.277 [scheduling-2] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 13:41:00.278 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 13:41:00.662 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:41:00.662 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 484ms -2026-01-10 13:42:00.010 [scheduling-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:42:00.010 [log-processor-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:42:00.092 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:42:00.095 [scheduling-8] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:42:00.010晚于默认保留时间2026-01-03T13:42:00.095,使用默认时间 -2026-01-10 13:42:00.175 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:83ms -2026-01-10 13:42:00.175 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:42:00.175 -2026-01-10 13:42:00.175 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:42:00.175 -2026-01-10 13:42:00.178 [log-processor-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:42:00.180 [scheduling-8] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:42:00.095天前的日志,共删除0条 -2026-01-10 13:42:00.181 [scheduling-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:42:00.263 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:42:00.316 [log-processor-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:42:00.320 [scheduling-4] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:42:00.629 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:42:00.629 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 454ms -2026-01-10 13:43:00.085 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:43:00.088 [scheduling-2] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:43:00.005晚于默认保留时间2026-01-03T13:43:00.088,使用默认时间 -2026-01-10 13:43:00.171 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:86ms -2026-01-10 13:43:00.172 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:43:00.171 -2026-01-10 13:43:00.172 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:43:00.172 -2026-01-10 13:43:00.173 [scheduling-2] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:43:00.088天前的日志,共删除0条 -2026-01-10 13:43:00.255 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:43:00.713 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:43:00.713 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 541ms -2026-01-10 13:44:00.012 [scheduling-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:44:00.012 [log-processor-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:44:00.089 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:44:00.012晚于默认保留时间2026-01-03T13:44:00.089,使用默认时间 -2026-01-10 13:44:00.097 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:44:00.176 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:44:00.089天前的日志,共删除0条 -2026-01-10 13:44:00.178 [log-processor-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:44:00.183 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:86ms -2026-01-10 13:44:00.183 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:44:00.183 -2026-01-10 13:44:00.183 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:44:00.183 -2026-01-10 13:44:00.183 [scheduling-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:44:00.257 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:44:00.315 [log-processor-4] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:44:00.321 [scheduling-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:44:00.716 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:44:00.716 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 533ms -2026-01-10 13:45:00.092 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:45:00.011晚于默认保留时间2026-01-03T13:45:00.092,使用默认时间 -2026-01-10 13:45:00.095 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:45:00.174 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:45:00.092天前的日志,共删除0条 -2026-01-10 13:45:00.183 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 13:45:00.183 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:45:00.183 -2026-01-10 13:45:00.183 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:45:00.183 -2026-01-10 13:45:00.255 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:45:00.671 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:45:00.671 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 488ms -2026-01-10 13:46:00.014 [scheduling-7] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:46:00.014 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 13:40:00,结束时间:2026-01-10 13:45:00 -2026-01-10 13:46:00.014 [scheduling-1] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T13:40 - 2026-01-10T13:45 -2026-01-10 13:46:00.014 [log-processor-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:46:00.098 [scheduling-2] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:46:00.014晚于默认保留时间2026-01-03T13:46:00.098,使用默认时间 -2026-01-10 13:46:00.099 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:46:00.177 [scheduling-7] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:46:00.181 [scheduling-2] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:46:00.098天前的日志,共删除0条 -2026-01-10 13:46:00.191 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:92ms -2026-01-10 13:46:00.191 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:46:00.191 -2026-01-10 13:46:00.192 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:46:00.192 -2026-01-10 13:46:00.224 [scheduling-1] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 13:46:00.224 [scheduling-1] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 13:46:00.224 [scheduling-1] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T13:40 - 2026-01-10T13:45 -2026-01-10 13:46:00.263 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:46:00.312 [scheduling-1] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 13:46:00.312 [scheduling-1] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 13:46:00.313 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 13:46:00.519 [log-processor-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:46:00.614 [scheduling-7] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:46:00.736 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:46:00.736 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 545ms -2026-01-10 13:46:00.898 [log-processor-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:47:00.096 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:47:00.097 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:47:00.010晚于默认保留时间2026-01-03T13:47:00.097,使用默认时间 -2026-01-10 13:47:00.183 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:47:00.097天前的日志,共删除0条 -2026-01-10 13:47:00.183 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:87ms -2026-01-10 13:47:00.183 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:47:00.183 -2026-01-10 13:47:00.183 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:47:00.183 -2026-01-10 13:47:00.268 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:47:00.693 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:47:00.693 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 510ms -2026-01-10 13:48:00.010 [scheduling-10] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:48:00.010 [log-processor-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:48:00.092 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:48:00.010晚于默认保留时间2026-01-03T13:48:00.092,使用默认时间 -2026-01-10 13:48:00.096 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:48:00.174 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:48:00.092天前的日志,共删除0条 -2026-01-10 13:48:00.177 [log-processor-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:48:00.177 [scheduling-10] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:48:00.182 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:86ms -2026-01-10 13:48:00.182 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:48:00.182 -2026-01-10 13:48:00.182 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:48:00.182 -2026-01-10 13:48:00.254 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:48:00.325 [log-processor-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:48:00.692 [scheduling-10] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:48:00.702 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:48:00.703 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 521ms -2026-01-10 13:49:00.091 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:49:00.010晚于默认保留时间2026-01-03T13:49:00.091,使用默认时间 -2026-01-10 13:49:00.096 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:49:00.175 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:49:00.091天前的日志,共删除0条 -2026-01-10 13:49:00.183 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:87ms -2026-01-10 13:49:00.183 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:49:00.183 -2026-01-10 13:49:00.183 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:49:00.183 -2026-01-10 13:49:00.255 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:49:00.753 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:49:00.753 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 570ms -2026-01-10 13:50:00.012 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:50:00.013 [log-processor-7] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:50:00.093 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:50:00.105 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:50:00.012晚于默认保留时间2026-01-03T13:50:00.105,使用默认时间 -2026-01-10 13:50:00.176 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:83ms -2026-01-10 13:50:00.177 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:50:00.177 -2026-01-10 13:50:00.177 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:50:00.177 -2026-01-10 13:50:00.182 [log-processor-7] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:50:00.187 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:50:00.214 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:50:00.105天前的日志,共删除0条 -2026-01-10 13:50:00.297 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:50:00.317 [log-processor-7] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:50:00.394 [scheduling-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:50:00.683 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:50:00.683 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 506ms -2026-01-10 13:51:00.008 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 13:45:00,结束时间:2026-01-10 13:50:00 -2026-01-10 13:51:00.008 [scheduling-5] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T13:45 - 2026-01-10T13:50 -2026-01-10 13:51:00.093 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:51:00.094 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:51:00.008晚于默认保留时间2026-01-03T13:51:00.094,使用默认时间 -2026-01-10 13:51:00.168 [scheduling-5] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 13:51:00.168 [scheduling-5] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 13:51:00.168 [scheduling-5] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T13:45 - 2026-01-10T13:50 -2026-01-10 13:51:00.176 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:83ms -2026-01-10 13:51:00.176 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:51:00.094天前的日志,共删除0条 -2026-01-10 13:51:00.176 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:51:00.176 -2026-01-10 13:51:00.176 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:51:00.176 -2026-01-10 13:51:00.253 [scheduling-5] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 13:51:00.253 [scheduling-5] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 13:51:00.253 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 13:51:00.265 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:51:00.684 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:51:00.684 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 508ms -2026-01-10 13:52:00.009 [scheduling-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:52:00.010 [log-processor-8] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:52:00.094 [scheduling-8] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:52:00.009晚于默认保留时间2026-01-03T13:52:00.094,使用默认时间 -2026-01-10 13:52:00.095 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 
开始执行设备统计更新任务... -2026-01-10 13:52:00.175 [scheduling-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:52:00.176 [log-processor-8] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:52:00.179 [scheduling-8] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:52:00.094天前的日志,共删除0条 -2026-01-10 13:52:00.183 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 13:52:00.183 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:52:00.183 -2026-01-10 13:52:00.183 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:52:00.183 -2026-01-10 13:52:00.263 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:52:00.308 [scheduling-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:52:00.393 [log-processor-8] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:52:00.663 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:52:00.663 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 480ms -2026-01-10 13:53:00.094 [scheduling-8] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:53:00.009晚于默认保留时间2026-01-03T13:53:00.094,使用默认时间 -2026-01-10 13:53:00.094 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:53:00.180 [scheduling-8] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:53:00.094天前的日志,共删除0条 -2026-01-10 13:53:00.184 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:90ms -2026-01-10 13:53:00.184 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:53:00.184 -2026-01-10 13:53:00.184 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:53:00.184 -2026-01-10 13:53:00.264 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:53:00.682 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:53:00.683 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 499ms -2026-01-10 13:53:58.080 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:53:58.080 [log-processor-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 13:53:58.163 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T13:53:58.080晚于默认保留时间2026-01-03T13:53:58.163,使用默认时间 -2026-01-10 13:53:58.164 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 13:53:58.243 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T13:53:58.163天前的日志,共删除0条 -2026-01-10 13:53:58.245 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:53:58.250 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:86ms -2026-01-10 13:53:58.250 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T13:53:58.250 -2026-01-10 13:53:58.251 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T13:53:58.251 -2026-01-10 13:53:58.251 [log-processor-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 13:53:58.324 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 13:53:58.492 [log-processor-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:53:58.697 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 13:53:58.697 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 446ms -2026-01-10 13:53:58.737 [scheduling-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 13:59:54.078 [HikariPool-1 housekeeper] WARN com.zaxxer.hikari.pool.HikariPool - HikariPool-1 - Thread starvation or clock leap detected (housekeeper delta=5m51s169ms755µs800ns). 
-2026-01-10 14:00:21.164 [scheduling-7] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:00:21.164 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 13:55:00,结束时间:2026-01-10 14:00:00 -2026-01-10 14:00:21.164 [scheduling-8] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T13:55 - 2026-01-10T14:00 -2026-01-10 14:00:21.164 [log-processor-10] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:00:26.187 [log-processor-10] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@7c2603ab (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:26.187 [scheduling-8] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@296964cd (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:26.187 [scheduling-7] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@5da1fd68 (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:26.187 [scheduling-4] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@7e34d60 (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:26.187 [scheduling-9] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@654d837b (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:31.199 [scheduling-9] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@a4c44d (This connection has been closed.). 
Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:31.199 [log-processor-10] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@6722cb96 (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:31.199 [scheduling-4] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@510720d2 (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:31.199 [scheduling-8] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@818833a (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:31.199 [scheduling-7] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@32621718 (This connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:00:31.284 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:00:21.164晚于默认保留时间2026-01-03T14:00:31.284,使用默认时间 -2026-01-10 14:00:31.285 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:00:31.368 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:00:31.284天前的日志,共删除0条 -2026-01-10 14:00:31.370 [scheduling-7] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:00:31.370 [scheduling-8] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:00:31.370 [scheduling-8] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:00:31.370 [log-processor-10] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:00:31.370 [scheduling-8] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T13:55 - 2026-01-10T14:00 -2026-01-10 14:00:31.373 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 14:00:31.373 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:00:31.373 -2026-01-10 14:00:31.373 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:00:31.373 -2026-01-10 14:00:31.452 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:00:31.459 [scheduling-8] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:00:31.459 [scheduling-8] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:00:31.459 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 10 秒 -2026-01-10 14:00:31.790 [scheduling-7] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:00:31.795 [log-processor-10] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:00:31.905 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:00:31.905 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 532ms -2026-01-10 14:01:00.001 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 
13:55:00,结束时间:2026-01-10 14:00:00 -2026-01-10 14:01:00.001 [scheduling-2] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T13:55 - 2026-01-10T14:00 -2026-01-10 14:01:00.087 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:01:00.001晚于默认保留时间2026-01-03T14:01:00.087,使用默认时间 -2026-01-10 14:01:00.102 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:01:00.172 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:01:00.087天前的日志,共删除0条 -2026-01-10 14:01:00.173 [scheduling-2] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:01:00.173 [scheduling-2] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:01:00.173 [scheduling-2] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T13:55 - 2026-01-10T14:00 -2026-01-10 14:01:00.191 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:89ms -2026-01-10 14:01:00.192 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:01:00.192 -2026-01-10 14:01:00.192 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:01:00.192 -2026-01-10 14:01:00.256 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:01:00.266 [scheduling-2] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:01:00.266 [scheduling-2] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:01:00.266 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:01:00.678 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:01:00.678 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 486ms -2026-01-10 14:02:00.009 [scheduling-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:02:00.010 [log-processor-1] INFO 
c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:02:00.095 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:02:00.009晚于默认保留时间2026-01-03T14:02:00.095,使用默认时间 -2026-01-10 14:02:00.095 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:02:00.180 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:02:00.095天前的日志,共删除0条 -2026-01-10 14:02:00.180 [log-processor-1] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:02:00.181 [scheduling-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:02:00.187 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:92ms -2026-01-10 14:02:00.187 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:02:00.187 -2026-01-10 14:02:00.187 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:02:00.187 -2026-01-10 14:02:00.265 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:02:00.666 [log-processor-1] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:02:00.666 [scheduling-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:02:00.715 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:02:00.715 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 528ms -2026-01-10 14:03:00.088 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:03:00.088 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:03:00.003晚于默认保留时间2026-01-03T14:03:00.088,使用默认时间 -2026-01-10 14:03:00.175 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:03:00.088天前的日志,共删除0条 -2026-01-10 14:03:00.176 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 14:03:00.176 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:03:00.176 -2026-01-10 14:03:00.176 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:03:00.176 -2026-01-10 14:03:00.263 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:03:00.752 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:03:00.752 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 576ms -2026-01-10 14:04:00.003 [scheduling-8] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:04:00.003 [log-processor-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:04:00.093 [scheduling-5] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:04:00.003晚于默认保留时间2026-01-03T14:04:00.093,使用默认时间 -2026-01-10 14:04:00.174 [scheduling-8] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:04:00.174 [log-processor-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:04:00.180 [scheduling-5] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:04:00.093天前的日志,共删除0条 -2026-01-10 14:04:00.268 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:04:00.300 [log-processor-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:04:00.300 [scheduling-8] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:04:00.372 
[scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:04:00.467 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:95ms -2026-01-10 14:04:00.468 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:04:00.468 -2026-01-10 14:04:00.468 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:04:00.468 -2026-01-10 14:04:00.952 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:04:00.952 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 484ms -2026-01-10 14:05:00.087 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:05:00.001晚于默认保留时间2026-01-03T14:05:00.087,使用默认时间 -2026-01-10 14:05:00.120 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:05:00.170 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:05:00.087天前的日志,共删除0条 -2026-01-10 14:05:00.211 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:91ms -2026-01-10 14:05:00.212 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:05:00.212 -2026-01-10 14:05:00.212 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:05:00.212 -2026-01-10 14:05:00.255 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:05:01.063 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:05:01.063 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 851ms -2026-01-10 14:06:00.004 [scheduling-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:06:00.004 [log-processor-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:06:00.004 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 
ETL任务开始执行,开始时间:2026-01-10 14:00:00,结束时间:2026-01-10 14:05:00 -2026-01-10 14:06:00.004 [scheduling-5] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:00 - 2026-01-10T14:05 -2026-01-10 14:06:00.092 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:06:00.109 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:06:00.004晚于默认保留时间2026-01-03T14:06:00.109,使用默认时间 -2026-01-10 14:06:00.171 [scheduling-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:06:00.171 [log-processor-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:06:00.180 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 14:06:00.180 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:06:00.180 -2026-01-10 14:06:00.180 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:06:00.180 -2026-01-10 14:06:00.208 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:06:00.109天前的日志,共删除0条 -2026-01-10 14:06:00.208 [scheduling-5] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:06:00.208 [scheduling-5] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:06:00.209 [scheduling-5] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:00 - 2026-01-10T14:05 -2026-01-10 14:06:00.293 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:06:00.293 [scheduling-5] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:06:00.293 [scheduling-5] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:06:00.297 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:06:00.315 [log-processor-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 
14:06:00.316 [scheduling-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:06:00.674 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:06:00.674 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 494ms -2026-01-10 14:07:00.090 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:07:00.090 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:07:00.003晚于默认保留时间2026-01-03T14:07:00.090,使用默认时间 -2026-01-10 14:07:00.178 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:07:00.090天前的日志,共删除0条 -2026-01-10 14:07:00.178 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 14:07:00.179 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:07:00.179 -2026-01-10 14:07:00.179 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:07:00.179 -2026-01-10 14:07:00.263 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:07:00.695 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:07:00.695 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 516ms -2026-01-10 14:08:00.002 [scheduling-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:08:00.002 [log-processor-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:08:00.096 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:08:00.096 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:08:00.002晚于默认保留时间2026-01-03T14:08:00.096,使用默认时间 -2026-01-10 14:08:00.170 [scheduling-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:08:00.170 [log-processor-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:08:00.181 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:08:00.096天前的日志,共删除0条 -2026-01-10 14:08:00.184 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:0,耗时:88ms -2026-01-10 14:08:00.185 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:08:00.185 -2026-01-10 14:08:00.185 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:08:00.185 -2026-01-10 14:08:00.271 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:08:00.310 [log-processor-4] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:08:00.311 [scheduling-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:08:00.636 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:08:00.636 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 451ms -2026-01-10 14:08:50.215 [http-nio-8089-exec-3] INFO com.controllers.SyslogPushController - 收到syslog发送请求: SyslogRequest{ip='192.168.0.103', port=514, logContent='<0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"}', protocol='TCP', facility='USER', severity='INFO'} -2026-01-10 14:08:50.215 [http-nio-8089-exec-3] INFO com.common.service.SyslogService - 开始发送syslog消息: IP=192.168.0.103, Port=514 -2026-01-10 14:08:50.219 [http-nio-8089-exec-3] INFO com.common.service.SyslogService - TCP Syslog消息发送成功: 192.168.0.103:514 -2026-01-10 14:08:50.219 [http-nio-8089-exec-3] INFO com.controllers.SyslogPushController - Syslog消息发送成功: IP=192.168.0.103, Port=514 -2026-01-10 14:08:56.198 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - 开始处理批次消息,数量: 1 -2026-01-10 14:08:56.198 [log-processor-5] INFO c.Modules.NormalData.SysLogProcessor - 收到syslogmessage:[receive_time=20260110140855438 device_id=248 device_name=开发环境设备-HOME vendor=HFish data_type=json device_collect_id=1]<0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"} -2026-01-10 14:09:00.088 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:09:00.002晚于默认保留时间2026-01-03T14:09:00.088,使用默认时间 -2026-01-10 14:09:00.107 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:09:00.178 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:09:00.088天前的日志,共删除0条 -2026-01-10 14:09:00.262 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:09:00.297 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:190ms -2026-01-10 14:09:00.297 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:09:00.297 -2026-01-10 14:09:00.297 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:09:00.297 -2026-01-10 14:09:00.740 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:09:00.740 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 443ms -2026-01-10 14:09:01.651 [log-processor-5] WARN c.c.service.LogDataFilterService - 泛化规则-数据过滤规则为空,默认不处理! -2026-01-10 14:10:19.341 [scheduling-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:10:19.341 [HikariPool-1 housekeeper] WARN com.zaxxer.hikari.pool.HikariPool - HikariPool-1 - Thread starvation or clock leap detected (housekeeper delta=1m25s115ms628µs300ns). -2026-01-10 14:10:19.341 [log-processor-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:10:19.346 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] WARN c.Modules.NormalData.SysLogProcessor - 批次处理超时,已处理: 0/1 -2026-01-10 14:10:20.390 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:10:19.341晚于默认保留时间2026-01-03T14:10:20.390,使用默认时间 -2026-01-10 14:10:20.390 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:10:20.562 [scheduling-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:10:20.562 [log-processor-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:10:20.688 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:10:20.390天前的日志,共删除0条 -2026-01-10 14:10:37.594 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:10:37.594 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:17204ms -2026-01-10 14:10:37.594 [log-processor-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:10:37.594 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:10:37.594 -2026-01-10 14:10:37.595 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:10:37.595 -2026-01-10 14:10:40.009 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [test-topic-0] -2026-01-10 14:10:40.017 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] ERROR o.a.k.c.c.i.ConsumerCoordinator - [Consumer clientId=consumer-test-group-app-1, groupId=test-group-app] Offset commit failed on partition test-topic-0 at offset 477: The coordinator is not aware of this member. 
-2026-01-10 14:10:40.021 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] ERROR o.s.k.l.KafkaMessageListenerContainer - Consumer exception -java.lang.IllegalStateException: This error handler cannot process 'org.apache.kafka.clients.consumer.CommitFailedException's; no record information is available - at org.springframework.kafka.listener.DefaultErrorHandler.handleOtherException(DefaultErrorHandler.java:157) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.handleConsumerException(KafkaMessageListenerContainer.java:1812) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1301) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) - at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) - at java.util.concurrent.FutureTask.run(FutureTask.java) - at java.lang.Thread.run(Thread.java:745) -Caused by: org.apache.kafka.clients.consumer.CommitFailedException: Commit cannot be completed since the group has already rebalanced and assigned the partitions to another member. This means that the time between subsequent calls to poll() was longer than the configured max.poll.interval.ms, which typically implies that the poll loop is spending too much time message processing. You can address this either by increasing max.poll.interval.ms or by reducing the maximum size of batches returned in poll() with max.poll.records. 
- at org.apache.kafka.clients.consumer.internals.ConsumerCoordinator$OffsetCommitResponseHandler.handle(ConsumerCoordinator.java:1441) - at org.apache.kafka.clients.consumer.internals.ConsumerCoordinator$OffsetCommitResponseHandler.handle(ConsumerCoordinator.java:1341) - at org.apache.kafka.clients.consumer.internals.AbstractCoordinator$CoordinatorResponseHandler.onSuccess(AbstractCoordinator.java:1260) - at org.apache.kafka.clients.consumer.internals.AbstractCoordinator$CoordinatorResponseHandler.onSuccess(AbstractCoordinator.java:1235) - at org.apache.kafka.clients.consumer.internals.RequestFuture$1.onSuccess(RequestFuture.java:206) - at org.apache.kafka.clients.consumer.internals.RequestFuture.fireSuccess(RequestFuture.java:169) - at org.apache.kafka.clients.consumer.internals.RequestFuture.complete(RequestFuture.java:129) - at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient$RequestFutureCompletionHandler.fireCompletion(ConsumerNetworkClient.java:617) - at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.firePendingCompletedRequests(ConsumerNetworkClient.java:427) - at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:312) - at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:230) - at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:214) - at org.apache.kafka.clients.consumer.internals.ConsumerCoordinator.commitOffsetsSync(ConsumerCoordinator.java:1174) - at org.apache.kafka.clients.consumer.KafkaConsumer.commitSync(KafkaConsumer.java:1502) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doCommitSync(KafkaMessageListenerContainer.java:3062) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.commitSync(KafkaMessageListenerContainer.java:3057) - at 
org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.commitIfNecessary(KafkaMessageListenerContainer.java:3043) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.processCommits(KafkaMessageListenerContainer.java:2835) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1329) - at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1255) - ... 4 common frames omitted -2026-01-10 14:10:40.021 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions lost: [test-topic-0] -2026-01-10 14:10:40.033 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions revoked: [test-topic-0] -2026-01-10 14:10:40.078 [scheduling-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T13:35:33.821 -2026-01-10 14:10:40.486 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:10:40.487 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 2893ms -2026-01-10 14:10:40.514 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO c.Modules.NormalData.SysLogProcessor - 开始处理批次消息,数量: 1 -2026-01-10 14:10:40.514 [log-processor-7] INFO c.Modules.NormalData.SysLogProcessor - 收到syslogmessage:[receive_time=20260110140855438 device_id=248 device_name=开发环境设备-HOME vendor=HFish data_type=json device_collect_id=1]<0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"} -2026-01-10 14:10:40.565 [log-processor-7] WARN c.c.service.LogDataFilterService - 泛化规则-数据过滤规则为空,默认不处理! 
-2026-01-10 14:10:55.526 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO c.Modules.NormalData.SysLogProcessor - 批次处理完成,总数: 1 -2026-01-10 14:10:55.549 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions revoked: [test-topic-0] -2026-01-10 14:10:55.564 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [] -2026-01-10 14:10:55.570 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [test-topic-0] -2026-01-10 14:11:00.002 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:05:00,结束时间:2026-01-10 14:10:00 -2026-01-10 14:11:00.002 [scheduling-8] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:05 - 2026-01-10T14:10 -2026-01-10 14:11:00.085 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:11:00.002晚于默认保留时间2026-01-03T14:11:00.085,使用默认时间 -2026-01-10 14:11:00.088 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:11:00.171 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:11:00.085天前的日志,共删除0条 -2026-01-10 14:11:00.171 [scheduling-8] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:11:00.171 [scheduling-8] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:11:00.171 [scheduling-8] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:05 - 2026-01-10T14:10 -2026-01-10 14:11:00.256 [scheduling-8] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:11:00.256 [scheduling-8] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:11:00.256 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:11:00.575 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:487ms -2026-01-10 14:11:00.575 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:11:00.575 -2026-01-10 14:11:00.575 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:11:00.575 -2026-01-10 14:11:00.847 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:11:01.055 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:11:01.055 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 480ms -2026-01-10 14:12:00.012 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:12:00.012 [log-processor-8] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:12:00.096 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:12:00.011晚于默认保留时间2026-01-03T14:12:00.096,使用默认时间 -2026-01-10 14:12:00.133 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:12:00.180 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:12:00.096天前的日志,共删除0条 -2026-01-10 14:12:00.180 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:12:00.183 [log-processor-8] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:12:00.263 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:12:00.295 [scheduling-2] INFO c.c.service.AccessLogAlertService - 获取到 2 条新的日志数据,时间范围: 2026-01-10T13:35:33.821 到 2026-01-10T14:12:00.180 -2026-01-10 14:12:00.297 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始处理算法: 测试算法3 (ID: 2004083121877696514) -2026-01-10 14:12:00.307 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:174ms -2026-01-10 14:12:00.307 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:12:00.307 -2026-01-10 14:12:00.307 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:12:00.307 -2026-01-10 14:12:00.625 [log-processor-8] INFO c.c.service.AccessLogAlertService - 获取到 2 条新的日志数据,时间范围: 2026-01-10T13:35:33.821 到 2026-01-10T14:12:00.183 -2026-01-10 14:12:00.625 [log-processor-8] INFO c.c.service.AccessLogAlertService - 开始处理算法: 测试算法3 (ID: 2004083121877696514) -2026-01-10 14:12:00.784 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:12:00.784 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 477ms -2026-01-10 14:12:02.417 [scheduling-2] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/Webshell]: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection 
refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService.safeProcessTask(AccessLogAlertService.java:387) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy.invokeMethod(CglibAopProxy.java:386) - at org.springframework.aop.framework.CglibAopProxy.access$000(CglibAopProxy.java:85) - at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:704) - at com.common.service.AccessLogAlertService$$EnhancerBySpringCGLIB$$a38bc9af.safeProcessTask() - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:498) - at org.springframework.scheduling.support.ScheduledMethodRunnable.run(ScheduledMethodRunnable.java:84) - at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54) - at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:95) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) - at 
java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) - at java.util.concurrent.FutureTask.run(FutureTask.java) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at 
org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 
27 common frames omitted -2026-01-10 14:12:02.421 [scheduling-2] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/Webshell - 无响应 -2026-01-10 14:12:02.421 [scheduling-2] INFO c.c.service.AccessLogAlertService - 访问日志告警处理任务完成,下次将从 2026-01-10T14:12:00.180 开始处理 -2026-01-10 14:12:02.728 [log-processor-8] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/Webshell]: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:793) - at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) - at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:763) - at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) - at 
java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) - at java.util.concurrent.FutureTask.run(FutureTask.java) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at 
org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 16 common frames omitted -2026-01-10 14:12:02.744 [log-processor-8] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/Webshell - 无响应 -2026-01-10 14:12:02.744 [log-processor-8] INFO c.c.service.AccessLogAlertService - 访问日志告警处理任务完成,下次将从 2026-01-10T14:12:00.183 开始处理 -2026-01-10 14:13:00.092 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:13:00.097 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:13:00.007晚于默认保留时间2026-01-03T14:13:00.097,使用默认时间 -2026-01-10 14:13:00.183 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:13:00.097天前的日志,共删除0条 -2026-01-10 14:13:00.267 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:176ms -2026-01-10 14:13:00.267 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:13:00.267 -2026-01-10 14:13:00.267 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:13:00.267 -2026-01-10 14:13:00.268 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:13:00.753 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:13:00.753 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 486ms -2026-01-10 14:14:00.005 [scheduling-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:14:00.005 [log-processor-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:14:00.092 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:14:00.094 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:14:00.005晚于默认保留时间2026-01-03T14:14:00.094,使用默认时间 -2026-01-10 14:14:00.179 [log-processor-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:14:00.179 [scheduling-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:14:00.181 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:14:00.094天前的日志,共删除0条 -2026-01-10 14:14:00.264 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:172ms -2026-01-10 14:14:00.265 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:14:00.265 -2026-01-10 14:14:00.265 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:14:00.265 -2026-01-10 14:14:00.265 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:14:00.415 [scheduling-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:14:00.627 [log-processor-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:14:00.779 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:14:00.779 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 514ms -2026-01-10 14:15:00.094 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:15:00.095 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:15:00.009晚于默认保留时间2026-01-03T14:15:00.095,使用默认时间 -2026-01-10 14:15:00.181 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:15:00.095天前的日志,共删除0条 -2026-01-10 14:15:00.261 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:167ms -2026-01-10 14:15:00.261 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:15:00.261 -2026-01-10 14:15:00.261 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:15:00.261 -2026-01-10 14:15:00.267 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:15:01.087 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:15:01.087 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 826ms -2026-01-10 14:16:00.010 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:16:00.011 [log-processor-10] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:16:00.011 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:10:00,结束时间:2026-01-10 14:15:00 -2026-01-10 14:16:00.011 [scheduling-6] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:10 - 2026-01-10T14:15 -2026-01-10 14:16:00.095 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:16:00.095 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:16:00.010晚于默认保留时间2026-01-03T14:16:00.095,使用默认时间 -2026-01-10 14:16:00.180 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:16:00.095天前的日志,共删除0条 -2026-01-10 14:16:00.181 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:16:00.181 [log-processor-10] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:16:00.263 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:16:00.264 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:169ms -2026-01-10 14:16:00.264 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:16:00.264 -2026-01-10 14:16:00.264 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:16:00.264 -2026-01-10 14:16:00.320 [log-processor-10] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:16:00.320 [scheduling-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:16:00.477 [scheduling-6] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:16:00.478 [scheduling-6] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:16:00.478 [scheduling-6] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:10 - 2026-01-10T14:15 -2026-01-10 14:16:00.564 [scheduling-6] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:16:00.564 [scheduling-6] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:16:00.564 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:16:00.740 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:16:00.740 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 
设备采集探针任务时间更新完成,耗时: 476ms -2026-01-10 14:17:00.091 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:17:00.006晚于默认保留时间2026-01-03T14:17:00.091,使用默认时间 -2026-01-10 14:17:00.091 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:17:00.175 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:17:00.091天前的日志,共删除0条 -2026-01-10 14:17:00.261 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:17:00.267 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:176ms -2026-01-10 14:17:00.267 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:17:00.267 -2026-01-10 14:17:00.267 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:17:00.267 -2026-01-10 14:17:00.725 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:17:00.726 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 459ms -2026-01-10 14:18:00.002 [scheduling-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:18:00.002 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:18:00.087 [scheduling-5] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:18:00.002晚于默认保留时间2026-01-03T14:18:00.087,使用默认时间 -2026-01-10 14:18:00.172 [scheduling-5] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:18:00.087天前的日志,共删除0条 -2026-01-10 14:18:00.175 [log-processor-1] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:18:00.256 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:18:00.328 [log-processor-1] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:18:00.382 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:18:00.468 [scheduling-1] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:18:00.555 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:173ms -2026-01-10 14:18:00.555 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:18:00.555 -2026-01-10 14:18:00.555 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:18:00.555 -2026-01-10 14:18:00.625 [scheduling-1] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:18:01.081 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:18:01.081 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 526ms -2026-01-10 14:19:00.096 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:19:00.097 [scheduling-5] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:19:00.013晚于默认保留时间2026-01-03T14:19:00.097,使用默认时间 -2026-01-10 14:19:00.186 [scheduling-5] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:19:00.097天前的日志,共删除0条 -2026-01-10 14:19:00.271 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:175ms -2026-01-10 14:19:00.271 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:19:00.271 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:19:00.271 -2026-01-10 14:19:00.271 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:19:00.271 -2026-01-10 14:19:00.757 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:19:00.757 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 486ms -2026-01-10 14:20:00.000 [scheduling-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:20:00.016 [log-processor-2] 
INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:20:00.084 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:20:00.101 [scheduling-3] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:20:00.016晚于默认保留时间2026-01-03T14:20:00.101,使用默认时间 -2026-01-10 14:20:00.174 [scheduling-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:20:00.184 [log-processor-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:20:00.189 [scheduling-3] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:20:00.101天前的日志,共删除0条 -2026-01-10 14:20:00.254 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:170ms -2026-01-10 14:20:00.254 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:20:00.254 -2026-01-10 14:20:00.254 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:20:00.254 -2026-01-10 14:20:00.276 [scheduling-3] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:20:00.418 [scheduling-4] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:20:00.421 [log-processor-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:20:00.723 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:20:00.723 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 469ms -2026-01-10 14:21:00.002 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:15:00,结束时间:2026-01-10 14:20:00 -2026-01-10 14:21:00.003 [scheduling-6] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:15 - 2026-01-10T14:20 -2026-01-10 14:21:00.089 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:21:00.002晚于默认保留时间2026-01-03T14:21:00.089,使用默认时间 
-2026-01-10 14:21:00.103 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:21:00.174 [scheduling-6] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:21:00.174 [scheduling-6] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:21:00.174 [scheduling-6] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:15 - 2026-01-10T14:20 -2026-01-10 14:21:00.174 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:21:00.089天前的日志,共删除0条 -2026-01-10 14:21:00.261 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:21:00.262 [scheduling-6] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:21:00.262 [scheduling-6] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:21:00.262 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:21:00.281 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:178ms -2026-01-10 14:21:00.281 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:21:00.281 -2026-01-10 14:21:00.281 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:21:00.281 -2026-01-10 14:21:00.766 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:21:00.766 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 485ms -2026-01-10 14:22:00.014 [scheduling-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:22:00.014 [log-processor-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:22:00.097 [scheduling-5] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:22:00.014晚于默认保留时间2026-01-03T14:22:00.097,使用默认时间 -2026-01-10 14:22:00.097 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:22:00.182 [log-processor-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:22:00.182 [scheduling-5] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:22:00.097天前的日志,共删除0条 -2026-01-10 14:22:00.265 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:22:00.277 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:180ms -2026-01-10 14:22:00.277 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:22:00.277 -2026-01-10 14:22:00.277 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:22:00.277 -2026-01-10 14:22:00.418 [log-processor-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:22:00.766 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:22:00.766 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 489ms -2026-01-10 14:22:00.771 [scheduling-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:22:00.890 [scheduling-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:23:00.088 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:23:00.089 [scheduling-2] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:23:00.004晚于默认保留时间2026-01-03T14:23:00.089,使用默认时间 -2026-01-10 14:23:00.176 [scheduling-2] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:23:00.089天前的日志,共删除0条 -2026-01-10 14:23:00.258 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:170ms -2026-01-10 14:23:00.258 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:23:00.258 -2026-01-10 14:23:00.258 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:23:00.258 -2026-01-10 14:23:00.261 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:23:01.079 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:23:01.079 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 821ms -2026-01-10 14:24:00.008 [log-processor-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:24:00.008 [scheduling-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:24:00.090 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:24:00.090 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:24:00.008晚于默认保留时间2026-01-03T14:24:00.090,使用默认时间 -2026-01-10 14:24:00.175 [log-processor-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:24:00.178 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:24:00.090天前的日志,共删除0条 -2026-01-10 14:24:00.263 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:173ms -2026-01-10 14:24:00.263 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:24:00.263 -2026-01-10 14:24:00.263 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:24:00.263 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:24:00.263 -2026-01-10 14:24:00.406 [log-processor-4] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:24:00.468 [scheduling-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:24:00.703 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:24:00.703 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 440ms -2026-01-10 14:24:00.714 [scheduling-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:12:00.183 -2026-01-10 14:25:00.094 [scheduling-6] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:25:00.010晚于默认保留时间2026-01-03T14:25:00.094,使用默认时间 -2026-01-10 14:25:00.096 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:25:00.179 [scheduling-6] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:25:00.094天前的日志,共删除0条 -2026-01-10 14:25:00.262 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:25:00.273 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:177ms -2026-01-10 14:25:00.273 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:25:00.273 -2026-01-10 14:25:00.273 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:25:00.273 -2026-01-10 14:25:00.829 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:25:00.829 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 556ms -2026-01-10 14:25:15.949 [http-nio-8089-exec-5] INFO com.controllers.SyslogPushController - 收到syslog发送请求: SyslogRequest{ip='192.168.0.103', port=514, logContent='<0> 2026-01-10T13:47:27+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T13:47:27.249503+0800","flow_id":767115114538067,"community_id":"fFU2gDB2+pyUS6xQpAqqLdPLG4k=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"192.168.2.81","src_port":51018,"dest_ip":"120.241.131.42","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":423808413,"tcp_ack_sequence":3371175627,"ether":{},"host":"szextshort.weixin.qq.com","host_md5":"d7745538302ebc766b77ca8a4f3dd735","uri":"/mmtls/1abfe317","uri_md5":"e889825636e4d22b1d364b6bd6400ad5","agent":"MicroMessenger Client","referer":"","method":"POST","protocol":"HTTP/1.1","req_content_type":"application/octet-stream","request_headers":"accept: */*\r\ncache-control: no-cache\r\nconnection: Keep-Alive\r\ncontent-length: 2579\r\ncontent-type: application/octet-stream\r\nHost: szextshort.weixin.qq.com\r\nUpgrade: mmtls\r\nUser-Agent: MicroMessenger Client\r\n","rsp_content_type":"","response_headers":""}', protocol='TCP', 
facility='USER', severity='INFO'} -2026-01-10 14:25:15.949 [http-nio-8089-exec-5] INFO com.common.service.SyslogService - 开始发送syslog消息: IP=192.168.0.103, Port=514 -2026-01-10 14:25:15.950 [http-nio-8089-exec-5] INFO com.common.service.SyslogService - TCP Syslog消息发送成功: 192.168.0.103:514 -2026-01-10 14:25:15.950 [http-nio-8089-exec-5] INFO com.controllers.SyslogPushController - Syslog消息发送成功: IP=192.168.0.103, Port=514 -2026-01-10 14:25:16.513 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - 开始处理批次消息,数量: 1 -2026-01-10 14:25:16.513 [log-processor-6] INFO c.Modules.NormalData.SysLogProcessor - 收到syslogmessage:[receive_time=20260110142515957 device_id=248 device_name=开发环境设备-HOME vendor=HFish data_type=json device_collect_id=1]<0> 2026-01-10T13:47:27+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T13:47:27.249503+0800","flow_id":767115114538067,"community_id":"fFU2gDB2+pyUS6xQpAqqLdPLG4k=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"192.168.2.81","src_port":51018,"dest_ip":"120.241.131.42","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":423808413,"tcp_ack_sequence":3371175627,"ether":{},"host":"szextshort.weixin.qq.com","host_md5":"d7745538302ebc766b77ca8a4f3dd735","uri":"/mmtls/1abfe317","uri_md5":"e889825636e4d22b1d364b6bd6400ad5","agent":"MicroMessenger Client","referer":"","method":"POST","protocol":"HTTP/1.1","req_content_type":"application/octet-stream","request_headers":"accept: */*\r\ncache-control: no-cache\r\nconnection: Keep-Alive\r\ncontent-length: 2579\r\ncontent-type: application/octet-stream\r\nHost: szextshort.weixin.qq.com\r\nUpgrade: mmtls\r\nUser-Agent: MicroMessenger Client\r\n","rsp_content_type":"","response_headers":""} -2026-01-10 14:25:16.554 [log-processor-6] WARN c.c.service.LogDataFilterService - 泛化规则-数据过滤规则为空,默认不处理! 
-2026-01-10 14:25:17.148 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - 批次处理完成,总数: 1 -2026-01-10 14:26:00.002 [scheduling-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:26:00.002 [log-processor-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:26:00.002 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:20:00,结束时间:2026-01-10 14:25:00 -2026-01-10 14:26:00.003 [scheduling-2] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:20 - 2026-01-10T14:25 -2026-01-10 14:26:00.086 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:26:00.086 [scheduling-5] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:26:00.002晚于默认保留时间2026-01-03T14:26:00.086,使用默认时间 -2026-01-10 14:26:00.172 [scheduling-5] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:26:00.086天前的日志,共删除0条 -2026-01-10 14:26:00.172 [scheduling-2] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:26:00.172 [scheduling-2] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:26:00.172 [scheduling-2] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:20 - 2026-01-10T14:25 -2026-01-10 14:26:00.172 [log-processor-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:26:00.172 [scheduling-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:26:00.252 [scheduling-5] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:26:00.258 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:172ms -2026-01-10 14:26:00.258 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:26:00.258 -2026-01-10 14:26:00.259 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:26:00.259 
-2026-01-10 14:26:00.259 [scheduling-2] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:26:00.259 [scheduling-2] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:26:00.259 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:26:00.323 [log-processor-5] INFO c.c.service.AccessLogAlertService - 获取到 1 条新的日志数据,时间范围: 2026-01-10T14:12:00.183 到 2026-01-10T14:26:00.172 -2026-01-10 14:26:00.323 [log-processor-5] INFO c.c.service.AccessLogAlertService - 开始处理算法: 测试算法3 (ID: 2004083121877696514) -2026-01-10 14:26:00.414 [scheduling-4] INFO c.c.service.AccessLogAlertService - 获取到 1 条新的日志数据,时间范围: 2026-01-10T14:12:00.183 到 2026-01-10T14:26:00.172 -2026-01-10 14:26:00.414 [scheduling-4] INFO c.c.service.AccessLogAlertService - 开始处理算法: 测试算法3 (ID: 2004083121877696514) -2026-01-10 14:26:00.773 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:26:00.773 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 514ms -2026-01-10 14:26:02.434 [log-processor-5] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/Webshell]: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at 
com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:793) - at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) - at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:763) - at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) - at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) - at java.util.concurrent.FutureTask.run(FutureTask.java) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at 
sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 
16 common frames omitted -2026-01-10 14:26:02.449 [log-processor-5] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/Webshell - 无响应 -2026-01-10 14:26:02.449 [log-processor-5] INFO c.c.service.AccessLogAlertService - 访问日志告警处理任务完成,下次将从 2026-01-10T14:26:00.172 开始处理 -2026-01-10 14:26:02.528 [scheduling-4] ERROR c.c.service.AccessLogAlertService - 调用算法API异常 [URL: http://192.168.4.33:5001/Webshell]: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect -org.springframework.web.client.ResourceAccessException: I/O error on POST request for "http://192.168.4.33:5001/Webshell": Connection refused: connect; nested exception is java.net.ConnectException: Connection refused: connect - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:785) - at org.springframework.web.client.RestTemplate.execute(RestTemplate.java:711) - at org.springframework.web.client.RestTemplate.exchange(RestTemplate.java:602) - at com.common.service.AccessLogAlertService.callAlgorithmApi(AccessLogAlertService.java:275) - at com.common.service.AccessLogAlertService.processAlgorithm(AccessLogAlertService.java:153) - at com.common.service.AccessLogAlertService.processAccessLogAlert(AccessLogAlertService.java:122) - at com.common.service.AccessLogAlertService.safeProcessTask(AccessLogAlertService.java:387) - at com.common.service.AccessLogAlertService$$FastClassBySpringCGLIB$$4807ae0a.invoke() - at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) - at org.springframework.aop.framework.CglibAopProxy.invokeMethod(CglibAopProxy.java:386) - at org.springframework.aop.framework.CglibAopProxy.access$000(CglibAopProxy.java:85) - at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:704) - at 
com.common.service.AccessLogAlertService$$EnhancerBySpringCGLIB$$a38bc9af.safeProcessTask() - at sun.reflect.GeneratedMethodAccessor147.invoke(Unknown Source) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:498) - at org.springframework.scheduling.support.ScheduledMethodRunnable.run(ScheduledMethodRunnable.java:84) - at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54) - at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:95) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) - at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266) - at java.util.concurrent.FutureTask.run(FutureTask.java) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) - at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.net.ConnectException: Connection refused: connect - at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method) - at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:85) - at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) - at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) - at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) - at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) - at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) - at java.net.Socket.connect(Socket.java:589) - at sun.net.NetworkClient.doConnect(NetworkClient.java:175) 
- at sun.net.www.http.HttpClient.openServer(HttpClient.java:432) - at sun.net.www.http.HttpClient.openServer(HttpClient.java:527) - at sun.net.www.http.HttpClient.(HttpClient.java:211) - at sun.net.www.http.HttpClient.New(HttpClient.java:308) - at sun.net.www.http.HttpClient.New(HttpClient.java:326) - at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1202) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1138) - at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1032) - at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:966) - at org.springframework.http.client.SimpleBufferingClientHttpRequest.executeInternal(SimpleBufferingClientHttpRequest.java:76) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:109) - at com.config.RestTemplateConfig$LoggingInterceptor.intercept(RestTemplateConfig.java:62) - at org.springframework.http.client.InterceptingClientHttpRequest$InterceptingRequestExecution.execute(InterceptingClientHttpRequest.java:93) - at org.springframework.http.client.InterceptingClientHttpRequest.executeInternal(InterceptingClientHttpRequest.java:77) - at org.springframework.http.client.AbstractBufferingClientHttpRequest.executeInternal(AbstractBufferingClientHttpRequest.java:48) - at org.springframework.http.client.AbstractClientHttpRequest.execute(AbstractClientHttpRequest.java:66) - at org.springframework.web.client.RestTemplate.doExecute(RestTemplate.java:776) - ... 
26 common frames omitted -2026-01-10 14:26:02.543 [scheduling-4] ERROR c.c.service.AccessLogAlertService - 调用算法API失败: http://192.168.4.33:5001/Webshell - 无响应 -2026-01-10 14:26:02.543 [scheduling-4] INFO c.c.service.AccessLogAlertService - 访问日志告警处理任务完成,下次将从 2026-01-10T14:26:00.172 开始处理 -2026-01-10 14:27:00.097 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:27:00.097 [scheduling-8] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:27:00.013晚于默认保留时间2026-01-03T14:27:00.097,使用默认时间 -2026-01-10 14:27:00.181 [scheduling-8] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:27:00.097天前的日志,共删除0条 -2026-01-10 14:27:00.265 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:27:00.270 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:173ms -2026-01-10 14:27:00.270 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:27:00.270 -2026-01-10 14:27:00.270 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:27:00.270 -2026-01-10 14:27:00.748 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:27:00.748 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 478ms -2026-01-10 14:28:00.002 [scheduling-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:28:00.002 [log-processor-7] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:28:00.085 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:28:00.085 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:28:00.002晚于默认保留时间2026-01-03T14:28:00.085,使用默认时间 -2026-01-10 14:28:00.172 [scheduling-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:28:00.173 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:28:00.085天前的日志,共删除0条 -2026-01-10 14:28:00.173 [log-processor-7] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:28:00.257 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:172ms -2026-01-10 14:28:00.257 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:28:00.257 -2026-01-10 14:28:00.257 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:28:00.257 -2026-01-10 14:28:00.257 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:28:00.322 [scheduling-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:28:00.415 [log-processor-7] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:28:00.725 [scheduling-5] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:28:00.725 [scheduling-5] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 468ms -2026-01-10 14:29:00.089 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:29:00.089 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:29:00.003晚于默认保留时间2026-01-03T14:29:00.089,使用默认时间 -2026-01-10 14:29:00.172 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:29:00.089天前的日志,共删除0条 -2026-01-10 14:29:00.262 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:29:00.262 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:173ms -2026-01-10 14:29:00.262 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:29:00.262 -2026-01-10 14:29:00.262 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:29:00.262 -2026-01-10 14:29:00.715 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:29:00.715 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 453ms -2026-01-10 14:30:00.009 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:30:00.009 [log-processor-8] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:30:00.093 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:30:00.094 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:30:00.009晚于默认保留时间2026-01-03T14:30:00.094,使用默认时间 -2026-01-10 14:30:00.179 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:30:00.094天前的日志,共删除0条 -2026-01-10 14:30:00.179 [log-processor-8] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:30:00.179 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:30:00.263 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:170ms -2026-01-10 14:30:00.263 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:30:00.263 -2026-01-10 14:30:00.263 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:30:00.263 -2026-01-10 14:30:00.264 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:30:00.329 [scheduling-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:30:00.624 [log-processor-8] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:30:00.726 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:30:00.727 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 464ms -2026-01-10 14:31:00.011 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:25:00,结束时间:2026-01-10 14:30:00 -2026-01-10 14:31:00.011 [scheduling-10] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:25 - 2026-01-10T14:30 -2026-01-10 14:31:00.095 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:31:00.275 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:180ms -2026-01-10 14:31:00.275 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:31:00.275 -2026-01-10 14:31:00.275 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:31:00.275 -2026-01-10 14:31:00.480 [scheduling-10] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:31:00.480 [scheduling-10] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:31:00.481 [scheduling-10] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:25 - 2026-01-10T14:30 -2026-01-10 14:31:00.569 [scheduling-10] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:31:00.569 [scheduling-10] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:31:00.569 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:31:00.774 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:31:00.774 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 499ms -2026-01-10 14:31:05.022 [scheduling-4] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@7cb5b9e1 (This connection has been closed.). Possibly consider using a shorter maxLifetime value. 
-2026-01-10 14:31:05.395 [scheduling-4] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:31:00.011晚于默认保留时间2026-01-03T14:31:05.395,使用默认时间 -2026-01-10 14:31:05.480 [scheduling-4] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:31:05.395天前的日志,共删除0条 -2026-01-10 14:31:05.563 [scheduling-4] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:32:00.005 [scheduling-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:32:00.005 [log-processor-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:32:00.087 [scheduling-2] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:32:00.005晚于默认保留时间2026-01-03T14:32:00.087,使用默认时间 -2026-01-10 14:32:00.088 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:32:00.171 [scheduling-2] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:32:00.087天前的日志,共删除0条 -2026-01-10 14:32:00.178 [log-processor-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:32:00.179 [scheduling-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:32:00.254 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:32:00.256 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:168ms -2026-01-10 14:32:00.257 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:32:00.257 -2026-01-10 14:32:00.257 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:32:00.257 -2026-01-10 14:32:00.653 [log-processor-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:32:00.653 [scheduling-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:32:00.694 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:32:00.694 [scheduling-1] 
INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 437ms -2026-01-10 14:33:00.090 [scheduling-2] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:33:00.008晚于默认保留时间2026-01-03T14:33:00.090,使用默认时间 -2026-01-10 14:33:00.091 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:33:00.173 [scheduling-2] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:33:00.090天前的日志,共删除0条 -2026-01-10 14:33:00.255 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:33:00.260 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:169ms -2026-01-10 14:33:00.260 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:33:00.260 -2026-01-10 14:33:00.260 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:33:00.260 -2026-01-10 14:33:00.763 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:33:00.763 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 503ms -2026-01-10 14:34:00.002 [scheduling-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:34:00.002 [log-processor-10] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:34:00.086 [scheduling-2] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:34:00.002晚于默认保留时间2026-01-03T14:34:00.086,使用默认时间 -2026-01-10 14:34:00.087 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:34:00.169 [scheduling-2] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:34:00.086天前的日志,共删除0条 -2026-01-10 14:34:00.174 [log-processor-10] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:34:00.176 [scheduling-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:34:00.244 [scheduling-2] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:34:00.257 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:170ms -2026-01-10 14:34:00.257 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:34:00.257 -2026-01-10 14:34:00.257 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:34:00.257 -2026-01-10 14:34:00.295 [log-processor-10] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:34:00.299 [scheduling-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:34:00.724 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:34:00.724 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 467ms -2026-01-10 14:35:00.102 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:35:00.018晚于默认保留时间2026-01-03T14:35:00.102,使用默认时间 -2026-01-10 14:35:00.102 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:35:00.186 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:35:00.102天前的日志,共删除0条 -2026-01-10 14:35:00.270 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:35:00.275 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:173ms -2026-01-10 14:35:00.275 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:35:00.275 -2026-01-10 14:35:00.275 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:35:00.275 -2026-01-10 14:35:00.800 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:35:00.800 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 525ms -2026-01-10 14:36:00.002 [scheduling-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:36:00.002 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:36:00.002 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:30:00,结束时间:2026-01-10 14:35:00 -2026-01-10 14:36:00.003 [scheduling-6] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:30 - 2026-01-10T14:35 -2026-01-10 14:36:00.088 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:36:00.002晚于默认保留时间2026-01-03T14:36:00.088,使用默认时间 -2026-01-10 14:36:00.104 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:36:00.173 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:36:00.088天前的日志,共删除0条 -2026-01-10 14:36:00.174 [scheduling-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:36:00.174 [scheduling-6] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:36:00.174 [scheduling-6] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:36:00.174 [scheduling-6] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:30 - 2026-01-10T14:35 -2026-01-10 14:36:00.255 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:36:00.263 [scheduling-6] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:36:00.263 [scheduling-6] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:36:00.263 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:36:00.282 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:178ms -2026-01-10 14:36:00.282 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:36:00.282 -2026-01-10 14:36:00.282 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:36:00.282 -2026-01-10 14:36:00.307 [scheduling-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:36:00.471 [log-processor-1] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:36:00.765 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:36:00.766 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 484ms -2026-01-10 14:36:00.900 [log-processor-1] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:37:00.093 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 
删除时间点2026-01-08T14:37:00.013晚于默认保留时间2026-01-03T14:37:00.093,使用默认时间 -2026-01-10 14:37:00.093 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:37:00.178 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:37:00.093天前的日志,共删除0条 -2026-01-10 14:37:00.259 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:37:00.573 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:480ms -2026-01-10 14:37:00.573 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:37:00.573 -2026-01-10 14:37:00.573 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:37:00.573 -2026-01-10 14:37:01.144 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:37:01.145 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 572ms -2026-01-10 14:38:00.012 [scheduling-9] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:38:00.012 [log-processor-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:38:00.095 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:38:00.095 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:38:00.012晚于默认保留时间2026-01-03T14:38:00.095,使用默认时间 -2026-01-10 14:38:00.178 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:38:00.095天前的日志,共删除0条 -2026-01-10 14:38:00.179 [log-processor-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:38:00.180 [scheduling-9] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:38:00.262 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:38:00.262 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:167ms -2026-01-10 14:38:00.262 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:38:00.262 -2026-01-10 14:38:00.262 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:38:00.262 -2026-01-10 14:38:00.320 [log-processor-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:38:00.320 [scheduling-9] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:38:00.755 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:38:00.755 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 493ms -2026-01-10 14:39:00.095 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:39:00.011晚于默认保留时间2026-01-03T14:39:00.095,使用默认时间 -2026-01-10 14:39:00.096 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:39:00.179 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:39:00.095天前的日志,共删除0条 -2026-01-10 14:39:00.262 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:166ms -2026-01-10 14:39:00.262 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:39:00.262 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:39:00.262 -2026-01-10 14:39:00.262 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:39:00.262 -2026-01-10 14:39:00.713 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:39:00.714 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 452ms -2026-01-10 14:40:00.013 [scheduling-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:40:00.013 [log-processor-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:40:00.104 [scheduling-6] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:40:00.013晚于默认保留时间2026-01-03T14:40:00.104,使用默认时间 -2026-01-10 14:40:00.105 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:40:00.176 [log-processor-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:40:00.178 [scheduling-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:40:00.190 [scheduling-6] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:40:00.104天前的日志,共删除0条 -2026-01-10 14:40:00.274 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:40:00.274 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:169ms -2026-01-10 14:40:00.274 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:40:00.274 -2026-01-10 14:40:00.274 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:40:00.274 -2026-01-10 14:40:00.281 [log-processor-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:40:00.536 [scheduling-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:40:00.747 [scheduling-4] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:40:00.747 [scheduling-4] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 473ms -2026-01-10 14:41:00.013 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:35:00,结束时间:2026-01-10 14:40:00 -2026-01-10 14:41:00.013 [scheduling-8] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:35 - 2026-01-10T14:40 -2026-01-10 14:41:00.095 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:41:00.097 [scheduling-6] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:41:00.013晚于默认保留时间2026-01-03T14:41:00.097,使用默认时间 -2026-01-10 14:41:00.179 [scheduling-8] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:41:00.179 [scheduling-8] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:41:00.179 [scheduling-8] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:35 - 2026-01-10T14:40 -2026-01-10 14:41:00.180 [scheduling-6] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:41:00.097天前的日志,共删除0条 -2026-01-10 14:41:00.262 [scheduling-8] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:41:00.262 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:167ms -2026-01-10 14:41:00.262 [scheduling-8] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:41:00.262 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:41:00.262 -2026-01-10 14:41:00.262 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:41:00.262 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:41:00.262 -2026-01-10 14:41:00.265 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:41:00.698 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:41:00.698 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 436ms -2026-01-10 14:42:00.001 [log-processor-4] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:42:00.001 [scheduling-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:42:00.081 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:42:00.001晚于默认保留时间2026-01-03T14:42:00.081,使用默认时间 -2026-01-10 14:42:00.101 [scheduling-9] INFO 
c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:42:00.161 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:42:00.081天前的日志,共删除0条 -2026-01-10 14:42:00.166 [log-processor-4] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:42:00.166 [scheduling-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:42:00.240 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:42:00.271 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:170ms -2026-01-10 14:42:00.271 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:42:00.271 -2026-01-10 14:42:00.272 [scheduling-9] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:42:00.272 -2026-01-10 14:42:00.272 [log-processor-4] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:42:00.272 [scheduling-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:42:00.748 [scheduling-9] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:42:00.749 [scheduling-9] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 478ms -2026-01-10 14:43:00.091 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:43:00.095 [scheduling-9] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:43:00.010晚于默认保留时间2026-01-03T14:43:00.095,使用默认时间 -2026-01-10 14:43:00.179 [scheduling-9] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:43:00.095天前的日志,共删除0条 -2026-01-10 14:43:00.262 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:171ms -2026-01-10 14:43:00.262 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:43:00.262 -2026-01-10 14:43:00.262 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:43:00.262 -2026-01-10 14:43:00.263 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:43:00.724 [scheduling-7] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:43:00.724 [scheduling-7] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 462ms -2026-01-10 14:44:00.010 [scheduling-3] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:44:00.011 [log-processor-6] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:44:00.093 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:44:00.097 [scheduling-6] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:44:00.011晚于默认保留时间2026-01-03T14:44:00.097,使用默认时间 -2026-01-10 14:44:00.177 [scheduling-3] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:44:00.183 [scheduling-6] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:44:00.097天前的日志,共删除0条 -2026-01-10 14:44:00.183 [log-processor-6] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:44:00.253 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:160ms -2026-01-10 14:44:00.253 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:44:00.253 -2026-01-10 14:44:00.253 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:44:00.253 -2026-01-10 14:44:00.266 [scheduling-6] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:44:00.320 [scheduling-3] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:44:00.413 [log-processor-6] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:44:00.756 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:44:00.756 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 503ms -2026-01-10 14:45:00.084 [scheduling-10] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:45:00.002晚于默认保留时间2026-01-03T14:45:00.084,使用默认时间 -2026-01-10 14:45:00.085 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:45:00.166 [scheduling-10] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:45:00.084天前的日志,共删除0条 -2026-01-10 14:45:00.247 [scheduling-10] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:45:00.260 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:175ms -2026-01-10 14:45:00.260 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:45:00.260 -2026-01-10 14:45:00.260 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:45:00.260 -2026-01-10 14:45:01.026 [scheduling-6] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:45:01.026 [scheduling-6] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 766ms -2026-01-10 14:46:00.001 [scheduling-7] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:46:00.001 [log-processor-5] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:46:00.001 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - ETL任务开始执行,开始时间:2026-01-10 14:40:00,结束时间:2026-01-10 14:45:00 -2026-01-10 14:46:00.001 [scheduling-9] INFO com.common.service.DataExtractor - 开始处理指定时间范围内访问日志数据,时间范围: 2026-01-10T14:40 - 2026-01-10T14:45 -2026-01-10 14:46:00.103 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... 
-2026-01-10 14:46:00.167 [scheduling-7] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:46:00.171 [scheduling-9] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:46:00.171 [scheduling-9] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:46:00.171 [log-processor-5] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:46:00.171 [scheduling-9] INFO com.common.service.DataExtractor - 开始处理告警类型指定时间范围内数据,时间范围: 2026-01-10T14:40 - 2026-01-10T14:45 -2026-01-10 14:46:00.256 [scheduling-9] INFO com.common.service.DataExtractor - 指定时间范围分组数据量: 0 组 -2026-01-10 14:46:00.256 [scheduling-9] INFO com.common.service.DataExtractor - 没有需要处理的数据 -2026-01-10 14:46:00.256 [scheduling-9] INFO com.common.schedule.ETLOrchestrator - 定时ETL任务执行完成,耗时: 0 秒 -2026-01-10 14:46:00.279 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:176ms -2026-01-10 14:46:00.279 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:46:00.279 -2026-01-10 14:46:00.279 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:46:00.279 -2026-01-10 14:46:00.394 [scheduling-8] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:46:00.001晚于默认保留时间2026-01-03T14:46:00.394,使用默认时间 -2026-01-10 14:46:00.407 [log-processor-5] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:46:00.475 [scheduling-8] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:46:00.394天前的日志,共删除0条 -2026-01-10 14:46:00.559 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:46:00.630 [scheduling-7] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:26:00.172 -2026-01-10 14:46:00.798 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:46:00.798 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 
设备采集探针任务时间更新完成,耗时: 519ms -2026-01-10 14:46:57.053 [main] INFO com.syslogApplication - Starting syslogApplication using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 29532 (E:\GIT_GOSAME\haobang-security-xdr\syslog-consumer\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-10 14:46:57.053 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-10 14:46:57.058 [main] INFO com.syslogApplication - No active profile set, falling back to 1 default profile: "default" -2026-01-10 14:47:00.045 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 14:47:00.045 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Elasticsearch repositories in DEFAULT mode. -2026-01-10 14:47:00.496 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 444 ms. Found 1 Elasticsearch repository interfaces. -2026-01-10 14:47:00.500 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 14:47:00.500 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Reactive Elasticsearch repositories in DEFAULT mode. 
-2026-01-10 14:47:00.596 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Reactive Elasticsearch - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Reactive Elasticsearch repository, consider annotating your entities with one of these annotations: org.springframework.data.elasticsearch.annotations.Document (preferred), or consider extending one of the following types with your repository: org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository -2026-01-10 14:47:00.597 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 93 ms. Found 0 Reactive Elasticsearch repository interfaces. -2026-01-10 14:47:00.621 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 14:47:00.623 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. -2026-01-10 14:47:00.724 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.common.service.AppLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository -2026-01-10 14:47:00.724 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 94 ms. Found 0 Redis repository interfaces. 
-2026-01-10 14:47:01.438 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8089 (http) -2026-01-10 14:47:01.445 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8089"] -2026-01-10 14:47:01.445 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-10 14:47:01.446 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-10 14:47:01.674 [main] INFO o.a.c.c.C.[.[.[/xdrservice] - Initializing Spring embedded WebApplicationContext -2026-01-10 14:47:01.674 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 4533 ms -2026-01-10 14:47:01.728 [main] INFO o.s.b.f.a.AutowiredAnnotationBeanPostProcessor - Autowired annotation is not supported on static fields: private static com.common.service.DmColumnService com.syslogApplication.dmColumnService -2026-01-10 14:47:04.308 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 14:47:04.626 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.insert] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Insert] -2026-01-10 14:47:04.642 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.update] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.Update] -2026-01-10 14:47:04.665 [main] WARN c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.deleteById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.DeleteById] -2026-01-10 14:47:04.682 [main] WARN 
c.b.m.core.injector.AbstractMethod - [com.common.mapper.DeviceCollectTaskMapper.selectById] Has been loaded by XML or SqlProvider or Mybatis's Annotation, so ignoring this injection for [class com.baomidou.mybatisplus.core.injector.methods.SelectById] -2026-01-10 14:47:04.730 [main] ERROR c.b.m.core.MybatisConfiguration - mapper[com.common.mapper.SecExceptionAlgorithmMapper.findById] is ignored, because it exists, maybe from xml file -2026-01-10 14:47:09.423 [main] INFO c.c.service.AccessLogAlertService - 初始化AccessLogAlertService,上次处理时间: 2026-01-10T14:45:09.423 -2026-01-10 14:47:09.448 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-10 14:47:10.068 [main] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. -2026-01-10 14:47:10.178 [main] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:47:11.197 [main] INFO com.influx.InfluxDBClient - InfluxDB connection successful: ready for queries and writes -2026-01-10 14:47:11.323 [main] INFO com.common.util.MyBatisUtil - MyBatis 初始化成功 -2026-01-10 14:47:12.312 [main] INFO org.quartz.impl.StdSchedulerFactory - Using default implementation for ThreadExecutor -2026-01-10 14:47:12.320 [main] INFO o.quartz.core.SchedulerSignalerImpl - Initialized Scheduler Signaller of type: class org.quartz.core.SchedulerSignalerImpl -2026-01-10 14:47:12.320 [main] INFO org.quartz.core.QuartzScheduler - Quartz Scheduler v.2.3.2 created. -2026-01-10 14:47:12.320 [main] INFO org.quartz.simpl.RAMJobStore - RAMJobStore initialized. -2026-01-10 14:47:12.320 [main] INFO org.quartz.core.QuartzScheduler - Scheduler meta-data: Quartz Scheduler (v2.3.2) 'quartzScheduler' with instanceId 'NON_CLUSTERED' - Scheduler class: 'org.quartz.core.QuartzScheduler' - running locally. - NOT STARTED. - Currently in standby mode. - Number of jobs executed: 0 - Using thread pool 'org.quartz.simpl.SimpleThreadPool' - with 10 threads. 
- Using job-store 'org.quartz.simpl.RAMJobStore' - which does not support persistence. and is not clustered. +2026-03-09 18:28:03.047 [scheduling-8] INFO c.c.s.impl.RealtimeAnalysisEngine - ִгɹ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, processedCount=0, alarmCount=0 +2026-03-09 18:28:03.362 [scheduling-8] INFO c.c.s.i.RuleExecutionTimeServiceImpl - ¹´ִʱ䣬ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, windowType=tumble, nextExecuteTime=2026-03-09 18:33:00 +2026-03-09 18:28:03.362 [scheduling-8] INFO c.c.s.RealtimeAnalysisScheduler - εִй: 1, : 0 +2026-03-09 18:29:00.006 [scheduling-5] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:29:00.006 [log-processor-10] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:29:00.081 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... +2026-03-09 18:29:00.236 [scheduling-5] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:29:00.236 [log-processor-10] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:29:00.240 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ159ms +2026-03-09 18:29:00.240 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:29:00.240 +2026-03-09 18:29:00.240 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:29:00.240 +2026-03-09 18:29:00.487 [scheduling-5] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:29:00.488 [log-processor-10] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:29:00.702 [scheduling-2] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:29:00.702 [scheduling-2] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 462ms +2026-03-09 18:30:00.005 [scheduling-2] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:30:00.005 [log-processor-1] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:30:00.081 [scheduling-8] INFO 
c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... +2026-03-09 18:30:00.233 [scheduling-2] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:30:00.233 [log-processor-1] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:30:00.235 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ154ms +2026-03-09 18:30:00.235 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:30:00.235 +2026-03-09 18:30:00.235 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:30:00.235 +2026-03-09 18:30:00.430 [log-processor-1] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:30:00.501 [scheduling-2] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:30:00.639 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:30:00.639 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 404ms +2026-03-09 18:31:00.006 [scheduling-3] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:31:00.006 [log-processor-2] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:31:00.006 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - ETLʼִУʼʱ䣺2026-03-09 18:25:00,ʱ䣺2026-03-09 18:30:00 +2026-03-09 18:31:00.006 [scheduling-8] INFO com.common.service.DataExtractor - ʼ澯ָʱ䷶Χݣʱ䷶Χ: 2026-03-09T18:25 - 2026-03-09T18:30 +2026-03-09 18:31:00.084 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:31:00.235 [scheduling-8] INFO com.common.service.DataExtractor - ָʱ䷶Χ: 0 +2026-03-09 18:31:00.235 [log-processor-2] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:31:00.235 [scheduling-3] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:31:00.235 [scheduling-8] INFO com.common.service.DataExtractor - ûҪ +2026-03-09 18:31:00.235 [scheduling-8] INFO com.common.schedule.ETLOrchestrator - ʱETLִɣʱ: 0 +2026-03-09 18:31:00.235 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - ʼִзʱʱ䣺2026-03-09T18:31:00.235 +2026-03-09 18:31:00.236 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ152ms +2026-03-09 18:31:00.236 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:31:00.236 +2026-03-09 18:31:00.236 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:31:00.236 +2026-03-09 18:31:00.515 [scheduling-3] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:31:00.519 [log-processor-2] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:31:00.629 [scheduling-10] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:31:00.629 [scheduling-10] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 393ms +2026-03-09 18:31:00.758 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - syslog_normal_data ͳƵ 1 м¼ +2026-03-09 18:31:00.758 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - syslog_normal_alarm ͳƵ 1 м¼ +2026-03-09 18:31:00.758 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - ϲҪµĹ2 +2026-03-09 18:31:00.910 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - ǰ״̬Ĺ173 +2026-03-09 18:31:00.910 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - ʼ£1731 +2026-03-09 18:31:00.910 [scheduling-8] INFO c.c.s.NormalizeRuleHitTimeService - ʱɣ¹0ʱ675ms +2026-03-09 18:32:00.001 [scheduling-8] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:32:00.001 
[log-processor-3] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:32:00.077 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... +2026-03-09 18:32:00.226 [scheduling-8] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:32:00.226 [log-processor-3] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:32:00.232 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ155ms +2026-03-09 18:32:00.232 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:32:00.232 +2026-03-09 18:32:00.233 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:32:00.233 +2026-03-09 18:32:00.461 [scheduling-8] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:32:00.505 [log-processor-3] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:32:00.640 [scheduling-1] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:32:00.640 [scheduling-1] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 408ms +2026-03-09 18:33:00.002 [scheduling-10] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:33:00.002 [log-processor-4] INFO c.c.service.AccessLogAlertService - ʼִз־澯 +2026-03-09 18:33:00.078 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ͳƸ... 
+2026-03-09 18:33:00.228 [log-processor-4] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:33:00.228 [scheduling-10] INFO c.c.service.AccessLogAlertService - 1 õ㷨 +2026-03-09 18:33:00.230 [scheduling-6] INFO c.c.s.RealtimeAnalysisScheduler - ִй: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, nextTime=2026-03-09T18:33, now=2026-03-09T18:33:00.002 +2026-03-09 18:33:00.230 [scheduling-6] INFO c.c.s.impl.AnalysisRuleServiceImpl - ִʵʱ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765 +2026-03-09 18:33:00.232 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 豸ͳƸɣ豸1ʱ154ms +2026-03-09 18:33:00.232 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - ʼִ豸ɼ̽ʱ£ʱ: 2026-03-09T18:33:00.232 +2026-03-09 18:33:00.232 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - ʼ豸ɼʱ䣬ǰʱ: 2026-03-09T18:33:00.232 +2026-03-09 18:33:00.426 [scheduling-10] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:33:00.494 [log-processor-4] INFO c.c.service.AccessLogAlertService - ûзµ־ݣϴδʱ: 2026-03-09T18:22:00.236 +2026-03-09 18:33:00.634 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - ɣܼ: 48Ѹ: 1 +2026-03-09 18:33:00.634 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 豸ɼ̽ʱɣʱ: 402ms +2026-03-09 18:33:00.688 [scheduling-6] INFO c.c.s.impl.RealtimeAnalysisEngine - ڲѯΧ: ڴС=5mѯʱ䷶Χ=[2026-03-09 18:28:00, 2026-03-09 18:33:00] +2026-03-09 18:33:00.688 [scheduling-6] INFO c.c.s.impl.RealtimeAnalysisEngine - ʼִʵʱ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, batchNo=20260309183300381, windowType=tumble, dataStartTime=2026-03-09 18:28:00, dataEndTime=2026-03-09 18:33:00 +2026-03-09 18:33:01.943 [scheduling-6] INFO c.c.s.impl.RealtimeAnalysisEngine - ɵSQL: SELECT src_ip AS attack_ip, +dest_ip AS victim_ip, +origin_event_name AS alarm_name, +ARRAY_AGG(DISTINCT src_port) AS attack_port, +ARRAY_AGG(DISTINCT dest_port) AS victim_port, +MAX(event_level) AS alarm_level, +MODE() WITHIN GROUP (ORDER BY 
dest_domain) AS dns_info, +MODE() WITHIN GROUP (ORDER BY origin_event_type) AS alarm_type, +COUNT(dest_ip) AS log_count, +MAX(attack_result) AS attack_result, +ARRAY_AGG(DISTINCT http_req_header) AS http_req_header, +ARRAY_AGG(DISTINCT http_req_body) AS http_req_body, +ARRAY_AGG(DISTINCT http_resp_header) AS http_resp_header, +ARRAY_AGG(DISTINCT http_resp_body) AS http_resp_body, +ARRAY_AGG(DISTINCT http_url) AS victim_web_url, +ARRAY_AGG(DISTINCT id) AS origin_log_ids, +MIN(log_time) AS log_start_at, +MAX(log_time) AS log_end_at, +ARRAY_AGG(DISTINCT device_id) AS device_id, +ARRAY_AGG(DISTINCT payload) AS payload, +TUMBLE(log_time, INTERVAL '5 MINUTE') AS window_time +FROM syslog_normal_alarm AS t +WHERE log_time >= '2026-03-09 18:28:00' AND log_time < '2026-03-09 18:33:00' AND src_ip != '127.0.0.1' AND event_level >= 1 +GROUP BY src_ip, dest_ip, origin_event_name, TUMBLE(log_time, INTERVAL '5 MINUTE') -2026-01-10 14:47:12.320 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler 'quartzScheduler' initialized from an externally provided properties instance. 
-2026-01-10 14:47:12.320 [main] INFO org.quartz.impl.StdSchedulerFactory - Quartz scheduler version: 2.3.2 -2026-01-10 14:47:12.320 [main] INFO org.quartz.core.QuartzScheduler - JobFactory set to: org.springframework.scheduling.quartz.SpringBeanJobFactory@481f2acb -2026-01-10 14:47:12.481 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 14:47:12.481 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 14:47:12.481 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768027632481 -2026-01-10 14:47:12.500 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka version: 3.4.0 -2026-01-10 14:47:12.500 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka commitId: 2e1947d240607d53 -2026-01-10 14:47:12.500 [main] INFO o.a.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1768027632500 -2026-01-10 14:47:12.502 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8089"] -2026-01-10 14:47:12.514 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8089 (http) with context path '/xdrservice' -2026-01-10 14:47:12.514 [main] INFO o.s.s.quartz.SchedulerFactoryBean - Starting Quartz Scheduler now -2026-01-10 14:47:12.514 [main] INFO org.quartz.core.QuartzScheduler - Scheduler quartzScheduler_$_NON_CLUSTERED started. 
-2026-01-10 14:47:12.527 [main] INFO com.syslogApplication - Started syslogApplication in 15.978 seconds (JVM running for 21.192) -2026-01-10 14:47:27.968 [org.springframework.kafka.KafkaListenerEndpointContainer#0-1-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [] -2026-01-10 14:47:27.981 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO o.s.k.l.KafkaMessageListenerContainer - test-group-app: partitions assigned: [test-topic-0] -2026-01-10 14:48:00.016 [scheduling-2] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:48:00.016 [log-processor-1] INFO c.c.service.AccessLogAlertService - 开始执行访问日志告警处理任务 -2026-01-10 14:48:00.104 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:48:00.194 [log-processor-1] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:48:00.194 [scheduling-2] INFO c.c.service.AccessLogAlertService - 加载了 1 个启用的算法配置 -2026-01-10 14:48:00.287 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:183ms -2026-01-10 14:48:00.287 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:48:00.287 -2026-01-10 14:48:00.290 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:48:00.290 -2026-01-10 14:48:00.401 [scheduling-1] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:48:00.006晚于默认保留时间2026-01-03T14:48:00.401,使用默认时间 -2026-01-10 14:48:00.485 [scheduling-1] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:48:00.401天前的日志,共删除0条 -2026-01-10 14:48:00.569 [scheduling-1] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:48:00.666 [scheduling-2] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:45:09.423 -2026-01-10 14:48:00.666 [log-processor-1] INFO c.c.service.AccessLogAlertService - 没有发现新的日志数据,上次处理时间: 2026-01-10T14:45:09.423 -2026-01-10 
14:48:00.797 [scheduling-3] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:48:00.797 [scheduling-3] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 510ms -2026-01-10 14:48:21.353 [http-nio-8089-exec-1] INFO o.s.web.servlet.DispatcherServlet - Initializing Servlet 'dispatcherServlet' -2026-01-10 14:48:21.356 [http-nio-8089-exec-1] INFO o.s.web.servlet.DispatcherServlet - Completed initialization in 3 ms -2026-01-10 14:48:21.557 [http-nio-8089-exec-1] INFO com.controllers.SyslogPushController - 收到syslog发送请求: SyslogRequest{ip='192.168.0.103', port=514, logContent='<0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"}', protocol='TCP', facility='USER', severity='INFO'} -2026-01-10 14:48:21.558 [http-nio-8089-exec-1] INFO com.common.service.SyslogService - 开始发送syslog消息: IP=192.168.0.103, Port=514 -2026-01-10 14:48:21.559 [http-nio-8089-exec-1] INFO 
com.common.service.SyslogService - TCP Syslog消息发送成功: 192.168.0.103:514 -2026-01-10 14:48:21.559 [http-nio-8089-exec-1] INFO com.controllers.SyslogPushController - Syslog消息发送成功: IP=192.168.0.103, Port=514 -2026-01-10 14:48:21.989 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - 开始处理批次消息,数量: 1 -2026-01-10 14:48:21.991 [log-processor-2] INFO c.Modules.NormalData.SysLogProcessor - 收到syslogmessage:[receive_time=20260110144821572 device_id=248 device_name=开发环境设备-HOME vendor=HFish data_type=json device_collect_id=1]<0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"} -2026-01-10 14:48:27.681 [log-processor-2] WARN c.c.service.LogDataFilterService - 泛化规则-数据过滤规则为空,默认不处理! 
-2026-01-10 14:48:45.272 [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] INFO c.Modules.NormalData.SysLogProcessor - 批次处理完成,总数: 1 -2026-01-10 14:49:00.096 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备统计更新任务... -2026-01-10 14:49:00.096 [scheduling-7] WARN c.c.s.i.DeviceReceiveLogServiceImpl - 删除时间点2026-01-08T14:49:00.012晚于默认保留时间2026-01-03T14:49:00.096,使用默认时间 -2026-01-10 14:49:00.185 [scheduling-7] INFO c.c.s.i.DeviceReceiveLogServiceImpl - 删除2026-01-03T14:49:00.096天前的日志,共删除0条 -2026-01-10 14:49:00.267 [scheduling-7] INFO com.common.schedule.ETLOrchestrator - 定时清理任务完成,删除0条2天前的日志 -2026-01-10 14:49:00.283 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备统计更新完成,处理设备数:1,耗时:187ms -2026-01-10 14:49:00.283 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 开始执行设备采集探针任务时间更新,时间: 2026-01-10T14:49:00.283 -2026-01-10 14:49:00.283 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 开始批量更新设备采集任务时间,当前时间: 2026-01-10T14:49:00.283 -2026-01-10 14:49:00.813 [scheduling-8] INFO c.c.s.DeviceCollectTaskUpdateService - 批量更新完成,总计: 48,已更新: 1 -2026-01-10 14:49:00.813 [scheduling-8] INFO c.c.service.DeviceStatsUpdateService - 设备采集探针任务时间更新完成,耗时: 530ms +2026-03-09 18:33:02.410 [scheduling-6] INFO c.c.s.impl.RealtimeAnalysisEngine - ִгɹ: ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, processedCount=0, alarmCount=0 +2026-03-09 18:33:02.717 [scheduling-6] INFO c.c.s.i.RuleExecutionTimeServiceImpl - ¹´ִʱ䣬ruleId=4e134d65-1170-4d20-ab48-77f3fee6a765, ruleName=澯--V2, windowType=tumble, nextExecuteTime=2026-03-09 18:38:00 +2026-03-09 18:33:02.718 [scheduling-6] INFO c.c.s.RealtimeAnalysisScheduler - εִй: 1, : 0 diff --git a/haobang-security-xdr/logs/syslog-serve.2026-01-09.log b/haobang-security-xdr/logs/syslog-serve.2026-01-09.log deleted file mode 100644 index 2e734af..0000000 --- a/haobang-security-xdr/logs/syslog-serve.2026-01-09.log +++ /dev/null @@ -1,132 +0,0 @@ -2026-01-09 11:00:51.832 [main] INFO com.SyslogServeMainApp - Starting 
SyslogServeMainApp using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 18748 (E:\GIT_GOSAME\haobang-security-xdr\syslog-serve\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-09 11:00:51.832 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-09 11:00:51.840 [main] INFO com.SyslogServeMainApp - No active profile set, falling back to 1 default profile: "default" -2026-01-09 11:00:53.934 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-09 11:00:53.938 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. -2026-01-09 11:00:54.173 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 220 ms. Found 0 Redis repository interfaces. -2026-01-09 11:00:54.772 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8189 (http) -2026-01-09 11:00:54.780 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8189"] -2026-01-09 11:00:54.781 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-09 11:00:54.781 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-09 11:00:55.055 [main] INFO o.a.c.c.C.[.[.[/syslogserve] - Initializing Spring embedded WebApplicationContext -2026-01-09 11:00:55.055 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 3122 ms -2026-01-09 11:01:00.030 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8189"] -2026-01-09 11:01:00.049 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8189 (http) with context path '/syslogserve' -2026-01-09 11:01:00.062 [main] INFO com.SyslogServeMainApp - Started 
SyslogServeMainApp in 8.727 seconds (JVM running for 13.906) -2026-01-09 11:01:00.109 [main] INFO com.SyslogServeMainApp - Application SyslogServer start ! -2026-01-09 11:01:00.109 [main] INFO com.netty.SyslogServer - Starting Syslog server with TCP port 514 and UDP port 514 -2026-01-09 11:01:00.831 [pool-3-thread-2] INFO com.netty.SyslogServer - TCP Syslog server started on port 514 -2026-01-09 11:01:00.832 [pool-3-thread-1] INFO com.netty.SyslogServer - UDP Syslog server started on port 514 -2026-01-09 11:01:00.832 [main] INFO com.netty.SyslogServer - Both TCP and UDP Syslog servers are running -2026-01-09 11:01:49.047 [nioEventLoopGroup-5-1] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:56244: <0> 2026-01-09T07:39:59+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T07:39:59.845554+0800","flow_id":1102849895591141,"community_id":"aT54c1tW1hhcT3lcsk2zR5FrSPQ=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":58657,"dest_ip":"112.34.111.149","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":4101951934,"tcp_ack_sequence":1757098085,"ether":{},"uri":"","referer":"","method":"\u0015\u0000\u0000\u0001\u0000\u0001\u0000\\x94\\x93p\\xfb\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","protocol":"","req_content_type":"","request_headers":"\u0015 \r\n","rsp_content_type":"","response_headers":""} -2026-01-09 11:01:51.280 [nioEventLoopGroup-5-1] ERROR com.netty.SyslogMessageHandler - Exception in channel handler -org.springframework.data.redis.RedisConnectionFailureException: Unable to connect to Redis; nested exception is io.lettuce.core.RedisConnectionException: Unable to connect to localhost:6379 - at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$ExceptionTranslatingConnectionProvider.translateException(LettuceConnectionFactory.java:1689) - at 
org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$ExceptionTranslatingConnectionProvider.getConnection(LettuceConnectionFactory.java:1597) - at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$SharedConnection.getNativeConnection(LettuceConnectionFactory.java:1383) - at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$SharedConnection.getConnection(LettuceConnectionFactory.java:1366) - at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory.getSharedConnection(LettuceConnectionFactory.java:1093) - at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory.getConnection(LettuceConnectionFactory.java:421) - at org.springframework.data.redis.cache.DefaultRedisCacheWriter.execute(DefaultRedisCacheWriter.java:304) - at org.springframework.data.redis.cache.DefaultRedisCacheWriter.get(DefaultRedisCacheWriter.java:130) - at org.springframework.data.redis.cache.RedisCache.lookup(RedisCache.java:89) - at org.springframework.cache.support.AbstractValueAdaptingCache.get(AbstractValueAdaptingCache.java:58) - at org.springframework.cache.transaction.TransactionAwareCacheDecorator.get(TransactionAwareCacheDecorator.java:80) - at org.springframework.cache.interceptor.AbstractCacheInvoker.doGet(AbstractCacheInvoker.java:73) - at org.springframework.cache.interceptor.CacheAspectSupport.findInCaches(CacheAspectSupport.java:571) - at org.springframework.cache.interceptor.CacheAspectSupport.findCachedItem(CacheAspectSupport.java:536) - at org.springframework.cache.interceptor.CacheAspectSupport.execute(CacheAspectSupport.java:402) - at org.springframework.cache.interceptor.CacheAspectSupport.execute(CacheAspectSupport.java:345) - at org.springframework.cache.interceptor.CacheInterceptor.invoke(CacheInterceptor.java:64) - at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) - at 
org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:763) - at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:708) - at com.common.service.impl.DeviceDeviceServiceImpl$$EnhancerBySpringCGLIB$$bac109c0.getByIpSafely() - at com.Modules.Device.DeviceProcess.(DeviceProcess.java:49) - at com.netty.SyslogMessageHandler.channelRead0(SyslogMessageHandler.java:47) - at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) - at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) - at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) - at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) - at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346) - at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) - at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) - at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) - at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440) - at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) - at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) - at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166) - at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788) - at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724) - at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650) - at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562) - at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) - at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) - at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) - at java.lang.Thread.run(Thread.java:745) -Caused by: io.lettuce.core.RedisConnectionException: Unable to connect to localhost:6379 - at io.lettuce.core.RedisConnectionException.create(RedisConnectionException.java:78) - at io.lettuce.core.RedisConnectionException.create(RedisConnectionException.java:56) - at io.lettuce.core.AbstractRedisClient.getConnection(AbstractRedisClient.java:330) - at io.lettuce.core.RedisClient.connect(RedisClient.java:216) - at org.springframework.data.redis.connection.lettuce.StandaloneConnectionProvider.lambda$getConnection$1(StandaloneConnectionProvider.java:115) - at java.util.Optional.orElseGet(Optional.java:267) - at org.springframework.data.redis.connection.lettuce.StandaloneConnectionProvider.getConnection(StandaloneConnectionProvider.java:115) - at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$ExceptionTranslatingConnectionProvider.getConnection(LettuceConnectionFactory.java:1595) - ... 
47 common frames omitted -Caused by: java.nio.channels.ClosedChannelException: null - at io.netty.channel.nio.AbstractNioChannel.doClose(AbstractNioChannel.java:502) - at io.netty.channel.socket.nio.NioSocketChannel.doClose(NioSocketChannel.java:349) - at io.netty.channel.AbstractChannel$AbstractUnsafe.doClose0(AbstractChannel.java:754) - at io.netty.channel.AbstractChannel$AbstractUnsafe.close(AbstractChannel.java:731) - at io.netty.channel.AbstractChannel$AbstractUnsafe.close(AbstractChannel.java:620) - at io.netty.channel.DefaultChannelPipeline$HeadContext.close(DefaultChannelPipeline.java:1352) - at io.netty.channel.AbstractChannelHandlerContext.invokeClose(AbstractChannelHandlerContext.java:749) - at io.netty.channel.AbstractChannelHandlerContext.access$1200(AbstractChannelHandlerContext.java:61) - at io.netty.channel.AbstractChannelHandlerContext$11.run(AbstractChannelHandlerContext.java:732) - at io.netty.util.concurrent.AbstractEventExecutor.runTask(AbstractEventExecutor.java:174) - at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:167) - at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:470) - at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:569) - ... 
4 common frames omitted -2026-01-09 11:29:32.247 [nioEventLoopGroup-5-2] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:54671: <0> 2026-01-09T07:39:59+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T07:39:59.845554+0800","flow_id":1102849895591141,"community_id":"aT54c1tW1hhcT3lcsk2zR5FrSPQ=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":58657,"dest_ip":"112.34.111.149","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":4101951934,"tcp_ack_sequence":1757098085,"ether":{},"uri":"","referer":"","method":"\u0015\u0000\u0000\u0001\u0000\u0001\u0000\\x94\\x93p\\xfb\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","protocol":"","req_content_type":"","request_headers":"\u0015 \r\n","rsp_content_type":"","response_headers":""} -2026-01-09 11:29:32.384 [nioEventLoopGroup-5-2] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-09 11:29:33.051 [nioEventLoopGroup-5-2] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. 
-2026-01-09 11:38:58.872 [nioEventLoopGroup-5-3] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:52850: <0> 2026-01-09T07:39:59+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T07:39:59.845554+0800","flow_id":1102849895591141,"community_id":"aT54c1tW1hhcT3lcsk2zR5FrSPQ=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":58657,"dest_ip":"112.34.111.149","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":4101951934,"tcp_ack_sequence":1757098085,"ether":{},"uri":"","referer":"","method":"\u0015\u0000\u0000\u0001\u0000\u0001\u0000\\x94\\x93p\\xfb\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","protocol":"","req_content_type":"","request_headers":"\u0015 \r\n","rsp_content_type":"","response_headers":""} -2026-01-09 11:47:57.824 [nioEventLoopGroup-5-4] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:57928: <0> 2026-01-09T07:39:59+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T07:39:59.845554+0800","flow_id":1102849895591141,"community_id":"aT54c1tW1hhcT3lcsk2zR5FrSPQ=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":58657,"dest_ip":"112.34.111.149","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":4101951934,"tcp_ack_sequence":1757098085,"ether":{},"uri":"","referer":"","method":"\u0015\u0000\u0000\u0001\u0000\u0001\u0000\\x94\\x93p\\xfb\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","protocol":"","req_content_type":"","request_headers":"\u0015 \r\n","rsp_content_type":"","response_headers":""} -2026-01-09 15:27:55.550 [nioEventLoopGroup-5-5] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:55192: <0> 2026-01-09T07:39:59+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-09T07:39:59.845554+0800","flow_id":1102849895591141,"community_id":"aT54c1tW1hhcT3lcsk2zR5FrSPQ=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":58657,"dest_ip":"112.34.111.149","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":4101951934,"tcp_ack_sequence":1757098085,"ether":{},"uri":"","referer":"","method":"\u0015\u0000\u0000\u0001\u0000\u0001\u0000\\x94\\x93p\\xfb\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","protocol":"","req_content_type":"","request_headers":"\u0015 \r\n","rsp_content_type":"","response_headers":""} -2026-01-09 15:36:44.239 [nioEventLoopGroup-5-6] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:49951: <0> 2026-01-09T07:39:59+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T07:39:59.845554+0800","flow_id":1102849895591141,"community_id":"aT54c1tW1hhcT3lcsk2zR5FrSPQ=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":58657,"dest_ip":"112.34.111.149","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":4101951934,"tcp_ack_sequence":1757098085,"ether":{},"uri":"","referer":"","method":"\u0015\u0000\u0000\u0001\u0000\u0001\u0000\\x94\\x93p\\xfb\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","protocol":"","req_content_type":"","request_headers":"\u0015 \r\n","rsp_content_type":"","response_headers":""} -2026-01-09 15:57:14.712 [HikariPool-1 housekeeper] WARN com.zaxxer.hikari.pool.HikariPool - HikariPool-1 - Thread starvation or clock leap detected (housekeeper delta=5m37s501ms654µs800ns). 
-2026-01-09 17:25:52.192 [nioEventLoopGroup-5-7] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:52890: <0> 2026-01-09T17:09:15+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T17:09:15.160715+0800","flow_id":1198350732579968,"community_id":"q1DYMZcSdDgmfpAj9ozxmTzKNbY=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":41736,"dest_ip":"120.232.164.162","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3497657839,"tcp_ack_sequence":95825740,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"pull-hs-f5.flive.douyincdn.com","host_md5":"2b821a78621370fb0703c0c8076651ee","uri":"/thirdgame/stream-118644793116984074.flv?arch_hrchy=w1&enfpresource=2&exp_hrchy=w1&expire=1768553531&major_anchor_level=common&mtu_probe=false&protocol_stack=rust&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531&edgeup=v1&pt=v4&ptag=v4&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100","uri_md5":"7dfeefd12fae0a1654ba0cd0a980789e","agent":"LiveIO_ANDROID","referer":"","method":"GET","protocol":"HTTP/1.1","status":302,"req_content_type":"","request_headers":"user-agent: LiveIO_ANDROID\r\naccept: */*\r\nconnection: Close\r\nhost: pull-hs-f5.flive.douyincdn.com\r\n","rsp_content_type":"video/x-flv","response_headers":"access-control-allow-methods: GET,POST,OPTIONS\r\naccess-control-allow-origin: *\r\naccess-control-expose-headers: X-Server-Ip\r\ncache-control: no-cache\r\nconnection: close\r\ncontent-type: video/x-flv\r\nLocation: 
http://39.175.244.72/thirdgame/stream-118644793116984074.flv?302_type=extreme_cold_aggr&302_type_code=04&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100&align_delay=10&arch_hrchy=w1&domain=pull-hs-f5.flive.douyincdn.com&edgeup=v1&enable_pts_align=1&enfpresource=2&exp_hrchy=w1&expire=1768553531&fp_user_url=http%3A%2F%2Fpull-hs-f5.flive.douyincdn.com%2Fthirdgame%2Fstream-118644793116984074.flv%3Farch_hrchy%3Dw1%26enfpresource%3D2%26exp_hrchy%3Dw1%26expire%3D1768553531%26major_anchor_level%3Dcommon%26mtu_probe%3Dfalse%26protocol_stack%3Drust%26rtm_sei_bypass%3D1%26rtm_sstream_tag%3Dfcdn_v3%26sign%3D708e6204542769e1be14e825a3075aac%26t_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5%26unique_id%3Dstream-118644793116984074_778_flv%26volcSecret%3D708e6204542769e1be14e825a3075aac%26volcTime%3D1768553531%26edgeup%3Dv1%26pt%3Dv4%26ptag%3Dv4%26_session_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464%26abr_pts%3D-2100&hls_redirect_domain=bytefcdnrd.com&major_anchor_level=common&manage_ip=&mtu_probe=false&node_id=&pro_type=http&protocol_stack=rust&pt=v4&ptag=v4&redirect_from=pod.cn-6pz3yu.lvdb.nss&redirect_to=fc.cn-cn2316g2&redirect_to_ip=39.175.244.72&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&vhost=push-rtmp-hs-f5.douyincdn.com&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531\r\nServer: Bytedance NSS\r\nTiming-Allow-Origin: *\r\nVia: n120-232-164-132\r\nX-Cache-Status: Miss\r\nX-Client-Ip: 120.230.79.196\r\nX-Has-Token: 917220198\r\nX-Response-Timecost: {\"time_to_source\":-1,\"total_time\":-1}\r\nX-Server-Ip: 120.232.164.132\r\n"} -2026-01-09 18:01:54.416 [nioEventLoopGroup-5-8] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:51626: <0> 2026-01-09T17:09:15+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-09T17:09:15.160715+0800","flow_id":1198350732579968,"community_id":"q1DYMZcSdDgmfpAj9ozxmTzKNbY=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":41736,"dest_ip":"120.232.164.162","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3497657839,"tcp_ack_sequence":95825740,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"pull-hs-f5.flive.douyincdn.com","host_md5":"2b821a78621370fb0703c0c8076651ee","uri":"/thirdgame/stream-118644793116984074.flv?arch_hrchy=w1&enfpresource=2&exp_hrchy=w1&expire=1768553531&major_anchor_level=common&mtu_probe=false&protocol_stack=rust&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531&edgeup=v1&pt=v4&ptag=v4&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100","uri_md5":"7dfeefd12fae0a1654ba0cd0a980789e","agent":"LiveIO_ANDROID","referer":"","method":"GET","protocol":"HTTP/1.1","status":302,"req_content_type":"","request_headers":"user-agent: LiveIO_ANDROID\r\naccept: */*\r\nconnection: Close\r\nhost: pull-hs-f5.flive.douyincdn.com\r\n","rsp_content_type":"video/x-flv","response_headers":"access-control-allow-methods: GET,POST,OPTIONS\r\naccess-control-allow-origin: *\r\naccess-control-expose-headers: X-Server-Ip\r\ncache-control: no-cache\r\nconnection: close\r\ncontent-type: video/x-flv\r\nLocation: 
http://39.175.244.72/thirdgame/stream-118644793116984074.flv?302_type=extreme_cold_aggr&302_type_code=04&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100&align_delay=10&arch_hrchy=w1&domain=pull-hs-f5.flive.douyincdn.com&edgeup=v1&enable_pts_align=1&enfpresource=2&exp_hrchy=w1&expire=1768553531&fp_user_url=http%3A%2F%2Fpull-hs-f5.flive.douyincdn.com%2Fthirdgame%2Fstream-118644793116984074.flv%3Farch_hrchy%3Dw1%26enfpresource%3D2%26exp_hrchy%3Dw1%26expire%3D1768553531%26major_anchor_level%3Dcommon%26mtu_probe%3Dfalse%26protocol_stack%3Drust%26rtm_sei_bypass%3D1%26rtm_sstream_tag%3Dfcdn_v3%26sign%3D708e6204542769e1be14e825a3075aac%26t_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5%26unique_id%3Dstream-118644793116984074_778_flv%26volcSecret%3D708e6204542769e1be14e825a3075aac%26volcTime%3D1768553531%26edgeup%3Dv1%26pt%3Dv4%26ptag%3Dv4%26_session_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464%26abr_pts%3D-2100&hls_redirect_domain=bytefcdnrd.com&major_anchor_level=common&manage_ip=&mtu_probe=false&node_id=&pro_type=http&protocol_stack=rust&pt=v4&ptag=v4&redirect_from=pod.cn-6pz3yu.lvdb.nss&redirect_to=fc.cn-cn2316g2&redirect_to_ip=39.175.244.72&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&vhost=push-rtmp-hs-f5.douyincdn.com&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531\r\nServer: Bytedance NSS\r\nTiming-Allow-Origin: *\r\nVia: n120-232-164-132\r\nX-Cache-Status: Miss\r\nX-Client-Ip: 120.230.79.196\r\nX-Has-Token: 917220198\r\nX-Response-Timecost: {\"time_to_source\":-1,\"total_time\":-1}\r\nX-Server-Ip: 120.232.164.132\r\n"} -2026-01-09 18:10:56.132 [nioEventLoopGroup-5-9] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:63324: <0> 2026-01-09T17:09:15+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-09T17:09:15.160715+0800","flow_id":1198350732579968,"community_id":"q1DYMZcSdDgmfpAj9ozxmTzKNbY=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":41736,"dest_ip":"120.232.164.162","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3497657839,"tcp_ack_sequence":95825740,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"pull-hs-f5.flive.douyincdn.com","host_md5":"2b821a78621370fb0703c0c8076651ee","uri":"/thirdgame/stream-118644793116984074.flv?arch_hrchy=w1&enfpresource=2&exp_hrchy=w1&expire=1768553531&major_anchor_level=common&mtu_probe=false&protocol_stack=rust&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531&edgeup=v1&pt=v4&ptag=v4&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100","uri_md5":"7dfeefd12fae0a1654ba0cd0a980789e","agent":"LiveIO_ANDROID","referer":"","method":"GET","protocol":"HTTP/1.1","status":302,"req_content_type":"","request_headers":"user-agent: LiveIO_ANDROID\r\naccept: */*\r\nconnection: Close\r\nhost: pull-hs-f5.flive.douyincdn.com\r\n","rsp_content_type":"video/x-flv","response_headers":"access-control-allow-methods: GET,POST,OPTIONS\r\naccess-control-allow-origin: *\r\naccess-control-expose-headers: X-Server-Ip\r\ncache-control: no-cache\r\nconnection: close\r\ncontent-type: video/x-flv\r\nLocation: 
http://39.175.244.72/thirdgame/stream-118644793116984074.flv?302_type=extreme_cold_aggr&302_type_code=04&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100&align_delay=10&arch_hrchy=w1&domain=pull-hs-f5.flive.douyincdn.com&edgeup=v1&enable_pts_align=1&enfpresource=2&exp_hrchy=w1&expire=1768553531&fp_user_url=http%3A%2F%2Fpull-hs-f5.flive.douyincdn.com%2Fthirdgame%2Fstream-118644793116984074.flv%3Farch_hrchy%3Dw1%26enfpresource%3D2%26exp_hrchy%3Dw1%26expire%3D1768553531%26major_anchor_level%3Dcommon%26mtu_probe%3Dfalse%26protocol_stack%3Drust%26rtm_sei_bypass%3D1%26rtm_sstream_tag%3Dfcdn_v3%26sign%3D708e6204542769e1be14e825a3075aac%26t_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5%26unique_id%3Dstream-118644793116984074_778_flv%26volcSecret%3D708e6204542769e1be14e825a3075aac%26volcTime%3D1768553531%26edgeup%3Dv1%26pt%3Dv4%26ptag%3Dv4%26_session_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464%26abr_pts%3D-2100&hls_redirect_domain=bytefcdnrd.com&major_anchor_level=common&manage_ip=&mtu_probe=false&node_id=&pro_type=http&protocol_stack=rust&pt=v4&ptag=v4&redirect_from=pod.cn-6pz3yu.lvdb.nss&redirect_to=fc.cn-cn2316g2&redirect_to_ip=39.175.244.72&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&vhost=push-rtmp-hs-f5.douyincdn.com&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531\r\nServer: Bytedance NSS\r\nTiming-Allow-Origin: *\r\nVia: n120-232-164-132\r\nX-Cache-Status: Miss\r\nX-Client-Ip: 120.230.79.196\r\nX-Has-Token: 917220198\r\nX-Response-Timecost: {\"time_to_source\":-1,\"total_time\":-1}\r\nX-Server-Ip: 120.232.164.132\r\n"} -2026-01-09 18:17:37.914 [nioEventLoopGroup-5-10] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:64167: <0> 2026-01-09T17:09:15+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-09T17:09:15.160715+0800","flow_id":1198350732579968,"community_id":"q1DYMZcSdDgmfpAj9ozxmTzKNbY=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":41736,"dest_ip":"120.232.164.162","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3497657839,"tcp_ack_sequence":95825740,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"pull-hs-f5.flive.douyincdn.com","host_md5":"2b821a78621370fb0703c0c8076651ee","uri":"/thirdgame/stream-118644793116984074.flv?arch_hrchy=w1&enfpresource=2&exp_hrchy=w1&expire=1768553531&major_anchor_level=common&mtu_probe=false&protocol_stack=rust&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531&edgeup=v1&pt=v4&ptag=v4&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100","uri_md5":"7dfeefd12fae0a1654ba0cd0a980789e","agent":"LiveIO_ANDROID","referer":"","method":"GET","protocol":"HTTP/1.1","status":302,"req_content_type":"","request_headers":"user-agent: LiveIO_ANDROID\r\naccept: */*\r\nconnection: Close\r\nhost: pull-hs-f5.flive.douyincdn.com\r\n","rsp_content_type":"video/x-flv","response_headers":"access-control-allow-methods: GET,POST,OPTIONS\r\naccess-control-allow-origin: *\r\naccess-control-expose-headers: X-Server-Ip\r\ncache-control: no-cache\r\nconnection: close\r\ncontent-type: video/x-flv\r\nLocation: 
http://39.175.244.72/thirdgame/stream-118644793116984074.flv?302_type=extreme_cold_aggr&302_type_code=04&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100&align_delay=10&arch_hrchy=w1&domain=pull-hs-f5.flive.douyincdn.com&edgeup=v1&enable_pts_align=1&enfpresource=2&exp_hrchy=w1&expire=1768553531&fp_user_url=http%3A%2F%2Fpull-hs-f5.flive.douyincdn.com%2Fthirdgame%2Fstream-118644793116984074.flv%3Farch_hrchy%3Dw1%26enfpresource%3D2%26exp_hrchy%3Dw1%26expire%3D1768553531%26major_anchor_level%3Dcommon%26mtu_probe%3Dfalse%26protocol_stack%3Drust%26rtm_sei_bypass%3D1%26rtm_sstream_tag%3Dfcdn_v3%26sign%3D708e6204542769e1be14e825a3075aac%26t_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5%26unique_id%3Dstream-118644793116984074_778_flv%26volcSecret%3D708e6204542769e1be14e825a3075aac%26volcTime%3D1768553531%26edgeup%3Dv1%26pt%3Dv4%26ptag%3Dv4%26_session_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464%26abr_pts%3D-2100&hls_redirect_domain=bytefcdnrd.com&major_anchor_level=common&manage_ip=&mtu_probe=false&node_id=&pro_type=http&protocol_stack=rust&pt=v4&ptag=v4&redirect_from=pod.cn-6pz3yu.lvdb.nss&redirect_to=fc.cn-cn2316g2&redirect_to_ip=39.175.244.72&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&vhost=push-rtmp-hs-f5.douyincdn.com&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531\r\nServer: Bytedance NSS\r\nTiming-Allow-Origin: *\r\nVia: n120-232-164-132\r\nX-Cache-Status: Miss\r\nX-Client-Ip: 120.230.79.196\r\nX-Has-Token: 917220198\r\nX-Response-Timecost: {\"time_to_source\":-1,\"total_time\":-1}\r\nX-Server-Ip: 120.232.164.132\r\n"} -2026-01-09 18:26:22.954 [main] INFO com.SyslogServeMainApp - Starting SyslogServeMainApp using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 25676 (E:\GIT_GOSAME\haobang-security-xdr\syslog-serve\target\classes started by chenc 
in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-09 18:26:22.954 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-09 18:26:22.956 [main] INFO com.SyslogServeMainApp - No active profile set, falling back to 1 default profile: "default" -2026-01-09 18:26:24.953 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-09 18:26:24.954 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. -2026-01-09 18:26:25.122 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 153 ms. Found 0 Redis repository interfaces. -2026-01-09 18:26:25.561 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8189 (http) -2026-01-09 18:26:25.561 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8189"] -2026-01-09 18:26:25.561 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-09 18:26:25.561 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-09 18:26:25.813 [main] INFO o.a.c.c.C.[.[.[/syslogserve] - Initializing Spring embedded WebApplicationContext -2026-01-09 18:26:25.813 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 2787 ms -2026-01-09 18:26:29.613 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8189"] -2026-01-09 18:26:29.627 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8189 (http) with context path '/syslogserve' -2026-01-09 18:26:29.636 [main] INFO com.SyslogServeMainApp - Started SyslogServeMainApp in 7.135 seconds (JVM running for 11.565) -2026-01-09 18:26:29.675 [main] INFO com.SyslogServeMainApp - Application SyslogServer start ! 
-2026-01-09 18:26:29.675 [main] INFO com.netty.SyslogServer - Starting Syslog server with TCP port 514 and UDP port 514 -2026-01-09 18:26:30.117 [pool-3-thread-2] INFO com.netty.SyslogServer - TCP Syslog server started on port 514 -2026-01-09 18:26:30.117 [pool-3-thread-1] INFO com.netty.SyslogServer - UDP Syslog server started on port 514 -2026-01-09 18:26:30.118 [main] INFO com.netty.SyslogServer - Both TCP and UDP Syslog servers are running -2026-01-09 18:26:32.082 [nioEventLoopGroup-5-1] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:57387: <0> 2026-01-09T17:09:15+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-09T17:09:15.160715+0800","flow_id":1198350732579968,"community_id":"q1DYMZcSdDgmfpAj9ozxmTzKNbY=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":41736,"dest_ip":"120.232.164.162","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3497657839,"tcp_ack_sequence":95825740,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"pull-hs-f5.flive.douyincdn.com","host_md5":"2b821a78621370fb0703c0c8076651ee","uri":"/thirdgame/stream-118644793116984074.flv?arch_hrchy=w1&enfpresource=2&exp_hrchy=w1&expire=1768553531&major_anchor_level=common&mtu_probe=false&protocol_stack=rust&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531&edgeup=v1&pt=v4&ptag=v4&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100","uri_md5":"7dfeefd12fae0a1654ba0cd0a980789e","agent":"LiveIO_ANDROID","referer":"","method":"GET","protocol":"HTTP/1.1","status":302,"req_content_type":"","request_headers":"user-agent: LiveIO_ANDROID\r\naccept: */*\r\nconnection: Close\r\nhost: 
pull-hs-f5.flive.douyincdn.com\r\n","rsp_content_type":"video/x-flv","response_headers":"access-control-allow-methods: GET,POST,OPTIONS\r\naccess-control-allow-origin: *\r\naccess-control-expose-headers: X-Server-Ip\r\ncache-control: no-cache\r\nconnection: close\r\ncontent-type: video/x-flv\r\nLocation: http://39.175.244.72/thirdgame/stream-118644793116984074.flv?302_type=extreme_cold_aggr&302_type_code=04&_session_id=037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464&abr_pts=-2100&align_delay=10&arch_hrchy=w1&domain=pull-hs-f5.flive.douyincdn.com&edgeup=v1&enable_pts_align=1&enfpresource=2&exp_hrchy=w1&expire=1768553531&fp_user_url=http%3A%2F%2Fpull-hs-f5.flive.douyincdn.com%2Fthirdgame%2Fstream-118644793116984074.flv%3Farch_hrchy%3Dw1%26enfpresource%3D2%26exp_hrchy%3Dw1%26expire%3D1768553531%26major_anchor_level%3Dcommon%26mtu_probe%3Dfalse%26protocol_stack%3Drust%26rtm_sei_bypass%3D1%26rtm_sstream_tag%3Dfcdn_v3%26sign%3D708e6204542769e1be14e825a3075aac%26t_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5%26unique_id%3Dstream-118644793116984074_778_flv%26volcSecret%3D708e6204542769e1be14e825a3075aac%26volcTime%3D1768553531%26edgeup%3Dv1%26pt%3Dv4%26ptag%3Dv4%26_session_id%3D037-20260109165209EC0131706463F07583DF-WnNoS5.1767949754287.-940398464%26abr_pts%3D-2100&hls_redirect_domain=bytefcdnrd.com&major_anchor_level=common&manage_ip=&mtu_probe=false&node_id=&pro_type=http&protocol_stack=rust&pt=v4&ptag=v4&redirect_from=pod.cn-6pz3yu.lvdb.nss&redirect_to=fc.cn-cn2316g2&redirect_to_ip=39.175.244.72&rtm_sei_bypass=1&rtm_sstream_tag=fcdn_v3&sign=708e6204542769e1be14e825a3075aac&t_id=037-20260109165209EC0131706463F07583DF-WnNoS5&unique_id=stream-118644793116984074_778_flv&vhost=push-rtmp-hs-f5.douyincdn.com&volcSecret=708e6204542769e1be14e825a3075aac&volcTime=1768553531\r\nServer: Bytedance NSS\r\nTiming-Allow-Origin: *\r\nVia: n120-232-164-132\r\nX-Cache-Status: Miss\r\nX-Client-Ip: 120.230.79.196\r\nX-Has-Token: 917220198\r\nX-Response-Timecost: 
{\"time_to_source\":-1,\"total_time\":-1}\r\nX-Server-Ip: 120.232.164.132\r\n"} -2026-01-09 18:26:32.265 [nioEventLoopGroup-5-1] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-09 18:26:32.842 [nioEventLoopGroup-5-1] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. diff --git a/haobang-security-xdr/logs/syslog-serve.log b/haobang-security-xdr/logs/syslog-serve.log index 01a0676..afe840f 100644 --- a/haobang-security-xdr/logs/syslog-serve.log +++ b/haobang-security-xdr/logs/syslog-serve.log @@ -1,31 +1,23 @@ -2026-01-10 13:26:58.023 [main] INFO com.SyslogServeMainApp - Starting SyslogServeMainApp using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 26480 (E:\GIT_GOSAME\haobang-security-xdr\syslog-serve\target\classes started by chenc in E:\GIT_GOSAME\haobang-security-xdr) -2026-01-10 13:26:58.023 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final -2026-01-10 13:26:58.029 [main] INFO com.SyslogServeMainApp - No active profile set, falling back to 1 default profile: "default" -2026-01-10 13:26:59.839 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode -2026-01-10 13:26:59.839 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. -2026-01-10 13:27:00.011 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 153 ms. Found 0 Redis repository interfaces. 
-2026-01-10 13:27:00.592 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8189 (http) -2026-01-10 13:27:00.604 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8189"] -2026-01-10 13:27:00.604 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] -2026-01-10 13:27:00.604 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] -2026-01-10 13:27:00.971 [main] INFO o.a.c.c.C.[.[.[/syslogserve] - Initializing Spring embedded WebApplicationContext -2026-01-10 13:27:00.971 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 2874 ms -2026-01-10 13:27:04.637 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8189"] -2026-01-10 13:27:04.652 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8189 (http) with context path '/syslogserve' -2026-01-10 13:27:04.662 [main] INFO com.SyslogServeMainApp - Started SyslogServeMainApp in 7.133 seconds (JVM running for 12.488) -2026-01-10 13:27:04.693 [main] INFO com.SyslogServeMainApp - Application SyslogServer start ! 
-2026-01-10 13:27:04.694 [main] INFO com.netty.SyslogServer - Starting Syslog server with TCP port 514 and UDP port 514 -2026-01-10 13:27:05.069 [pool-3-thread-2] INFO com.netty.SyslogServer - TCP Syslog server started on port 514 -2026-01-10 13:27:05.069 [pool-3-thread-1] INFO com.netty.SyslogServer - UDP Syslog server started on port 514 -2026-01-10 13:27:05.069 [main] INFO com.netty.SyslogServer - Both TCP and UDP Syslog servers are running -2026-01-10 13:38:58.993 [nioEventLoopGroup-5-1] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.0.103:65442: <0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"} -2026-01-10 13:38:59.375 [nioEventLoopGroup-5-1] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... -2026-01-10 13:38:59.994 [nioEventLoopGroup-5-1] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. 
-2026-01-10 13:39:00.321 [nioEventLoopGroup-5-1] ERROR com.Modules.Device.DeviceProcess - 设备请求的Host IP非系统注册,请联系管理员添加设备信息! -2026-01-10 13:39:00.321 [nioEventLoopGroup-5-1] INFO com.netty.SyslogMessageHandler - syslog message 的请求设备IP:192.168.0.103非系统注册,暂不做处理! -2026-01-10 13:39:00.579 [nioEventLoopGroup-5-1] INFO c.c.s.impl.DeviceUnknownServiceImpl - 更新设备最后发现时间成功,ID: 16 -2026-01-10 13:59:49.596 [HikariPool-1 housekeeper] WARN com.zaxxer.hikari.pool.HikariPool - HikariPool-1 - Thread starvation or clock leap detected (housekeeper delta=5m51s158ms35µs500ns). -2026-01-10 14:08:50.222 [nioEventLoopGroup-5-2] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.0.103:59772: <0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"} -2026-01-10 14:08:55.232 [nioEventLoopGroup-5-2] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection org.postgresql.jdbc.PgConnection@53953cd1 (This 
connection has been closed.). Possibly consider using a shorter maxLifetime value. -2026-01-10 14:25:15.953 [nioEventLoopGroup-5-3] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.0.103:65302: <0> 2026-01-10T13:47:27+08:00 ubuntu log_forward[3419]: {"timestamp":"2026-01-10T13:47:27.249503+0800","flow_id":767115114538067,"community_id":"fFU2gDB2+pyUS6xQpAqqLdPLG4k=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"192.168.2.81","src_port":51018,"dest_ip":"120.241.131.42","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":423808413,"tcp_ack_sequence":3371175627,"ether":{},"host":"szextshort.weixin.qq.com","host_md5":"d7745538302ebc766b77ca8a4f3dd735","uri":"/mmtls/1abfe317","uri_md5":"e889825636e4d22b1d364b6bd6400ad5","agent":"MicroMessenger Client","referer":"","method":"POST","protocol":"HTTP/1.1","req_content_type":"application/octet-stream","request_headers":"accept: */*\r\ncache-control: no-cache\r\nconnection: Keep-Alive\r\ncontent-length: 2579\r\ncontent-type: application/octet-stream\r\nHost: szextshort.weixin.qq.com\r\nUpgrade: mmtls\r\nUser-Agent: MicroMessenger Client\r\n","rsp_content_type":"","response_headers":""} -2026-01-10 14:48:21.561 [nioEventLoopGroup-5-4] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.0.103:64558: <0> 2026-01-10T05:28:32+08:00 ubuntu log_forward[3419]: 
{"timestamp":"2026-01-10T05:28:32.806781+0800","flow_id":1671852309144385,"community_id":"uLeKRLkXu9m0D0DNn6wIg7CcdOs=","serial_num":"CJFBT92","origin":"eno4","xdr_log_type":"http","vxlan_vni":256,"src_ip":"172.16.121.137","src_port":51114,"dest_ip":"110.43.89.7","dest_port":80,"proto":"TCP","app_proto":"http","tcp_sequence":3553898360,"tcp_ack_sequence":3537707565,"ether":{"src_mac":"90:f1:b0:fb:81:a1","dest_mac":"a4:7b:2c:21:03:79"},"host":"rq.lbcct.cloud.duba.net","host_md5":"51cfa6d0981c8eb355a9b3af716da08d","uri":"/query?1767994112","uri_md5":"f28f2c62d0dd01c355caa05815d93d99","referer":"","method":"POST","protocol":"HTTP/1.1","status":200,"req_content_type":"application/x-www-form-urlencoded","request_headers":"host: rq.lbcct.cloud.duba.net\r\naccept: */*\r\ncontent-length: 85\r\ncontent-type: application/x-www-form-urlencoded\r\n","rsp_content_type":"text/plain","response_headers":"server: Tengine/1.5.2\r\ndate: Fri, 09 Jan 2026 21:28:32 GMT\r\ncontent-type: text/plain\r\nContent-Length: 54\r\nConnection: keep-alive\r\nContent-Tag: 1936292435\r\n"} +2026-03-09 18:21:20.224 [background-preinit] INFO o.h.validator.internal.util.Version - HV000001: Hibernate Validator 6.2.5.Final +2026-03-09 18:21:20.224 [main] INFO com.SyslogServeMainApp - Starting SyslogServeMainApp using Java 1.8.0_121 on LAPTOP-ARDUR3N0 with PID 10428 (E:\GIT_GOSAME\ai-security-xdr\haobang-security-xdr\syslog-serve\target\classes started by chenc in E:\GIT_GOSAME\ai-security-xdr\haobang-security-xdr) +2026-03-09 18:21:20.234 [main] INFO com.SyslogServeMainApp - No active profile set, falling back to 1 default profile: "default" +2026-03-09 18:21:22.016 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode +2026-03-09 18:21:22.021 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode. 
+2026-03-09 18:21:22.197 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 163 ms. Found 0 Redis repository interfaces. +2026-03-09 18:21:22.765 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat initialized with port(s): 8189 (http) +2026-03-09 18:21:22.771 [main] INFO o.a.coyote.http11.Http11NioProtocol - Initializing ProtocolHandler ["http-nio-8189"] +2026-03-09 18:21:22.771 [main] INFO o.a.catalina.core.StandardService - Starting service [Tomcat] +2026-03-09 18:21:22.771 [main] INFO o.a.catalina.core.StandardEngine - Starting Servlet engine: [Apache Tomcat/9.0.65] +2026-03-09 18:21:23.163 [main] INFO o.a.c.c.C.[.[.[/syslogserve] - Initializing Spring embedded WebApplicationContext +2026-03-09 18:21:23.163 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - Root WebApplicationContext: initialization completed in 2858 ms +2026-03-09 18:21:27.379 [main] INFO o.a.coyote.http11.Http11NioProtocol - Starting ProtocolHandler ["http-nio-8189"] +2026-03-09 18:21:27.389 [main] INFO o.s.b.w.e.tomcat.TomcatWebServer - Tomcat started on port(s): 8189 (http) with context path '/syslogserve' +2026-03-09 18:21:27.401 [main] INFO com.SyslogServeMainApp - Started SyslogServeMainApp in 7.592 seconds (JVM running for 12.693) +2026-03-09 18:21:27.437 [main] INFO com.SyslogServeMainApp - Application SyslogServer start ! 
+2026-03-09 18:21:27.437 [main] INFO com.netty.SyslogServer - Starting Syslog server with TCP port 514 and UDP port 514 +2026-03-09 18:21:27.959 [pool-3-thread-2] INFO com.netty.SyslogServer - TCP Syslog server started on port 514 +2026-03-09 18:21:27.959 [pool-3-thread-1] INFO com.netty.SyslogServer - UDP Syslog server started on port 514 +2026-03-09 18:21:27.960 [main] INFO com.netty.SyslogServer - Both TCP and UDP Syslog servers are running +2026-03-09 18:21:32.274 [nioEventLoopGroup-5-1] INFO com.netty.SyslogMessageHandler - Received syslog from 192.168.1.19:55610: <0> 2026-01-12T14:37:53+08:00 ubuntu log_forward[3419]: {"flow_id": 1028204815001825, "serial_num": "CJFBT92", "src_ip": "120.238.245.132", "src_port": 60838, "dest_ip": "211.136.192.6", "dest_port": 53, "proto": "UDP", "app_proto": "dns", "direction": "CTS", "attacker_ip": "120.238.245.132", "victim_ip": "211.136.192.6", "rule_id": "0x20001e", "rule_name": "???????????DNS???????", "attack_type": "???????", "severity": "1", "bulletin": "??????????????????????????????????", "detail_info": "????????????????DNSLOG?????????", "vuln_type": "???????", "vuln_desc": "????????????????DNSLOG?????????", "vuln_harm": "????????????????DNSLOG?????????", "tags": "dnslog", "cnnvd_id": null, "cve_id": null, "killchain": "??????", "enable": "????", "attack_result": "???", "attack_method": "???", "site_app": null, "code_language": "???", "att_ck": "TA0002", "timestamp": "2026-01-12T14:37:53.588+0800", "custom": "{}", "feature_field": "", "feature_payload": "", "": null, "payload": "SQkBAAABAAAAAAAAB3BvbGxpbmcHb2FzdGlmeQNjb20AAAEAAQ==", "packet_size": 37, "pcap_file": ""} +2026-03-09 18:21:32.533 [nioEventLoopGroup-5-1] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting... +2026-03-09 18:21:33.099 [nioEventLoopGroup-5-1] INFO com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed. 
diff --git a/haobang-security-xdr/syslog-consumer/doc/realtime-scheduler-design.md b/haobang-security-xdr/syslog-consumer/doc/realtime-scheduler-design.md new file mode 100644 index 0000000..8dedb3f --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/doc/realtime-scheduler-design.md @@ -0,0 +1,177 @@ +# 实时分析调度器设计方案 + +## 概述 + +每个规则根据窗口类型(滚动、滑动、会话)独立管理下次运行时间,而不是所有规则统一每30秒执行一次。 + +## 核心设计 + +### 1. 窗口类型与执行间隔映射 + +| 窗口类型 | 配置参数 | 执行间隔 | 示例 | +|---------|---------|---------|------| +| **TUMBLE**
滚动窗口 | `tumble_window_size` + `tumble_window_size_unit` | 等于窗口大小 | 窗口5分钟 → 每5分钟执行 | +| **HOP**
滑动窗口 | `hop_slide` + `hop_slide_unit` | 等于滑动步长 | 步长5分钟 → 每5分钟执行 | +| **SESSION**
会话窗口 | `session_window_size` + `session_window_size_unit` | 等于会话超时时间 | 超时30分钟 → 每30分钟执行 | +| **NONE**
无窗口 | - | 默认30秒 | 每30秒执行 | + +### 2. 执行流程 + +``` +┌─────────────────────────────────────────────────────────────┐ +│ 应用启动 │ +│ └─> initAllRules() │ +│ └─> 加载所有实时规则 │ +│ └─> 为每个规则初始化下次执行时间(Redis) │ +└─────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ 定时调度(每10秒检查一次) │ +│ └─> checkAndExecuteRules() │ +│ └─> 查询所有实时规则 │ +│ └─> 对每个规则: │ +│ ├─> 获取下次执行时间(Redis) │ +│ ├─> 判断是否到执行时间 │ +│ │ ├─> 是 → 执行规则 → 更新下次执行时间 │ +│ │ └─> 否 → 跳过 │ +│ └─> 继续下一个规则 │ +└─────────────────────────────────────────────────────────────┘ +``` + +### 3. 时间计算逻辑 + +#### TUMBLE(滚动窗口) +``` +下次执行时间 = 当前时间 + 窗口大小 +``` + +**示例**: +- 窗口大小:5分钟 +- 执行时间:10:00:00 +- 下次执行:10:05:00 + +#### HOP(滑动窗口) +``` +下次执行时间 = 当前时间 + 滑动步长 +``` + +**示例**: +- 窗口大小:10分钟,步长:5分钟 +- 执行时间:10:00:00 +- 下次执行:10:05:00 + +#### SESSION(会话窗口) +``` +下次执行时间 = 当前时间 + 会话超时时间 +``` + +**示例**: +- 会话超时:30分钟 +- 执行时间:10:00:00 +- 下次执行:10:30:00 + +## 数据结构 + +### Redis Key 设计 + +``` +rule:next_execute:{ruleId} +``` + +**示例**: +``` +rule:next_execute:rule-001 -> "2026-02-27 10:30:00" +rule:next_execute:rule-002 -> "2026-02-27 10:00:00" +``` + +**过期时间**:30天 + +## 配置示例 + +### application.yml + +```yaml +spring: + redis: + host: localhost + port: 6379 + password: + database: 0 + timeout: 3000 + +analysis: + realtime: + enabled: true + # 检查间隔(秒) - 频率越高越精确,但消耗资源 + check-interval-seconds: 10 + offline: + enabled: true + cron-expression: "0 0 */1 * * ?" +``` + +### pom.xml(添加Redis依赖) + +```xml + + org.springframework.boot + spring-boot-starter-data-redis + +``` + +## 核心组件 + +### 1. RuleExecutionTimeService +规则执行时间管理服务接口,负责: +- 获取规则下次执行时间 +- 更新规则下次执行时间(根据窗口类型计算) +- 初始化规则执行时间 +- 删除规则执行时间 + +### 2. RuleExecutionTimeServiceImpl +规则执行时间管理服务实现(基于Redis),核心方法: +- `calculateTumbleNextExecuteTime()`: 计算滚动窗口下次执行时间 +- `calculateHopNextExecuteTime()`: 计算滑动窗口下次执行时间 +- `calculateSessionNextExecuteTime()`: 计算会话窗口下次执行时间 + +### 3. 
RealtimeAnalysisScheduler +实时分析调度器,核心逻辑: +- 应用启动时初始化所有规则执行时间 +- 每10秒检查一次规则是否需要执行 +- 根据窗口类型独立计算下次执行时间 + +## 日志示例 + +### 初始化日志 +``` +INFO - ========== 初始化实时分析调度器 ========== +INFO - 查询到 3 个实时分析规则 +INFO - 初始化规则: ruleId=001, ruleName=暴力破解检测, windowType=HOP, nextExecuteTime=2026-02-27 10:05:00 +INFO - 初始化规则: ruleId=002, ruleName=端口扫描检测, windowType=TUMBLE, nextExecuteTime=2026-02-27 10:05:00 +INFO - 初始化规则: ruleId=003, ruleName=会话异常检测, windowType=SESSION, nextExecuteTime=2026-02-27 10:30:00 +INFO - ========== 实时分析调度器初始化完成 ========== +``` + +### 执行日志 +``` +INFO - 执行规则: ruleId=001, ruleName=暴力破解检测, nextTime=2026-02-27 10:05:00, now=2026-02-27 10:05:00 +INFO - 滑动窗口下次执行时间: 步长=5m,nextTime=2026-02-27 10:10:00 +INFO - 更新规则下次执行时间,ruleId=001, ruleName=暴力破解检测, windowType=HOP, nextExecuteTime=2026-02-27 10:10:00 +INFO - 本次调度执行规则数: 1, 跳过规则数: 2 +``` + +## 优势 + +1. ✅ **独立周期**:每个规则根据窗口类型独立配置执行周期 +2. ✅ **高性能**:Redis缓存,每次检查只需毫秒级 +3. ✅ **分布式支持**:多节点共享Redis,避免重复执行 +4. ✅ **动态调整**:运行时修改窗口配置,下次执行自动生效 +5. ✅ **资源优化**:避免低频规则频繁执行浪费资源 + +## 注意事项 + +1. **Redis依赖**:需要安装并启动Redis服务 +2. **时间精度**:调度检查间隔默认10秒,可根据需要调整 +3. **窗口配置**:规则必须配置窗口类型和大小参数 +4. **异常恢复**:应用重启后,Redis中记录的执行时间保持不变 +5. **并发控制**:同一规则同时只能有一个实例在执行 diff --git a/haobang-security-xdr/syslog-consumer/docker_run.txt b/haobang-security-xdr/syslog-consumer/docker_run.txt index 3ca58f2..5ed5265 100644 --- a/haobang-security-xdr/syslog-consumer/docker_run.txt +++ b/haobang-security-xdr/syslog-consumer/docker_run.txt @@ -2,17 +2,17 @@ --0.л˴Ŀ¼ cd /opt/syslog/docker/consumer ---ͨDockerfileļ (ã -docker build -f /opt/syslog/docker/consumer/Dockerfile -t syslog-consumer:v1.0 --1.鿴 docker ps -a ---2.ֹͣ ɾ +--2.docker imageļ (Dockerfile ǰĿ¼ +docker build -t syslog-consumer:v1.X.X . + + +--3.ֹͣ ɾ docker stop ct-syslog-consumer && docker rm ct-syslog-consumer ---3.docker imageļ (Dockerfile ǰĿ¼ -docker build -t syslog-consumer:v1.X.X . 
--4.docker ļ docker run --restart unless-stopped -e TZ=Asia/Shanghai -d --name ct-syslog-consumer -p 8089:8089 -v /home/syslog/logs:/app/logs --privileged=true syslog-consumer:v1.X.X diff --git a/haobang-security-xdr/syslog-consumer/pom.xml b/haobang-security-xdr/syslog-consumer/pom.xml index 458989c..16a502f 100644 --- a/haobang-security-xdr/syslog-consumer/pom.xml +++ b/haobang-security-xdr/syslog-consumer/pom.xml @@ -190,6 +190,14 @@ ${junit.version} test + + + + com.dameng + DmJdbcDriver18 + 8.1.2.141 + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/Alarm.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/Alarm.java index e3c947f..a826ea6 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/Alarm.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/Alarm.java @@ -70,9 +70,9 @@ public class Alarm { private Boolean baseFocused; private Boolean isUpdated; private int alarmSource; - private String[] httpReqHeaders; - private String[] httpReqBodys; - private String[] httpRespHeaders; - private String[] httpRespBodys; + private String[] httpReqHeader; + private String[] httpReqBody; + private String[] httpRespHeader; + private String[] httpRespBody; } \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AlarmVisit.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AlarmVisit.java index 5f5f5a8..6541f59 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AlarmVisit.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AlarmVisit.java @@ -71,8 +71,8 @@ public class AlarmVisit { private Boolean baseFocused; private Boolean isUpdated; private int alarmSource; - private String[] httpReqHeaders; - private String[] httpReqBodys; - private String[] httpRespHeaders; - private String[] httpRespBodys; + private String[] 
httpReqHeader; + private String[] httpReqBody; + private String[] httpRespHeader; + private String[] httpRespBody; } \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisAnalysisRule.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisAnalysisRule.java new file mode 100644 index 0000000..36683e4 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisAnalysisRule.java @@ -0,0 +1,144 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; +import java.util.UUID; + +/** + * 分析规则实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisAnalysisRule { + + /** + * 规则唯一标识(UUID) + */ + private String ruleId; + + /** + * 创建用户账号 + */ + private String createUser; + + /** + * 规则名称(界面显示用) + */ + private String ruleName; + + /** + * 分析方式:高级版/基础版 + */ + private String analysisMethod; + + /** + * 运行模式:realtime-实时流式/offline-离线批处理 + */ + private String runMode; + + /** + * 规则输出类型:risk_alarm-风险告警/statistics-统计报表 + */ + private String ruleOutput; + + /** + * 所属组织机构编码 + */ + private String organization; + + /** + * 任务状态:running-正在执行/stopped-已停止/waiting-等待中 + */ + private String taskStatus; + + /** + * 创建部门ID + */ + private Long createDept; + + /** + * 删除标志:0-未删除 1-已删除 + */ + private String delFlag; + + /** + * 记录创建时间(带时区) + */ + private LocalDateTime createTime; + + /** + * 记录更新时间(带时区) + */ + private LocalDateTime updateTime; + + /** + * 创建人ID + */ + private Long createBy; + + /** + * 更新人ID + */ + private Long updateBy; + + /** + * 规则备注说明文本 + */ + private String remark; + + /** + * 多租户隔离ID + */ + private String tenantId; + + /** + * 规则详细描述 + */ + private String ruleDesc; + + /** + * 规则分类编码(预留扩展用) + */ + private Integer ruleType; + + /** + * 规则启用状态(0-禁用/1-启用等) + */ + private Integer ruleStatus; + 
+ /** + * 规则配置内容(JSON或结构化文本) + */ + private String ruleContent; + + /** + * 规则计算表达式(如SQL片段或自定义脚本) + */ + private String ruleExpression; + + /** + * 规则执行优先级(数值越大优先级越高) + */ + private Long priority; + + /** + * 规则标签,多个标签用逗号分隔 + */ + private String tags; + + /** + * 规则版本号,用于版本管理 + */ + private Integer version; + + /** + * 分组ID + */ + private Integer subsetId; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisField.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisField.java new file mode 100644 index 0000000..9f75c66 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisField.java @@ -0,0 +1,144 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +/** + * 分析字段配置实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisField { + + /** + * 主键ID,自增序列 + */ + private Integer id; + + /** + * 关联的规则配置UUID + */ + private String ruleId; + + /** + * 字段类型(如维度/指标/计算字段等) + */ + private String type; + + /** + * 数据源名称,如数据库连接名或数据流标识 + */ + private String dataSource; + + /** + * 数据库名称 + */ + private String database; + + /** + * 表名 + */ + private String tableName; + + /** + * 表别名,用于SQL语句中的表引用 + */ + private String tableAlias; + + /** + * 字段名 + */ + private String columnName; + + /** + * 字段描述 + */ + private String columnDesc; + + /** + * 字段数据类型(如int/varchar/decimal等) + */ + private String dataType; + + /** + * 计算函数名(如SUM/COUNT/自定义函数,NULL表示原始字段) + */ + private String fn; + + /** + * 函数参数列表,JSON数组格式(即使无参数也保留空数组) + */ + private String arguments; + + /** + * 字段显示占位符(用于前端展示的默认文本) + */ + private String placeholder; + + /** + * 基础类型编码(预留分类扩展用) + */ + private Integer baseType; + + /** + * 字段分类ID + */ + private Integer categoryId; + + /** + * 创建部门ID + */ + private Long createDept; + + /** + * 删除标志(0-未删除/1-已删除) + */ + 
private String delFlag; + + /** + * 记录创建时间 + */ + private LocalDateTime createTime; + + /** + * 记录更新时间 + */ + private LocalDateTime updateTime; + + /** + * 创建人ID + */ + private Long createBy; + + /** + * 更新人ID + */ + private Long updateBy; + + /** + * 备注说明 + */ + private String remark; + + /** + * 租户ID,默认000000表示系统级配置 + */ + private String tenantId; + + /** + * 告警字段名(别名) + */ + private String alarmColumnName; + + /** + * 告警字段描述 + */ + private String alarmColumnDesc; +} + diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisFilter.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisFilter.java new file mode 100644 index 0000000..d3f7599 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisFilter.java @@ -0,0 +1,147 @@ +package com.common.entity; + +import com.baomidou.mybatisplus.annotation.*; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +/** + * 分析过滤条件实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +@TableName("analysis_filter") +public class AnalysisFilter { + + /** + * 主键ID + */ + @TableId(value = "id", type = IdType.AUTO) + private Integer id; + + /** + * 规则ID + */ + private String ruleId; + + /** + * 数据源 + */ + private String dataSource; + + /** + * 数据库名 + */ + private String database; + + /** + * 表名 + */ + private String tableName; + + /** + * 表别名 + */ + private String tableAlias; + + /** + * 列名 + */ + private String columnName; + + /** + * 列描述 + */ + private String columnDesc; + + /** + * 数据类型 + */ + private String dataType; + + /** + * 函数名 + */ + private String fn; + + /** + * 函数参数 + */ + private Object arguments; + + /** + * 操作符 + */ + private String operator; + + /** + * 筛选值 + */ + private Object value; + + /** + * 基础类型 + */ + private Integer baseType; + + /** + * 分类ID + */ + private Integer categoryId; + + /** + * 创建部门ID 
+ */ + private Long createDept; + + /** + * 删除标志:0-正常 1-删除 + */ + @TableLogic + private String delFlag; + + /** + * 创建时间 + */ + private LocalDateTime createTime; + + /** + * 更新时间 + */ + private LocalDateTime updateTime; + + /** + * 创建人ID + */ + private Long createBy; + + /** + * 更新人ID + */ + private Long updateBy; + + /** + * 备注信息 + */ + private String remark; + + /** + * 租户ID + */ + private String tenantId; + + /** + * 关联条件逻辑表ID + */ + private Integer condId; + + /** + * 表达式执行顺序号 + */ + private Integer seqNum; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupBy.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupBy.java new file mode 100644 index 0000000..f327677 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupBy.java @@ -0,0 +1,81 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 分组规则配置实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisGroupBy { + + /** + * 主键ID + */ + private Long id; + + /** + * 规则ID + */ + private String ruleId; + + /** + * 分组类型:1-标准分组 2-窗口分组 + */ + private Integer groupType; + + /** + * 窗口类型:tumble/hop/session + */ + private String windowType; + + /** + * 窗口配置(关联AnalysisGroupByWindow) + */ + private AnalysisGroupByWindow window; + + /** + * 创建部门 + */ + private Long createDept; + + /** + * 删除标志:0-未删除,1-已删除 + */ + private String delFlag; + + /** + * 创建时间 + */ + private String createTime; + + /** + * 更新时间 + */ + private String updateTime; + + /** + * 创建人 + */ + private Long createBy; + + /** + * 更新人 + */ + private Long updateBy; + + /** + * 备注 + */ + private String remark; + + /** + * 租户ID + */ + private String tenantId; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByColumn.java 
b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByColumn.java new file mode 100644 index 0000000..912b1db --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByColumn.java @@ -0,0 +1,133 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +/** + * 分析分组字段配置实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisGroupByColumn { + + /** + * 主键ID:自增唯一标识符 + */ + private Integer id; + + /** + * 分组ID:关联分析分组的外键 + */ + private Integer groupById; + + /** + * 数据源名称:数据来源系统标识 + */ + private String dataSource; + + /** + * 数据库名称:字段所在的数据库 + */ + private String database; + + /** + * 表名:字段所在的物理表名 + */ + private String tableName; + + /** + * 表别名:查询时使用的表别名 + */ + private String tableAlias; + + /** + * 字段名:物理字段名称 + */ + private String columnName; + + /** + * 字段描述:业务层面的字段说明 + */ + private String columnDesc; + + /** + * 数据类型:字段的数据类型(如varchar、int4) + */ + private String dataType; + + /** + * 基础类型:字段的业务基础类型编码 + */ + private Integer baseType; + + /** + * 分类ID:字段所属的业务分类ID + */ + private Integer categoryId; + + /** + * 创建部门:创建记录的部门ID + */ + private Long createDept; + + /** + * 删除标志:0-未删除,1-已删除 + */ + private String delFlag; + + /** + * 创建时间:记录创建时间戳 + */ + private LocalDateTime createTime; + + /** + * 更新时间:记录最后更新时间戳 + */ + private LocalDateTime updateTime; + + /** + * 创建人:记录创建者ID + */ + private Long createBy; + + /** + * 更新人:记录最后更新者ID + */ + private Long updateBy; + + /** + * 备注:扩展说明信息 + */ + private String remark; + + /** + * 租户ID:多租户环境下的租户标识 + */ + private String tenantId; + + /** + * 规则ID:关联的业务规则UUID + */ + private String ruleId; + + /** + * 用户组ID:关联的权限用户组ID + */ + private Long groupId; + + /** + * 字段唯一ID:跨表唯一的字段标识符 + */ + private Long fieldId; + + /** + * 排序号:字段展示顺序 + */ + private Integer sort; +} diff --git 
a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByHaving.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByHaving.java new file mode 100644 index 0000000..51d180d --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByHaving.java @@ -0,0 +1,138 @@ +package com.common.entity; + +import com.baomidou.mybatisplus.annotation.*; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +/** + * 分析分组聚合条件配置实体类 + * 存储GROUP BY后的HAVING筛选条件定义 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +@TableName("analysis_group_by_having") +public class AnalysisGroupByHaving { + + /** + * 主键ID + */ + @TableId(value = "id", type = IdType.AUTO) + private Integer id; + + /** + * 分组ID + */ + private Integer groupById; + + /** + * 数据源名称 + */ + private String dataSource; + + /** + * 数据库名称 + */ + private String database; + + /** + * 表名 + */ + private String tableName; + + /** + * 表别名 + */ + private String tableAlias; + + /** + * 字段名 + */ + private String columnName; + + /** + * 字段描述 + */ + private String columnDesc; + + /** + * 字段数据类型 + */ + private String dataType; + + /** + * 聚合函数名(如SUM/COUNT/AVG等) + */ + private String fn; + + /** + * 聚合函数参数列表,JSON数组格式 + */ + private Object arguments; + + /** + * 比较运算符(如>、<、=、>=、<=、IN、LIKE等) + */ + private String operator; + + /** + * 比较值,JSON格式存储 + */ + private Object value; + + /** + * 基础类型标识 + */ + private Integer baseType; + + /** + * 分类ID,用于条件分组归类 + */ + private Integer categoryId; + + /** + * 创建部门ID + */ + private Long createDept; + + /** + * 删除标志:0-未删除 1-已删除 + */ + @TableLogic + private String delFlag; + + /** + * 记录创建时间 + */ + private LocalDateTime createTime; + + /** + * 记录更新时间 + */ + private LocalDateTime updateTime; + + /** + * 创建人ID + */ + private Long createBy; + + /** + * 更新人ID + */ + private Long updateBy; + + /** 
+ * 备注说明 + */ + private String remark; + + /** + * 租户ID + */ + private String tenantId; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByWindow.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByWindow.java new file mode 100644 index 0000000..fab2244 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisGroupByWindow.java @@ -0,0 +1,139 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 分析时间窗口配置实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisGroupByWindow { + + /** + * 主键ID + */ + private Integer id; + + /** + * 分组ID + */ + private Integer groupById; + + /** + * 窗口类型:tumble/hop/session + */ + private String windowType; + + // ============ 滚动窗口配置 ============ + + /** + * 滚动窗口时间类型:second/minute/hour + */ + private String tumbleWindowTimeType; + + /** + * 滚动窗口大小 + */ + private Integer tumbleWindowSize; + + /** + * 滚动窗口单位:s/m/h/d + */ + private String tumbleWindowSizeUnit; + + // ============ 滑动窗口配置 ============ + + /** + * 滑动窗口时间类型 + */ + private String hopWindowTimeType; + + /** + * 滑动窗口大小 + */ + private Integer hopWindowSize; + + /** + * 滑动窗口单位:s/m/h/d + */ + private String hopWindowSizeUnit; + + /** + * 滑动窗口步长 + */ + private Integer hopWindowSlide; + + /** + * 滑动窗口步长单位:s/m/h/d + */ + private String hopWindowSlideUnit; + + /** + * 滑动窗口告警频率:是否每个窗口仅告警一次 + */ + private Boolean hopWindowAlarmOncePerWindow; + + // ============ 会话窗口配置 ============ + + /** + * 会话窗口时间类型 + */ + private String sessionWindowTimeType; + + /** + * 会话窗口超时时间 + */ + private Integer sessionWindowSize; + + /** + * 会话窗口单位:s/m/h/d + */ + private String sessionWindowSizeUnit; + + // ============ 公共字段 ============ + + /** + * 创建部门 + */ + private Long createDept; + + /** + * 删除标志:0-未删除,1-已删除 + */ + private String delFlag; + + 
/** + * 创建时间 + */ + private String createTime; + + /** + * 更新时间 + */ + private String updateTime; + + /** + * 创建人 + */ + private Long createBy; + + /** + * 更新人 + */ + private Long updateBy; + + /** + * 备注 + */ + private String remark; + + /** + * 租户ID + */ + private String tenantId; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisTaskHistory.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisTaskHistory.java new file mode 100644 index 0000000..dfc1329 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisTaskHistory.java @@ -0,0 +1,103 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; +import java.util.UUID; +import java.time.LocalDateTime; + +/** + * 分析规则任务运行历史及状态记录 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisTaskHistory { + + /** + * 记录ID + */ + private Long id; + + /** + * 规则ID + */ + private String ruleId; + + /** + * 开始时间 + */ + private LocalDateTime startTime; + + /** + * 结束时间 + */ + private LocalDateTime endTime; + + /** + * 持续时间(秒) + */ + private Long durationTime; + + /** + * 进度百分比 + */ + private Integer progressPercent; + + /** + * 输入数据量 + */ + private Long inputCount; + + /** + * 输出数据量 + */ + private Long outputCount; + + /** + * 状态(RUNNING/CANCELED/KILLED/COMPLETED/FAILED) + */ + private String status; + + /** + * 创建部门ID + */ + private Long createDept; + + /** + * 删除标志(0-正常,1-删除) + */ + private String delFlag; + + /** + * 创建时间 + */ + private LocalDateTime createTime; + + /** + * 更新时间 + */ + private LocalDateTime updateTime; + + /** + * 创建人ID + */ + private Long createBy; + + /** + * 更新人ID + */ + private Long updateBy; + + /** + * 备注信息 + */ + private String remark; + + /** + * 租户ID + */ + private String tenantId; +} diff --git 
a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisUserDefinedFunction.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisUserDefinedFunction.java new file mode 100644 index 0000000..b202ee8 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisUserDefinedFunction.java @@ -0,0 +1,66 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 用户自定义函数实体类 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisUserDefinedFunction { + + /** + * 函数ID + */ + private String functionId; + + /** + * 规则ID + */ + private String ruleId; + + /** + * 函数名称 + */ + private String functionName; + + /** + * 函数类型:AGGREGATE-聚合函数/SCALAR-标量函数/WINDOW-窗口函数 + */ + private String functionType; + + /** + * 函数描述 + */ + private String functionDesc; + + /** + * 函数实现SQL + */ + private String functionSql; + + /** + * 返回类型 + */ + private String returnType; + + /** + * 参数定义(JSON格式) + */ + private String params; + + /** + * 启用状态 + */ + private Boolean enabled; + + /** + * 删除标志:0-未删除 1-已删除 + */ + private String delFlag; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisWhereCondition.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisWhereCondition.java new file mode 100644 index 0000000..bb5bdc9 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/AnalysisWhereCondition.java @@ -0,0 +1,83 @@ +package com.common.entity; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +/** + * WHERE条件逻辑表实体类(仅存储条件逻辑组信息) + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AnalysisWhereCondition { + + /** + * 条件ID + */ + private Integer condId; + + /** + * 
规则ID + */ + private String ruleId; + + /** + * 逻辑运算符:AND, OR + */ + private String logicalOp; + + /** + * 序号 + */ + private Integer seqNum; + + /** + * 父节点ID + */ + private Integer parentCondId; + + /** + * 创建部门ID + */ + private Long createDept; + + /** + * 删除标志:0-未删除 1-已删除 + */ + private String delFlag; + + /** + * 创建时间 + */ + private LocalDateTime createTime; + + /** + * 更新时间 + */ + private LocalDateTime updateTime; + + /** + * 创建人ID + */ + private Long createBy; + + /** + * 更新人ID + */ + private Long updateBy; + + /** + * 备注信息 + */ + private String remark; + + /** + * 租户ID + */ + private String tenantId; +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/GroupedSyslogData.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/GroupedSyslogData.java index 1540f48..dc2b890 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/GroupedSyslogData.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/entity/GroupedSyslogData.java @@ -29,6 +29,7 @@ public class GroupedSyslogData { private String[] httpReqBodys; private String[] httpRespHeaders; private String[] httpRespBodys; + private String dnsInfo; private String victimIpsStr; private Integer[] allAttackResults; // 所有不同的attack_result值 private Integer mostCommonAttackResult; // 最常见的attack_result值 diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmMapper.java index 944154d..b8a5ffc 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmMapper.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmMapper.java @@ -15,10 +15,10 @@ public interface AlarmMapper { "INSERT INTO alarm (", "id, created_at, alarm_name, alarm_level, alarm_type, ", "alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, victim_web_url, ", - 
"device_id, comment,origin_log_ids,log_start_at, log_end_at, http_status, ", + "device_id, comment,origin_log_ids,log_start_at, log_end_at, window_time, http_status, ", "attack_port, victim_port, attack_method, etl_time, log_count, ", "attack_chain_phase, disposition_advice, attack_direction, ", - "judged_state, disposed_state, attack_result, fall, payload, engine_type, " , + "judged_state, disposed_state, attack_result, fall, payload, dns_info, engine_type, " , "http_req_header , http_req_body,http_resp_header , http_resp_body ", ") VALUES ", "", @@ -30,18 +30,18 @@ public interface AlarmMapper { "#{item.deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.comment}, " , "#{item.originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.logStartAt}, #{item.logEndAt}, #{item.httpStatus}, ", + "#{item.logStartAt}, #{item.logEndAt}, #{item.windowTime}, #{item.httpStatus}, ", "#{item.attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.attackMethod}, #{item.etlTime}, #{item.logCount}, ", "#{item.attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.dispositionAdvice}, #{item.attackDirection}, ", "#{item.judgedState}, #{item.disposedState}, #{item.attackResult}, #{item.fall}, ", - "#{item.payload}, #{item.engineType}, ", - "#{item.httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler})", + "#{item.payload},#{item.dnsInfo}, #{item.engineType}, ", + "#{item.httpReqHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", + "#{item.httpReqBody, 
typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", + "#{item.httpRespHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", + "#{item.httpRespBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler})", "", ""}) void batchInsert(@Param("list") List alarmList); @@ -52,10 +52,10 @@ public interface AlarmMapper { @Insert("INSERT INTO alarm (" + "id, created_at, alarm_name, alarm_level, alarm_type, " + "alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, victim_web_url, " + - "device_id, comment,origin_log_ids, log_start_at, log_end_at, http_status, " + + "device_id, comment,origin_log_ids, log_start_at, log_end_at, window_time, http_status, " + "attack_port, victim_port, attack_method, etl_time, log_count, " + "attack_chain_phase, disposition_advice, attack_direction, " + - "judged_state, disposed_state, attack_result, fall, payload, engine_type, " + + "judged_state, disposed_state, attack_result, fall, payload, dns_info, engine_type, " + "http_req_header , http_req_body,http_resp_header , http_resp_body " + ") VALUES (" + "#{id}, #{createdAt}, #{alarmName}, #{alarmLevel}, " + @@ -66,17 +66,17 @@ public interface AlarmMapper { "#{deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{comment}, " + "#{originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{logStartAt}, #{logEndAt}, #{httpStatus}, " + + "#{logStartAt}, #{logEndAt}, #{windowTime},#{httpStatus}, " + "#{attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{attackMethod}, #{etlTime}, #{logCount}, " + "#{attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{dispositionAdvice}, #{attackDirection}, " + - "#{judgedState}, #{disposedState}, #{attackResult}, #{fall}, #{payload}, #{engineType}, " + - "#{httpReqHeaders, 
typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler} " + + "#{judgedState}, #{disposedState}, #{attackResult}, #{fall}, #{payload}, #{dnsInfo},#{engineType}, " + + "#{httpReqHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + + "#{httpReqBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + + "#{httpRespHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + + "#{httpRespBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler} " + ")") void insert(Alarm alarm); } \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmVisitMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmVisitMapper.java index de00ea9..00b1948 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmVisitMapper.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AlarmVisitMapper.java @@ -19,10 +19,10 @@ public interface AlarmVisitMapper { "INSERT INTO alarm_visit (", "id, created_at, alarm_name, alarm_level, alarm_type, ", "alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, victim_web_url, ", - "device_id, comment,origin_log_ids,log_start_at, log_end_at, http_status, ", + "device_id, comment,origin_log_ids,log_start_at, log_end_at,window_time, http_status, ", "attack_port, victim_port, attack_method, etl_time, log_count, ", "attack_chain_phase, disposition_advice, attack_direction, ", - "judged_state, disposed_state, attack_result, fall, payload, " , + "judged_state, disposed_state, attack_result, fall, payload, dns_info, engine_type, " , "http_req_header , http_req_body,http_resp_header , http_resp_body 
", ") VALUES ", "", @@ -34,18 +34,18 @@ public interface AlarmVisitMapper { "#{item.deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.comment}, " , "#{item.originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.logStartAt}, #{item.logEndAt}, #{item.httpStatus}, ", + "#{item.logStartAt}, #{item.logEndAt},, #{item.windowTime} #{item.httpStatus}, ", "#{item.attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.attackMethod}, #{item.etlTime}, #{item.logCount}, ", "#{item.attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ", "#{item.dispositionAdvice}, #{item.attackDirection}, ", "#{item.judgedState}, #{item.disposedState}, #{item.attackResult}, #{item.fall}, ", - "#{item.payload}, ", - "#{item.httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", - "#{item.httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}) ", + "#{item.payload},#{item.dnsInfo}, #{item.engineType}, ", + "#{item.httpReqHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", + "#{item.httpReqBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", + "#{item.httpRespHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ", + "#{item.httpRespBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}) ", "", ""}) void batchInsert(@Param("list") List alarmList); @@ -56,10 +56,10 @@ public interface AlarmVisitMapper { @Insert("INSERT INTO alarm_visit (" + "id, created_at, alarm_name, alarm_level, alarm_type, " + "alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, victim_web_url, " + - "device_id, 
comment,origin_log_ids, log_start_at, log_end_at, http_status, " + + "device_id, comment,origin_log_ids, log_start_at, log_end_at, window_time,http_status, " + "attack_port, victim_port, attack_method, etl_time, log_count, " + "attack_chain_phase, disposition_advice, attack_direction, " + - "judged_state, disposed_state, attack_result, fall, payload, " + + "judged_state, disposed_state, attack_result, fall, payload, dns_info,engine_type, " + "http_req_header , http_req_body,http_resp_header , http_resp_body " + ") VALUES (" + "#{id}, #{createdAt}, #{alarmName}, #{alarmLevel}, " + @@ -70,17 +70,17 @@ public interface AlarmVisitMapper { "#{deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{comment}, " + "#{originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{logStartAt}, #{logEndAt}, #{httpStatus}, " + + "#{logStartAt}, #{logEndAt}, #{windowTime}, #{httpStatus}, " + "#{attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{attackMethod}, #{etlTime}, #{logCount}, " + "#{attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " + "#{dispositionAdvice}, #{attackDirection}, " + - "#{judgedState}, #{disposedState}, #{attackResult}, #{fall}, #{payload}, " + - "#{httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + - "#{httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler} " + + "#{judgedState}, #{disposedState}, #{attackResult}, #{fall}, #{payload},#{dnsInfo} ,#{engineType}, " + + "#{httpReqHeader, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + + "#{httpReqBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + + "#{httpRespHeader, 
typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " + + "#{httpRespBody, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler} " + ")") void insert(AlarmVisit alarm); } \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisAnalysisRuleMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisAnalysisRuleMapper.java new file mode 100644 index 0000000..dc17e09 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisAnalysisRuleMapper.java @@ -0,0 +1,50 @@ +package com.common.mapper; + +import com.common.entity.AnalysisAnalysisRule; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; +import java.util.UUID; +/** + * 分析规则Mapper + */ +@Mapper +public interface AnalysisAnalysisRuleMapper { + + /** + * 查询指定运行模式的活动规则 + * + * @param runMode 运行模式:realtime-实时/offline-离线 + * @return 规则列表 + */ + List selectActiveRulesByRunMode(@Param("runMode") String runMode); + + /** + * 根据规则ID查询规则 + * + * @param ruleId 规则ID + * @return 规则信息 + */ + AnalysisAnalysisRule selectByRuleId(@Param("ruleId") String ruleId); + + /** + * 更新规则任务状态 + * + * @param ruleId 规则ID + * @param taskStatus 任务状态 + * @param updateBy 更新人ID + * @return 影响行数 + */ + int updateTaskStatus(@Param("ruleId") String ruleId, + @Param("taskStatus") String taskStatus, + @Param("updateBy") Long updateBy); + + /** + * 根据ID查询规则 + * + * @param ruleId 规则ID + * @return 规则信息 + */ + AnalysisAnalysisRule selectById(@Param("ruleId") String ruleId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisFieldMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisFieldMapper.java new file mode 100644 index 0000000..7c81bdf --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisFieldMapper.java @@ -0,0 +1,38 @@ 
+package com.common.mapper; + +import com.common.entity.AnalysisField; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; +import java.util.UUID; +/** + * 分析字段配置Mapper + */ +@Mapper +public interface AnalysisFieldMapper { + + /** + * 根据规则ID查询字段配置 + * + * @param ruleId 规则ID + * @return 字段列表 + */ + List selectByRuleId(@Param("ruleId") String ruleId); + + /** + * 查询规则中用于SELECT的字段 + * + * @param ruleId 规则ID + * @return 字段列表 + */ + List selectSelectFieldsByRuleId(@Param("ruleId") String ruleId); + + /** + * 查询规则中用于GROUP BY的字段 + * + * @param ruleId 规则ID + * @return 字段列表 + */ + List selectGroupByFieldsByRuleId(@Param("ruleId") String ruleId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisFilterMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisFilterMapper.java new file mode 100644 index 0000000..22e1f47 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisFilterMapper.java @@ -0,0 +1,38 @@ +package com.common.mapper; + +import com.common.entity.AnalysisFilter; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * 分析过滤条件Mapper + */ +@Mapper +public interface AnalysisFilterMapper { + + /** + * 根据规则ID查询过滤条件 + * + * @param ruleId 规则ID + * @return 过滤条件列表 + */ + List selectByRuleId(@Param("ruleId") String ruleId); + + /** + * 根据字段ID查询过滤条件 + * + * @param fieldId 字段ID + * @return 过滤条件列表 + */ + List selectByFieldId(@Param("fieldId") Long fieldId); + + /** + * 根据条件ID查询过滤条件(与where_condition表关联) + * + * @param condId 条件ID + * @return 过滤条件列表 + */ + List selectByCondId(@Param("condId") Integer condId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByColumnMapper.java 
b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByColumnMapper.java new file mode 100644 index 0000000..b340963 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByColumnMapper.java @@ -0,0 +1,38 @@ +package com.common.mapper; + +import com.common.entity.AnalysisGroupByColumn; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * 分组字段配置Mapper + */ +@Mapper +public interface AnalysisGroupByColumnMapper { + + /** + * 根据规则ID查询分组字段配置 + * + * @param ruleId 规则ID + * @return 分组字段列表 + */ + List selectByRuleId(@Param("ruleId") String ruleId); + + /** + * 根据分组ID查询分组字段配置 + * + * @param groupById 分组ID + * @return 分组字段列表 + */ + List selectByGroupById(@Param("groupById") Integer groupById); + + /** + * 根据用户组ID查询分组字段配置 + * + * @param groupId 用户组ID + * @return 分组字段列表 + */ + List selectByGroupId(@Param("groupId") Long groupId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByHavingMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByHavingMapper.java new file mode 100644 index 0000000..4600634 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByHavingMapper.java @@ -0,0 +1,30 @@ +package com.common.mapper; + +import com.common.entity.AnalysisGroupByHaving; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * 分析分组聚合条件Mapper + */ +@Mapper +public interface AnalysisGroupByHavingMapper { + + /** + * 根据分组ID查询HAVING条件 + * + * @param groupById 分组ID + * @return HAVING条件列表 + */ + List selectByGroupById(@Param("groupById") Integer groupById); + + /** + * 根据规则ID查询HAVING条件(通过关联分组表) + * + * @param ruleId 规则ID + * @return HAVING条件列表 + */ + List selectByRuleId(@Param("ruleId") String ruleId); +} diff --git 
a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByMapper.java new file mode 100644 index 0000000..a851365 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByMapper.java @@ -0,0 +1,22 @@ +package com.common.mapper; + +import com.common.entity.AnalysisGroupBy; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * 分组规则配置Mapper + */ +@Mapper +public interface AnalysisGroupByMapper { + + /** + * 根据规则ID查询分组配置 + * + * @param ruleId 规则ID + * @return 分组配置列表 + */ + List selectByRuleId(@Param("ruleId") String ruleId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByWindowMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByWindowMapper.java new file mode 100644 index 0000000..78abe6f --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisGroupByWindowMapper.java @@ -0,0 +1,20 @@ +package com.common.mapper; + +import com.common.entity.AnalysisGroupByWindow; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +/** + * 分析时间窗口配置Mapper + */ +@Mapper +public interface AnalysisGroupByWindowMapper { + + /** + * 根据分组ID查询窗口配置 + * + * @param groupById 分组ID + * @return 窗口配置 + */ + AnalysisGroupByWindow selectByGroupById(@Param("groupById") Integer groupById); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisTaskHistoryMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisTaskHistoryMapper.java new file mode 100644 index 0000000..643bf8d --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisTaskHistoryMapper.java @@ -0,0 +1,50 @@ 
+package com.common.mapper; + +import com.common.entity.AnalysisTaskHistory; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * 分析任务历史Mapper + */ +@Mapper +public interface AnalysisTaskHistoryMapper { + + /** + * 插入任务历史记录 + * + * @param history 任务历史 + * @return 影响行数 + */ + int insert(AnalysisTaskHistory history); + + /** + * 更新任务历史记录 + * + * @param history 任务历史 + * @return 影响行数 + */ + int update(AnalysisTaskHistory history); + + /** + * 根据规则ID查询最近的任务历史 + * + * @param ruleId 规则ID + * @param limit 查询数量 + * @return 任务历史列表 + */ + List selectRecentByRuleId(@Param("ruleId") String ruleId, + @Param("limit") int limit); + + /** + * 根据规则ID和状态查询任务历史 + * + * @param ruleId 规则ID + * @param status 状态 + * @return 任务历史 + */ + AnalysisTaskHistory selectByRuleIdAndStatus(@Param("ruleId") String ruleId, + @Param("status") String status); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisWhereConditionMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisWhereConditionMapper.java new file mode 100644 index 0000000..fb4c1a0 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/AnalysisWhereConditionMapper.java @@ -0,0 +1,39 @@ +package com.common.mapper; + +import com.common.entity.AnalysisWhereCondition; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; +import java.util.UUID; + +/** + * WHERE条件Mapper + */ +@Mapper +public interface AnalysisWhereConditionMapper { + + /** + * 根据规则ID查询WHERE条件 + * + * @param ruleId 规则ID + * @return WHERE条件列表 + */ + List selectByRuleId(@Param("ruleId") String ruleId); + + /** + * 查询根节点条件(无父节点) + * + * @param ruleId 规则ID + * @return 根条件列表 + */ + List selectRootConditions(@Param("ruleId") String ruleId); + + /** + * 查询指定条件的子条件 + * + * @param parentCondId 父条件ID + * @return 子条件列表 + */ + List 
selectChildConditions(@Param("parentCondId") String parentCondId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalAlarmMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalAlarmMapper.java index 9f1ce29..e11ffaa 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalAlarmMapper.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalAlarmMapper.java @@ -71,6 +71,7 @@ public interface SyslogNormalAlarmMapper { "ARRAY_AGG(DISTINCT http_req_body) as httpReqBodys, " + "ARRAY_AGG(DISTINCT http_resp_header) as httpRespHeaders, " + "ARRAY_AGG(DISTINCT http_resp_body) as httpRespBodys, " + + "MODE() WITHIN GROUP (ORDER BY dest_domain) as dns_info, " + "STRING_AGG(DISTINCT COALESCE(host(dest_ip)::text, ''), ',') as victim_ips_str " + "FROM syslog_normal_alarm " + "WHERE log_time >= #{startTime} AND log_time < #{endTime} " + diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalDataMapper.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalDataMapper.java index fef684c..358218c 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalDataMapper.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/mapper/SyslogNormalDataMapper.java @@ -68,6 +68,7 @@ public interface SyslogNormalDataMapper { "ARRAY_AGG(DISTINCT http_req_body) as httpReqBodys, " + "ARRAY_AGG(DISTINCT http_resp_header) as httpRespHeaders, " + "ARRAY_AGG(DISTINCT http_resp_body) as httpRespBodys, " + + "MODE() WITHIN GROUP (ORDER BY dest_domain) as dns_info, " + "STRING_AGG(DISTINCT COALESCE(host(dest_ip)::text, ''), ',') as victim_ips_str " + "FROM syslog_normal_data " + "WHERE log_time >= #{startTime} AND log_time < #{endTime} " + diff --git 
a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/ETLOrchestrator.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/ETLOrchestrator.java index 2244880..460f172 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/ETLOrchestrator.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/ETLOrchestrator.java @@ -33,8 +33,9 @@ public class ETLOrchestrator { private NormalizeRuleHitTimeService normalizeRuleHitTimeService; /** * 定时任务 - 从每小时第1分钟开始,5分钟间隔执行 + * 20260317:暂定硬规则关联分析 */ - @Scheduled(cron = "0 1/5 * * * ?") + //@Scheduled(cron = "0 1/5 * * * ?") public void scheduledETL() { long startTime = System.currentTimeMillis(); @@ -46,7 +47,7 @@ public class ETLOrchestrator { //泛化标准数据告警降噪任务 try { //retryHandler.executeWithRetry(() -> dataExtractor.extractAndProcess24HoursGroupedData()); - retryHandler.executeWithRetry(() -> dataExtractor.extractAndProcessQueryHoursGroupedData(strStartTime,strEndTime )); + //retryHandler.executeWithRetry(() -> dataExtractor.extractAndProcessQueryHoursGroupedData(strStartTime,strEndTime )); retryHandler.executeWithRetry(() -> dataExtractor.extractAndProcessQueryHoursAlarm(strStartTime,strEndTime )); long endTime = System.currentTimeMillis(); long duration = (endTime - startTime) / 1000; diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/OfflineAnalysisScheduler.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/OfflineAnalysisScheduler.java new file mode 100644 index 0000000..41479c3 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/OfflineAnalysisScheduler.java @@ -0,0 +1,62 @@ +package com.common.schedule; + +import com.common.service.AnalysisRuleService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import 
org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.util.List; +import java.util.Map; + +/** + * 离线分析定时任务 + */ +@Slf4j +@Component +public class OfflineAnalysisScheduler { + + @Autowired + private AnalysisRuleService analysisRuleService; + + @Value("${analysis.offline.enabled:true}") + private boolean offlineEnabled; + + /** + * 定时执行离线分析(使用cron表达式,默认每小时执行一次) + * 具体分析规则运行需要根据配置运行时间周期进行,离线暂停 + */ + // @Scheduled(cron = "${analysis.offline.cron-expression:0 0 */1 * * ?}") + public void executeOfflineAnalysis() { + if (!offlineEnabled) { + log.debug("离线分析引擎已禁用,跳过执行"); + return; + } + + try { + log.info("========== 开始执行离线分析任务 =========="); + long startTime = System.currentTimeMillis(); + + List> results = analysisRuleService.executeOfflineAnalysis(); + + long endTime = System.currentTimeMillis(); + long duration = endTime - startTime; + + log.info("========== 离线分析任务完成,耗时: {} ms,处理规则数: {} ==========", + duration, results.size()); + + // 输出执行结果摘要 + for (Map result : results) { + log.info("规则: {}, 状态: {}, 处理记录数: {}, 生成告警数: {}", + result.get("ruleName"), + result.get("status"), + result.get("processedCount"), + result.get("alarmCount")); + } + + } catch (Exception e) { + log.error("执行离线分析任务失败", e); + } + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/RealtimeAnalysisScheduler.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/RealtimeAnalysisScheduler.java new file mode 100644 index 0000000..2d03814 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/schedule/RealtimeAnalysisScheduler.java @@ -0,0 +1,196 @@ +package com.common.schedule; + +import com.common.entity.AnalysisAnalysisRule; +import com.common.entity.AnalysisGroupBy; +import com.common.entity.AnalysisGroupByWindow; +import com.common.mapper.AnalysisAnalysisRuleMapper; +import com.common.mapper.AnalysisGroupByMapper; +import 
com.common.mapper.AnalysisGroupByWindowMapper; +import com.common.service.AnalysisRuleService; +import com.common.service.RuleExecutionTimeService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.time.LocalDateTime; +import java.util.List; +import java.util.Map; + +/** + * 实时分析定时任务调度器 + * 每个规则根据窗口类型(滚动、滑动、会话)独立管理下次运行时间 + */ +@Slf4j +@Component +public class RealtimeAnalysisScheduler { + + @Autowired + private AnalysisRuleService analysisRuleService; + + @Autowired + private RuleExecutionTimeService ruleExecutionTimeService; + + @Autowired + private AnalysisAnalysisRuleMapper ruleMapper; + + @Autowired + private AnalysisGroupByMapper groupByMapper; + + @Autowired + private AnalysisGroupByWindowMapper groupByWindowMapper; + + @Value("${analysis.realtime.enabled:true}") + private boolean realtimeEnabled; + + @Value("${analysis.realtime.check-interval-seconds:10}") + private int checkIntervalSeconds; + + /** + * 应用启动时初始化所有规则的执行时间 + */ + @PostConstruct + public void init() { + if (!realtimeEnabled) { + log.info("实时分析引擎已禁用,跳过初始化"); + return; + } + + log.info("========== 初始化实时分析调度器 =========="); + try { + initAllRules(); + log.info("========== 实时分析调度器初始化完成 =========="); + } catch (Exception e) { + log.error("初始化实时分析调度器失败", e); + } + } + + /** + * 定时检查规则是否需要执行(默认每10秒检查一次) + * 根据窗口类型(滚动、滑动、会话)独立计算下次执行时间 + */ + @Scheduled(fixedDelayString = "${analysis.realtime.check-interval-seconds:10}000") + public void checkAndExecuteRules() { + if (!realtimeEnabled) { + return; + } + + try { + LocalDateTime now = LocalDateTime.now(); + + // 查询所有启用的实时规则 + List rules = ruleMapper.selectActiveRulesByRunMode("realtime"); + + if (rules.isEmpty()) { + return; + } + + int executedCount 
= 0; + int skippedCount = 0; + + for (AnalysisAnalysisRule rule : rules) { + try { + // 获取规则下次执行时间 + LocalDateTime nextTime = ruleExecutionTimeService.getNextExecuteTime(rule.getRuleId()); + + // 如果未初始化或已到执行时间 + if (nextTime == null || !nextTime.isAfter(now)) { + log.info("执行规则: ruleId={}, ruleName={}, nextTime={}, now={}", + rule.getRuleId(), rule.getRuleName(), + nextTime, now); + + // 执行规则 + Map result = analysisRuleService.executeRealtimeRule(rule.getRuleId()); + + // 计算下次执行时间 + updateNextExecuteTime(rule); + executedCount++; + } else { + skippedCount++; + log.debug("规则未到执行时间: ruleId={}, nextTime={}, now={}, diff={}", + rule.getRuleId(), nextTime, now, + java.time.Duration.between(now, nextTime).getSeconds()); + } + } catch (Exception e) { + log.error("检查和执行规则失败: ruleId={}", rule.getRuleId(), e); + } + } + + if (executedCount > 0) { + log.info("本次调度执行规则数: {}, 跳过规则数: {}", executedCount, skippedCount); + } + + } catch (Exception e) { + log.error("检查和执行规则失败", e); + } + } + + /** + * 初始化所有规则的执行时间 + */ + private void initAllRules() { + List rules = ruleMapper.selectActiveRulesByRunMode("realtime"); + log.info("查询到 {} 个实时分析规则", rules.size()); + + for (AnalysisAnalysisRule rule : rules) { + try { + // 加载窗口配置 + AnalysisGroupByWindow groupByWindow = loadGroupByWindow(rule.getRuleId()); + + // 初始化下次执行时间 + ruleExecutionTimeService.initRuleExecuteTime(rule, groupByWindow); + + log.info("初始化规则: ruleId={}, ruleName={}, windowType={}", + rule.getRuleId(), rule.getRuleName(), + groupByWindow != null ? 
groupByWindow.getWindowType() : "NONE"); + } catch (Exception e) { + log.error("初始化规则执行时间失败: ruleId={}", rule.getRuleId(), e); + } + } + } + + /** + * 加载规则的窗口配置 + */ + private AnalysisGroupByWindow loadGroupByWindow(String ruleId) { + try { + List groupByList = groupByMapper.selectByRuleId(ruleId); + if (groupByList != null && !groupByList.isEmpty()) { + AnalysisGroupBy groupBy = groupByList.get(0); + if (groupBy.getId() != null) { + return groupByWindowMapper.selectByGroupById(groupBy.getId().intValue()); + } + } + return null; + } catch (Exception e) { + log.error("加载窗口配置失败: ruleId={}", ruleId, e); + return null; + } + } + + /** + * 更新规则下次执行时间 + */ + private void updateNextExecuteTime(AnalysisAnalysisRule rule) { + try { + // 加载窗口配置 + AnalysisGroupByWindow groupByWindow = loadGroupByWindow(rule.getRuleId()); + + // 更新下次执行时间 + ruleExecutionTimeService.updateNextExecuteTime(rule, groupByWindow); + } catch (Exception e) { + log.error("更新规则下次执行时间失败: ruleId={}", rule.getRuleId(), e); + } + } + + /** + * 应用关闭时清理 + */ + @PreDestroy + public void destroy() { + log.info("========== 关闭实时分析调度器 =========="); + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AccessLogAlertService.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AccessLogAlertService.java index a815081..01af5f6 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AccessLogAlertService.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AccessLogAlertService.java @@ -6,6 +6,7 @@ import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.common.entity.AlarmVisit; +import com.common.entity.DeviceDevice; import com.common.entity.SecExceptionAlgorithm; import com.common.entity.SyslogNormalData; import lombok.extern.slf4j.Slf4j; @@ -36,6 +37,10 @@ import com.common.util.AlgorithmResultParser; @EnableScheduling @EnableAsync 
public class AccessLogAlertService { + + @Autowired + public static DeviceDeviceService deviceDeviceService ; + @Autowired private AlgorithmResultParser algorithmResultParser; @Autowired @@ -63,7 +68,7 @@ public class AccessLogAlertService { @PostConstruct public void init() { // 初始化时设置为当前时间减2分钟 - lastProcessTime = LocalDateTime.now().minusMinutes(2); + lastProcessTime = LocalDateTime.now().minusMinutes(1); log.info("初始化AccessLogAlertService,上次处理时间: {}", lastProcessTime); // 加载启用的算法配置到缓存 @@ -93,7 +98,7 @@ public class AccessLogAlertService { /** * 安全的定时任务入口 */ - @Scheduled(cron = "0 */2 * * * ?") + @Scheduled(cron = "0 */1 * * * ?") public void safeProcessTask() { if (processing.compareAndSet(false, true)) { try { @@ -108,7 +113,7 @@ public class AccessLogAlertService { /** * 定时任务入口 - 每2分钟执行一次 */ - @Scheduled(cron = "0 */2 * * * ?") + @Scheduled(cron = "0 */1 * * * ?") @Async public void processAccessLogAlert() { log.info("开始执行访问日志告警处理任务"); @@ -351,8 +356,11 @@ public class AccessLogAlertService { .judgedState(0) .disposedState(0) .dispositionAdvice("研判后处置") + .dnsInfo(alarmResult.getString("host")) .build(); + //补充返回结果的原始日志字段 + AddOriginLogField(algorithm.getAlgorithmName(),alarmVisit,alarmResult); // 保存告警记录 alarmVisitMapper.insert(alarmVisit); alarmCount++; @@ -366,6 +374,8 @@ public class AccessLogAlertService { } } + + /** * 解析时间字符串 */ @@ -374,7 +384,6 @@ public class AccessLogAlertService { if (timeStr == null || timeStr.isEmpty()) { return LocalDateTime.now(); } - // 尝试多种时间格式 try { return LocalDateTime.parse(timeStr); @@ -392,6 +401,54 @@ public class AccessLogAlertService { } + /** + * 补充返回结果的原始日志字段 + * @param AlgorithmName + * @param alarmVisit + * @param alarmResult + * @return + */ + private boolean AddOriginLogField(String AlgorithmName, AlarmVisit alarmVisit ,JSONObject alarmResult ) + { + try { + JSONObject originLogObject= alarmResult.getJSONObject("origin_log"); + if(originLogObject.isEmpty()) { + log.debug("算法:{},ID:{} ,AlarmNme:{} 没有返回 
origin_log节点.",AlgorithmName, alarmVisit.getId(), alarmVisit.getAlarmName()); + return false; + } + alarmVisit.setAttackPort( new Integer[]{alarmResult.getInteger("_source.sport")} ); + alarmVisit.setVictimPort( new Integer[]{alarmResult.getInteger("_source.dport")} ); + alarmVisit.setAttackMethod(alarmResult.getString("_source.method") ); + String deviceIp= alarmResult.getString("_source.device_ip"); + //alarmVisit.setDeviceId( new Integer[]{ getDeviceID(deviceIp)} ); + alarmVisit.setHttpStatus( alarmResult.getString("_source.status")); + return true; + } catch (Exception e) { + log.error("算法:{} 补充原始记录日志字段异常。error:{} ",AlgorithmName,e.getMessage(), e ); + return false; + } + } + + public int getDeviceID(String source_ip) + { + //默认deviceId =-1 + int deviceId=-1 ; + List deviceList= deviceDeviceService.getByIpSafely(source_ip); + if(deviceList.isEmpty()) { + return deviceId; + } + if(deviceList.size()>1) + { + log.error("设备请求的Host IP注册超过一条记录,请联系管理员处理!"); + return deviceId; + } + return deviceList.get(0).getId(); + } + + + + + } \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AnalysisEngine.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AnalysisEngine.java new file mode 100644 index 0000000..b1c849e --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AnalysisEngine.java @@ -0,0 +1,42 @@ +package com.common.service; + +import com.common.entity.AnalysisAnalysisRule; + +import java.util.List; +import java.util.Map; + +/** + * 分析引擎接口 + */ +public interface AnalysisEngine { + + /** + * 执行分析规则 + * + * @param rule 规则 + * @return 处理结果 + */ + Map executeRule(AnalysisAnalysisRule rule); + + /** + * 批量执行规则 + * + * @param rules 规则列表 + * @return 处理结果列表 + */ + List> executeRules(List rules); + + /** + * 停止指定规则 + * + * @param ruleId 规则ID + */ + void stopRule(String ruleId); + + /** + * 获取引擎运行模式 + * + * @return 运行模式 + */ + String getRunMode(); +} diff 
--git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AnalysisRuleService.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AnalysisRuleService.java new file mode 100644 index 0000000..50bb9e6 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/AnalysisRuleService.java @@ -0,0 +1,50 @@ +package com.common.service; + +import com.common.entity.AnalysisAnalysisRule; + +import java.util.List; +import java.util.Map; + +/** + * 分析规则服务接口 + */ +public interface AnalysisRuleService { + + /** + * 执行实时分析 + * + * @return 执行结果 + */ + List> executeRealtimeAnalysis(); + + /** + * 执行离线分析 + * + * @return 执行结果 + */ + List> executeOfflineAnalysis(); + + /** + * 查询活动规则 + * + * @param runMode 运行模式 + * @return 规则列表 + */ + List getActiveRules(String runMode); + + /** + * 停止规则 + * + * @param ruleId 规则ID + */ + void stopRule(String ruleId); + + + /** + * 执行单个实时分析规则 + * + * @param ruleId 规则ID + * @return 执行结果 + */ + Map executeRealtimeRule(String ruleId); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/DataTransformer.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/DataTransformer.java index a7cb8c5..81ffb0b 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/DataTransformer.java +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/DataTransformer.java @@ -84,7 +84,7 @@ public class DataTransformer { .logStartAt(groupedData.getMinLogTime()) .logEndAt(groupedData.getMaxLogTime()) .httpStatus(convertHttpStatus(groupedData.getHttpStatusCodes())) - .dnsInfo(null) + .dnsInfo(groupedData.getDnsInfo()) .accountInfo(null) .attackerInfo(null) .victimInfo(null) @@ -114,10 +114,10 @@ public class DataTransformer { .focused(false) .baseFocused(false) .isUpdated(false) - .httpReqHeaders(groupedData.getHttpReqHeaders()) - .httpReqBodys(groupedData.getHttpReqBodys()) - 
.httpRespHeaders(groupedData.getHttpRespHeaders()) - .httpRespBodys(groupedData.getHttpRespBodys()) + .httpReqHeader(groupedData.getHttpReqHeaders()) + .httpReqBody(groupedData.getHttpReqBodys()) + .httpRespHeader(groupedData.getHttpRespHeaders()) + .httpRespBody(groupedData.getHttpRespBodys()) .build(); } catch (Exception e) { @@ -160,7 +160,7 @@ public class DataTransformer { .logStartAt(groupedData.getMinLogTime()) .logEndAt(groupedData.getMaxLogTime()) .httpStatus(convertHttpStatus(groupedData.getHttpStatusCodes())) - .dnsInfo(null) + .dnsInfo(groupedData.getDnsInfo()) .accountInfo(null) .attackerInfo(null) .victimInfo(null) @@ -190,10 +190,10 @@ public class DataTransformer { .focused(false) .baseFocused(false) .isUpdated(false) - .httpReqHeaders(groupedData.getHttpReqHeaders()) - .httpReqBodys(groupedData.getHttpReqBodys()) - .httpRespHeaders(groupedData.getHttpRespHeaders()) - .httpRespBodys(groupedData.getHttpRespBodys()) + .httpReqHeader(groupedData.getHttpReqHeaders()) + .httpReqBody(groupedData.getHttpReqBodys()) + .httpRespHeader(groupedData.getHttpRespHeaders()) + .httpRespBody(groupedData.getHttpRespBodys()) .build(); } catch (Exception e) { diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/RuleExecutionTimeService.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/RuleExecutionTimeService.java new file mode 100644 index 0000000..7a3568c --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/RuleExecutionTimeService.java @@ -0,0 +1,52 @@ +package com.common.service; + + +import com.common.entity.AnalysisAnalysisRule; +import com.common.entity.AnalysisGroupByWindow; + +import java.time.LocalDateTime; + +/** + * 规则执行时间管理服务 + * 根据窗口类型(滚动、滑动、会话)动态计算下次执行时间 + */ +public interface RuleExecutionTimeService { + + /** + * 获取规则下次执行时间 + * + * @param ruleId 规则ID + * @return 下次执行时间,如果未初始化则返回null + */ + LocalDateTime getNextExecuteTime(String ruleId); + + /** + * 更新规则下次执行时间 
+ * 根据窗口类型(滚动、滑动、会话)动态计算 + * + * @param rule 分析规则 + * @param groupByWindow 窗口配置 + */ + void updateNextExecuteTime(AnalysisAnalysisRule rule, AnalysisGroupByWindow groupByWindow); + + /** + * 初始化规则执行时间 + * 应用启动时调用,设置初始执行时间 + * + * @param rule 分析规则 + * @param groupByWindow 窗口配置 + */ + void initRuleExecuteTime(AnalysisAnalysisRule rule, AnalysisGroupByWindow groupByWindow); + + /** + * 删除规则执行时间 + * + * @param ruleId 规则ID + */ + void removeRuleExecuteTime(String ruleId); + + /** + * 清空所有规则执行时间 + */ + void clearAllRuleExecuteTime(); +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/SqlGeneratorService.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/SqlGeneratorService.java new file mode 100644 index 0000000..4b536a3 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/SqlGeneratorService.java @@ -0,0 +1,132 @@ +package com.common.service; + + + +import com.common.entity.*; +import java.util.List; + +/** + * SQL生成服务接口 + */ +public interface SqlGeneratorService { + + /** + * 根据规则动态生成SQL + * + * @param rule 分析规则 + * @param fields 字段配置 + * @param whereConditions WHERE条件逻辑组 + * @param filters 过滤条件列表 + * @param groupByColumns GROUP BY字段 + * @param havingConditions HAVING条件列表 + * @param groupByWindow 时间窗口配置 + * @param startTime 开始时间 + * @param endTime 结束时间 + * @return 生成的SQL语句 + */ + String generateSql(AnalysisAnalysisRule rule, + List fields, + List whereConditions, + List filters, + List groupByColumns, + List havingConditions, + AnalysisGroupByWindow groupByWindow, + String startTime, + String endTime); + + /** + * 生成SELECT子句 + * + * @param fields 字段配置 + * @param groupByWindow 时间窗口配置 + * @return SELECT子句SQL + */ + String generateSelectClause(List fields, AnalysisGroupByWindow groupByWindow); + + /** + * 生成FROM子句 + * + * @param fields 字段配置 + * @param groupByWindow 时间窗口配置 + * @return FROM子句SQL + */ + String generateFromClause(List fields, AnalysisGroupByWindow groupByWindow); 
+ + /** + * 生成WHERE子句(新版本,使用AnalysisFilter) + * + * @param filters 过滤条件列表 + * @param startTime 开始时间 + * @param endTime 结束时间 + * @return WHERE子句SQL + */ + String generateWhereClauseFromFilters(List filters, + String startTime, + String endTime); + + /** + * 生成WHERE子句(旧版本,使用AnalysisWhereCondition) + * + * @param conditions WHERE条件列表 + * @param startTime 开始时间 + * @param endTime 结束时间 + * @return WHERE子句SQL + * @deprecated 使用 generateWhereClauseFromFilters 替代 + */ + @Deprecated + String generateWhereClause(List conditions, + String startTime, + String endTime); + + /** + * 生成GROUP BY子句 + * + * @param columns GROUP BY字段列表 + * @param groupByWindow 时间窗口配置 + * @return GROUP BY子句SQL + */ + String generateGroupByClause(List columns, AnalysisGroupByWindow groupByWindow); + + /** + * 生成HAVING子句(新版本,使用AnalysisGroupByHaving) + * + * @param havingConditions HAVING条件列表 + * @return HAVING子句SQL + */ + String generateHavingClauseFromConditions(List havingConditions); + + /** + * 生成HAVING子句(旧版本,从规则表达式获取) + * + * @param rule 规则 + * @return HAVING子句SQL + * @deprecated 使用 generateHavingClauseFromConditions 替代 + */ + @Deprecated + String generateHavingClause(AnalysisAnalysisRule rule); + + /** + * 生成时间窗口SQL子句 + * + * @param groupByWindow 时间窗口配置 + * @return 窗口SQL子句 + */ + String generateWindowClause(AnalysisGroupByWindow groupByWindow); + + /** + * 构建完整的SQL语句 + * + * @param selectClause SELECT子句 + * @param fromClause FROM子句 + * @param whereClause WHERE子句 + * @param groupByClause GROUP BY子句 + * @param havingClause HAVING子句 + * @return 完整SQL + */ + String buildFullSql(String selectClause, + String fromClause, + String whereClause, + String groupByClause, + String havingClause); +} + diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/AnalysisRuleServiceImpl.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/AnalysisRuleServiceImpl.java new file mode 100644 index 0000000..aeaaade --- /dev/null +++ 
b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/AnalysisRuleServiceImpl.java @@ -0,0 +1,114 @@ +package com.common.service.impl; + +import com.common.entity.AnalysisAnalysisRule; +import com.common.mapper.AnalysisAnalysisRuleMapper; +import com.common.service.AnalysisEngine; +import com.common.service.AnalysisRuleService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Service; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.ArrayList; +/** + * 分析规则服务实现 + */ +@Slf4j +@Service +public class AnalysisRuleServiceImpl implements AnalysisRuleService { + + @Autowired + private AnalysisAnalysisRuleMapper ruleMapper; + + @Autowired + @Qualifier("realtimeAnalysisEngine") + private AnalysisEngine realtimeAnalysisEngine; + + @Autowired + @Qualifier("offlineAnalysisEngine") + private AnalysisEngine offlineAnalysisEngine; + + @Override + public List> executeRealtimeAnalysis() { + log.info("开始执行实时分析任务"); + List rules = getActiveRules("realtime"); + log.info("查询到 {} 条实时分析规则", rules.size()); + return realtimeAnalysisEngine.executeRules(rules); + } + + @Override + public List> executeOfflineAnalysis() { + log.info("开始执行离线分析任务"); + List rules = getActiveRules("offline"); + log.info("查询到 {} 条离线分析规则", rules.size()); + return offlineAnalysisEngine.executeRules(rules); + } + + @Override + public List getActiveRules(String runMode) { + return ruleMapper.selectActiveRulesByRunMode(runMode); + } + + @Override + public void stopRule(String ruleId) { + // 查询规则确定运行模式 + AnalysisAnalysisRule rule = ruleMapper.selectByRuleId(ruleId); + if (rule != null) { + String runMode = rule.getRunMode(); + if ("realtime".equalsIgnoreCase(runMode)) { + realtimeAnalysisEngine.stopRule(ruleId); + } else if ("offline".equalsIgnoreCase(runMode)) { + offlineAnalysisEngine.stopRule(ruleId); + 
} + } + } + + + + @Override + public Map executeRealtimeRule(String ruleId) { + log.info("执行实时分析规则: ruleId={}", ruleId); + + // 查询规则 + AnalysisAnalysisRule rule = ruleMapper.selectByRuleId(ruleId); + if (rule == null) { + log.warn("规则不存在: ruleId={}", ruleId); + Map result = new HashMap<>(); + result.put("success", false); + result.put("message", "规则不存在"); + return result; + } + + if (!"realtime".equalsIgnoreCase(rule.getRunMode())) { + log.warn("规则不是实时分析模式: ruleId={}, runMode={}", ruleId, rule.getRunMode()); + Map result = new HashMap<>(); + result.put("success", false); + result.put("message", "规则不是实时分析模式"); + return result; + } + + try { + // 执行单个规则 + List ruleList = new ArrayList<>(); + ruleList.add(rule); + List> results = realtimeAnalysisEngine.executeRules(ruleList); + + Map result = new HashMap<>(); + result.put("success", true); + result.put("ruleId", ruleId); + result.put("ruleName", rule.getRuleName()); + result.put("resultCount", results.size()); + result.put("results", results); + return result; + } catch (Exception e) { + log.error("执行实时分析规则失败: ruleId={}", ruleId, e); + Map result = new HashMap<>(); + result.put("success", false); + result.put("ruleId", ruleId); + result.put("message", e.getMessage()); + return result; + } + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/OfflineAnalysisEngine.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/OfflineAnalysisEngine.java new file mode 100644 index 0000000..d5b7b98 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/OfflineAnalysisEngine.java @@ -0,0 +1,587 @@ +package com.common.service.impl; + +import com.common.entity.*; +import com.common.mapper.*; +import com.common.service.AnalysisEngine; +import com.common.service.SqlGeneratorService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.stereotype.Service; + +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; + +/** + * 离线分析引擎实现 + */ +@Slf4j +@Service("offlineAnalysisEngine") +public class OfflineAnalysisEngine implements AnalysisEngine { + + @Autowired + private SqlGeneratorService sqlGeneratorService; + + @Autowired + private AnalysisAnalysisRuleMapper ruleMapper; + + @Autowired + private AnalysisFieldMapper fieldMapper; + + @Autowired + private AnalysisWhereConditionMapper whereConditionMapper; + + @Autowired + private AnalysisGroupByColumnMapper groupByColumnMapper; + + @Autowired + private AnalysisFilterMapper filterMapper; + + @Autowired + private AnalysisGroupByHavingMapper groupByHavingMapper; + @Autowired + private AnalysisGroupByMapper groupByMapper; + + @Autowired + private AnalysisGroupByWindowMapper groupByWindowMapper; + + + @Autowired + private AnalysisTaskHistoryMapper taskHistoryMapper; + + @Autowired + private AlarmMapper alarmMapper; + + @Autowired + private JdbcTemplate jdbcTemplate; + + private static final String RUN_MODE = "offline"; + private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + + @Override + public Map executeRule(AnalysisAnalysisRule rule) { + String batchNo = generateBatchNo(); + LocalDateTime startTime = LocalDateTime.now(); + + log.info("开始执行离线规则: ruleId={}, ruleName={}, batchNo={}", + rule.getRuleId(), rule.getRuleName(), batchNo); + + // 从规则配置中获取时间范围 + LocalDateTime dataStartTime = parseDataStartTime(rule); + LocalDateTime dataEndTime = parseDataEndTime(rule); + + // 如果配置中没有指定时间范围,使用默认值 + if (dataStartTime == null) { + dataStartTime = startTime.minusHours(1); + } + if (dataEndTime == null) { + dataEndTime = startTime; + } + + // 创建任务历史记录 + AnalysisTaskHistory history = AnalysisTaskHistory.builder() + .id(System.currentTimeMillis()) // 使用时间戳作为ID + .ruleId(rule.getRuleId()) + 
.startTime(startTime) + .status("RUNNING") + .progressPercent(0) + .inputCount(0L) + .outputCount(0L) + .delFlag("0") + .createTime(startTime) + .updateTime(startTime) + .tenantId("000000") + .remark("离线分析任务 - " + batchNo) + .build(); + taskHistoryMapper.insert(history); + + Map result = new HashMap<>(); + result.put("ruleId", rule.getRuleId()); + result.put("ruleName", rule.getRuleName()); + result.put("runMode", RUN_MODE); + result.put("batchNo", batchNo); + result.put("dataStartTime", dataStartTime); + result.put("dataEndTime", dataEndTime); + + try { + // 更新规则状态为运行中 + ruleMapper.updateTaskStatus(rule.getRuleId(), "running", 1L); + + // 加载规则配置 + List fields = fieldMapper.selectByRuleId(rule.getRuleId()); + List whereConditions = whereConditionMapper.selectByRuleId(rule.getRuleId()); + List filters = filterMapper.selectByRuleId(rule.getRuleId()); + List groupByColumns = groupByColumnMapper.selectByRuleId(rule.getRuleId()); + List havingConditions = groupByHavingMapper.selectByRuleId(rule.getRuleId()); + + + // 加载分组和窗口配置 + AnalysisGroupByWindow groupByWindow = null; + List groupByList = groupByMapper.selectByRuleId(rule.getRuleId()); + if (groupByList != null && !groupByList.isEmpty()) { + AnalysisGroupBy groupBy = groupByList.get(0); + if (groupBy.getId() != null) { + groupByWindow = groupByWindowMapper.selectByGroupById(groupBy.getId().intValue()); + } + } + // 生成SQL + String sql = sqlGeneratorService.generateSql( + rule, + fields, + whereConditions, + filters, + groupByColumns, + havingConditions, + groupByWindow, + dataStartTime.format(DATE_FORMATTER), + dataEndTime.format(DATE_FORMATTER) + ); + + log.info("生成的SQL: {}", sql); + + // 执行SQL + List> queryResult = jdbcTemplate.queryForList(sql); + result.put("queryResult", queryResult); + + // 处理结果,生成告警 + long alarmCount = 0; + if (!queryResult.isEmpty()) { + List alarms = convertToAlarms(rule, queryResult); + if (!alarms.isEmpty()) { + //String tableName = "alarm_" + 
dataStartTime.format(DateTimeFormatter.ofPattern("yyyyMMdd")); + String tableName = "alarm"; + alarmMapper.batchInsert( alarms); + alarmCount = alarms.size(); + } + } + + // 更新任务历史 + LocalDateTime endTime = LocalDateTime.now(); + long durationSeconds = java.time.Duration.between(startTime, endTime).getSeconds(); + history.setEndTime(endTime); + history.setDurationTime(durationSeconds); + history.setProgressPercent(100); + history.setInputCount((long) queryResult.size()); + history.setOutputCount(alarmCount); + history.setStatus("COMPLETED"); + history.setUpdateTime(endTime); + taskHistoryMapper.update(history); + + result.put("processedCount", queryResult.size()); + result.put("alarmCount", alarmCount); + result.put("status", "success"); + + // 更新规则状态为等待 + ruleMapper.updateTaskStatus(rule.getRuleId(), "waiting", 1L); + + log.info("规则执行成功: ruleId={}, processedCount={}, alarmCount={}", + rule.getRuleId(), queryResult.size(), alarmCount); + + } catch (Exception e) { + log.error("规则执行失败: ruleId={}", rule.getRuleId(), e); + + // 更新任务历史 + LocalDateTime endTime = LocalDateTime.now(); + long durationSeconds = java.time.Duration.between(startTime, endTime).getSeconds(); + history.setEndTime(endTime); + history.setDurationTime(durationSeconds); + history.setStatus("FAILED"); + //history.setRemark("执行失败: " + e.getMessage()); + history.setRemark("执行失败: " + (e.getMessage().length()>480? 
e.getMessage().substring(0,480):e.getMessage())); + history.setUpdateTime(endTime); + taskHistoryMapper.update(history); + + // 更新规则状态 + ruleMapper.updateTaskStatus(rule.getRuleId(), "stopped", 1L); + + result.put("status", "failed"); + result.put("errorMsg", e.getMessage()); + } + + return result; + } + + @Override + public List> executeRules(List rules) { + List> results = new ArrayList<>(); + + for (AnalysisAnalysisRule rule : rules) { + try { + Map result = executeRule(rule); + results.add(result); + } catch (Exception e) { + log.error("执行规则失败: ruleId={}", rule.getRuleId(), e); + Map errorResult = new HashMap<>(); + errorResult.put("ruleId", rule.getRuleId()); + errorResult.put("ruleName", rule.getRuleName()); + errorResult.put("status", "failed"); + errorResult.put("errorMsg", e.getMessage()); + results.add(errorResult); + } + } + + return results; + } + + @Override + public void stopRule(String ruleId) { + try { + ruleMapper.updateTaskStatus(ruleId, "stopped", 1L); + log.info("已停止规则: ruleId={}", ruleId); + } catch (Exception e) { + log.error("停止规则失败: ruleId={}", ruleId, e); + } + } + + @Override + public String getRunMode() { + return RUN_MODE; + } + + /** + * 生成批次号 + */ + private String generateBatchNo() { + return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS")); + } + + /** + * 解析数据开始时间 + */ + private LocalDateTime parseDataStartTime(AnalysisAnalysisRule rule) { + // 从rule_content或rule_expression中解析时间范围 + // 这里简化处理,实际需要解析JSON配置 +/* if (StringUtils.isNotBlank(rule.getRuleContent())) { + // TODO: 解析JSON配置获取时间范围 + }*/ + return null; + } + + /** + * 解析数据结束时间 + */ + private LocalDateTime parseDataEndTime(AnalysisAnalysisRule rule) { + // 从rule_content或rule_expression中解析时间范围 +/* if (StringUtils.isNotBlank(rule.getRuleContent())) { + // TODO: 解析JSON配置获取时间范围 + }*/ + return null; + } + + /** + * 转换查询结果为告警对象 + * 复用实时引擎的转换逻辑 + */ + private List convertToAlarms(AnalysisAnalysisRule rule, List> queryResult) { + List alarms = new 
ArrayList<>(); + + for (Map row : queryResult) { + Alarm alarm = Alarm.builder() + .id(UUID.randomUUID().toString()) + .createdAt(LocalDateTime.now()) + .updatedAt(LocalDateTime.now()) + .alarmName(rule.getRuleName()) + .engineType("offline") + .attackResult(-1) + .focused(false) + .fall(0) + .alarmLevel("未知") + .baseFocused(false) + .isUpdated(false) + .alarmSource(1) + .dispositionAdvice("研判后处置") + .disposedState(0) + .attackDirection("other") + .etlTime(LocalDateTime.now()) + .alarmAreaId(0) + .build(); + + // 映射查询结果字段到告警对象(根据新表结构) + if (row.containsKey("log_start_at")) { + alarm.setLogStartAt(getTimestampValue(row.get("log_start_at"))); + } + if (row.containsKey("log_end_at")) { + alarm.setLogEndAt(getTimestampValue(row.get("log_end_at"))); + } + if (row.containsKey("alarm_name")) { + alarm.setComment(getStringValue(row.get("alarm_name"))); + } + if (row.containsKey("alarm_type")) { + alarm.setComment(getStringValue(row.get("alarm_type"))); + } + if (row.containsKey("alarm_level")) { + alarm.setAlarmLevel(convertAlarmLevel(getIntegerValue(row.get("alarm_level")))); + } + if (row.containsKey("attack_ip")) { + alarm.setAttackIp(getStringArray(row.get("attack_ip"))); + } + if (row.containsKey("victim_ip")) { + alarm.setVictimIp(getStringArray(row.get("victim_ip"))); + } + if (row.containsKey("victim_web_url")) { + alarm.setVictimWebUrl(getStringArray(row.get("victim_web_url"))); + } + if (row.containsKey("attack_chain_phase")) { + alarm.setAttackChainPhase(getIntegerArray(row.get("attack_chain_phase"))); + } + if (row.containsKey("device_id")) { + alarm.setDeviceId(getIntegerArray(row.get("device_id"))); + } + if (row.containsKey("tag")) { + alarm.setTag(getStringArray(row.get("tag"))); + } + if (row.containsKey("comment")) { + alarm.setComment(getStringValue(row.get("comment"))); + } + if (row.containsKey("origin_log_ids")) { + alarm.setOriginLogIds(getStringArray(row.get("origin_log_ids"))); + } + if (row.containsKey("query_id")) { + 
alarm.setQueryId(getStringValue(row.get("query_id"))); + } + if (row.containsKey("attack_result")) { + alarm.setAttackResult(getIntegerValue(row.get("attack_result"))); + } + if (row.containsKey("fall")) { + alarm.setFall(getIntegerValue(row.get("fall"))); + } + if (row.containsKey("payload")) { + alarm.setPayload(getBytesValue(row.get("payload"))); + } + if (row.containsKey("operate_event")) { + alarm.setOperateEvent(getIntegerArray(row.get("operate_event"))); + } + if (row.containsKey("attack_port")) { + alarm.setAttackPort(getIntegerArray(row.get("attack_port"))); + } + if (row.containsKey("victim_port")) { + alarm.setVictimPort(getIntegerArray(row.get("victim_port"))); + } + if (row.containsKey("attack_method")) { + alarm.setAttackMethod(getStringValue(row.get("attack_method"))); + } + if (row.containsKey("business_ext")) { + alarm.setBusinessExt(getStringValue(row.get("business_ext"))); + } + if (row.containsKey("http_status")) { + alarm.setHttpStatus(getStringValue(row.get("http_status"))); + } + if (row.containsKey("dns_info")) { + alarm.setDnsInfo(getStringValue(row.get("dns_info"))); + } + if (row.containsKey("account_info")) { + alarm.setAccountInfo(getStringValue(row.get("account_info"))); + } + if (row.containsKey("attacker_info")) { + alarm.setAttackerInfo(getStringValue(row.get("attacker_info"))); + } + if (row.containsKey("victim_info")) { + alarm.setVictimInfo(getStringValue(row.get("victim_info"))); + } + if (row.containsKey("suspicious_action")) { + alarm.setSuspiciousAction(getStringValue(row.get("suspicious_action"))); + } + if (row.containsKey("vuln_info")) { + alarm.setVulnInfo(getStringValue(row.get("vuln_info"))); + } + if (row.containsKey("weak_pwd")) { + alarm.setWeakPwd(getStringValue(row.get("weak_pwd"))); + } + if (row.containsKey("compliance_baseline")) { + alarm.setComplianceBaseline(getStringValue(row.get("compliance_baseline"))); + } + if (row.containsKey("file_info")) { + alarm.setFileInfo(getStringValue(row.get("file_info"))); + } 
+ if (row.containsKey("file_tags")) { + alarm.setFileTags(getStringValue(row.get("file_tags"))); + } + if (row.containsKey("endpoint_info")) { + alarm.setEndpointInfo(getStringValue(row.get("endpoint_info"))); + } + if (row.containsKey("origin_info")) { + alarm.setOriginInfo(getStringValue(row.get("origin_info"))); + } + if (row.containsKey("protocol_info")) { + alarm.setProtocolInfo(getStringValue(row.get("protocol_info"))); + } + if (row.containsKey("email_info")) { + alarm.setEmailInfo(getStringValue(row.get("email_info"))); + } + if (row.containsKey("sensitive_data")) { + alarm.setSensitiveData(getStringValue(row.get("sensitive_data"))); + } + if (row.containsKey("hit_intelligence")) { + alarm.setHitIntelligence(getIntegerValue(row.get("hit_intelligence"))); + } + if (row.containsKey("window_time")) { + alarm.setWindowTime(getStringValue(row.get("window_time"))); + } + if (row.containsKey("attack_ip_pic")) { + alarm.setAttackIpPic(getStringValue(row.get("attack_ip_pic"))); + } + if (row.containsKey("victim_ip_pic")) { + alarm.setVictimIpPic(getStringValue(row.get("victim_ip_pic"))); + } + if (row.containsKey("operation_at")) { + alarm.setOperationAt(getTimestampValue(row.get("operation_at"))); + } + if (row.containsKey("attack_direction")) { + alarm.setAttackDirection(getStringValue(row.get("attack_direction"))); + } + if (row.containsKey("etl_time")) { + alarm.setEtlTime(getTimestampValue(row.get("etl_time"))); + } + if (row.containsKey("log_count")) { + alarm.setLogCount(getIntegerValue(row.get("log_count"))); + } + if (row.containsKey("is_asset_hit")) { + alarm.setIsAssetHit(getIntegerValue(row.get("is_asset_hit"))); + } + if (row.containsKey("http_req_header")) { + alarm.setHttpReqHeader(getStringArray(row.get("http_req_header"))); + } + if (row.containsKey("http_req_body")) { + alarm.setHttpReqBody(getStringArray(row.get("http_req_body"))); + } + if (row.containsKey("http_resp_header")) { + 
alarm.setHttpRespHeader(getStringArray(row.get("http_resp_header"))); + } + if (row.containsKey("http_resp_body")) { + alarm.setHttpRespBody(getStringArray(row.get("http_resp_body"))); + } + if (row.containsKey("window_time")) { + alarm.setLogEndAt(getTimestampValue(row.get("window_time"))); + } + alarms.add(alarm); + } + + return alarms; + } + + private byte[] getBytesValue(Object value) { + if (value == null) { + return null; + } + if (value instanceof byte[]) { + return (byte[]) value; + } + return value.toString().getBytes(); + } + + // 以下为辅助方法,用于类型转换 + + private String getStringValue(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + private Long getLongValue(Object value) { + if (value == null) { + return 0L; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + return Long.parseLong(value.toString()); + } + + private Integer getIntegerValue(Object value) { + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).intValue(); + } + return Integer.parseInt(value.toString()); + } + + private LocalDateTime getTimestampValue(Object value) { + if (value == null) { + return null; + } + if (value instanceof LocalDateTime) { + return (LocalDateTime) value; + } + return LocalDateTime.parse(value.toString()); + } + + @SuppressWarnings("unchecked") + private String[] getStringArray(Object value) { + if (value == null) { + return new String[0]; + } + if (value instanceof String[]) { + return (String[]) value; + } + if (value instanceof Object[]) { + Object[] arr = (Object[]) value; + String[] result = new String[arr.length]; + for (int i = 0; i < arr.length; i++) { + result[i] = arr[i] != null ? 
arr[i].toString() : null; + } + return result; + } + String str = value.toString(); + if (str.startsWith("{") && str.endsWith("}")) { + str = str.substring(1, str.length() - 1); + return str.split(","); + } + return new String[]{str}; + } + + @SuppressWarnings("unchecked") + private Integer[] getIntegerArray(Object value) { + if (value == null) { + return new Integer[0]; + } + String[] strArray = getStringArray(value); + Integer[] result = new Integer[strArray.length]; + for (int i = 0; i < strArray.length; i++) { + try { + result[i] = Integer.parseInt(strArray[i]); + } catch (NumberFormatException e) { + result[i] = null; + } + } + return result; + } + + @SuppressWarnings("unchecked") + private byte[][] getByteArrayArray(Object value) { + if (value == null) { + return new byte[0][]; + } + if (value instanceof byte[][]) { + return (byte[][]) value; + } + if (value instanceof Object[]) { + Object[] arr = (Object[]) value; + byte[][] result = new byte[arr.length][]; + for (int i = 0; i < arr.length; i++) { + if (arr[i] instanceof byte[]) { + result[i] = (byte[]) arr[i]; + } else { + result[i] = arr[i] != null ? 
arr[i].toString().getBytes() : null; + } + } + return result; + } + return new byte[0][]; + } + + private String convertAlarmLevel(Integer eventLevel) { + if (eventLevel == null) return "未知"; + switch (eventLevel) { + case 0: return "安全(无威胁)"; + case 1: return "低危"; + case 2: return "中危"; + case 3: return "高危"; + case 4: return "超危"; + default: return "未知"; + } + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/RealtimeAnalysisEngine.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/RealtimeAnalysisEngine.java new file mode 100644 index 0000000..03e9a81 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/RealtimeAnalysisEngine.java @@ -0,0 +1,847 @@ +package com.common.service.impl; + +import com.common.entity.*; +import com.common.mapper.*; +import com.common.service.AnalysisEngine; +import com.common.service.SqlGeneratorService; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.stereotype.Service; + +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; + +/** + * 实时分析引擎实现 + */ +@Slf4j +@Service("realtimeAnalysisEngine") +public class RealtimeAnalysisEngine implements AnalysisEngine { + + @Autowired + private SqlGeneratorService sqlGeneratorService; + + @Autowired + private AnalysisAnalysisRuleMapper ruleMapper; + + @Autowired + private AnalysisFieldMapper fieldMapper; + + @Autowired + private AnalysisWhereConditionMapper whereConditionMapper; + + @Autowired + private AnalysisGroupByColumnMapper groupByColumnMapper; + + + @Autowired + private AnalysisGroupByWindowMapper groupByWindowMapper; + + + @Autowired + private AnalysisFilterMapper filterMapper; + + @Autowired + private AnalysisGroupByHavingMapper groupByHavingMapper; + + 
@Autowired + private AnalysisTaskHistoryMapper taskHistoryMapper; + + @Autowired + private AlarmMapper alarmMapper; + @Autowired + private AnalysisGroupByMapper groupByMapper; + @Autowired + private JdbcTemplate jdbcTemplate; + + private static final String RUN_MODE = "realtime"; + private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + + @Override + public Map executeRule(AnalysisAnalysisRule rule) { + String batchNo = generateBatchNo(); + //LocalDateTime startTime = LocalDateTime.now(); + //LocalDateTime dataEndTime = startTime; + //LocalDateTime dataStartTime = startTime.minusMinutes(30); // 默认查询最近30分钟 + // 时间点取整分钟,避免秒级时间导致窗口计算不一致 + LocalDateTime startTime = LocalDateTime.now().withSecond(0).withNano(0); + LocalDateTime dataEndTime = startTime; + + // 加载分组和窗口配置(在计算时间范围之前) + AnalysisGroupByWindow groupByWindow = null; + List groupByList = groupByMapper.selectByRuleId(rule.getRuleId()); + if (groupByList != null && !groupByList.isEmpty()) { + AnalysisGroupBy groupBy = groupByList.get(0); + if (groupBy.getId() != null) { + // groupBy.getId()是Long类型,需要转换为Integer + groupByWindow = groupByWindowMapper.selectByGroupById(groupBy.getId().intValue()); + } + } + + // 根据窗口类型动态计算数据查询时间范围 + LocalDateTime dataStartTime = calculateDataStartTime(dataEndTime, groupByWindow); + + log.info("开始执行实时规则: ruleId={}, ruleName={}, batchNo={}, windowType={}, dataStartTime={}, dataEndTime={}", + rule.getRuleId(), rule.getRuleName(), batchNo, + groupByWindow != null ? 
groupByWindow.getWindowType() : "NONE", + dataStartTime.format(DATE_FORMATTER), + dataEndTime.format(DATE_FORMATTER)); + + // 创建任务历史记录 + AnalysisTaskHistory history = AnalysisTaskHistory.builder() + .id(System.currentTimeMillis()) // 使用时间戳作为ID + .ruleId(rule.getRuleId()) + .startTime(startTime) + .status("RUNNING") + .progressPercent(0) + .inputCount(0L) + .outputCount(0L) + .delFlag("0") + .createTime(startTime) + .updateTime(startTime) + .tenantId("000000") + .remark("实时分析任务 - " + batchNo) + .build(); + taskHistoryMapper.insert(history); + + Map result = new HashMap<>(); + result.put("ruleId", rule.getRuleId()); + result.put("ruleName", rule.getRuleName()); + result.put("runMode", RUN_MODE); + result.put("batchNo", batchNo); + + try { + // 更新规则状态为运行中 + ruleMapper.updateTaskStatus(rule.getRuleId(), "running", 1L); + + // 加载规则配置 + List fields = fieldMapper.selectByRuleId(rule.getRuleId()); + List whereConditions = whereConditionMapper.selectByRuleId(rule.getRuleId()); + List filters = filterMapper.selectByRuleId(rule.getRuleId()); + List groupByColumns = groupByColumnMapper.selectByRuleId(rule.getRuleId()); + List havingConditions = groupByHavingMapper.selectByRuleId(rule.getRuleId()); + + /** + // 加载分组和窗口配置 + AnalysisGroupByWindow groupByWindow = null; + List groupByList = groupByMapper.selectByRuleId(rule.getRuleId()); + if (groupByList != null && !groupByList.isEmpty()) { + AnalysisGroupBy groupBy = groupByList.get(0); + if (groupBy.getId() != null) { + groupByWindow = groupByWindowMapper.selectByGroupById(groupBy.getId().intValue()); + } + } + ***/ + + // 生成SQL + String sql = sqlGeneratorService.generateSql( + rule, + fields, + whereConditions, + filters, + groupByColumns, + havingConditions, + groupByWindow, + dataStartTime.format(DATE_FORMATTER), + dataEndTime.format(DATE_FORMATTER) + ); + + log.info("生成的SQL: {}", sql); + + // 执行SQL + List> queryResult = jdbcTemplate.queryForList(sql); + result.put("queryResult", queryResult); + + // 处理结果,生成告警 + long 
alarmCount = 0; + if (!queryResult.isEmpty()) { + List alarms = convertToAlarms(rule, queryResult); + if (!alarms.isEmpty()) { + //String tableName = "alarm_" + dataStartTime.format(DateTimeFormatter.ofPattern("yyyyMMdd")); + String tableName = "alarm"; + alarmMapper.batchInsert( alarms); + alarmCount = alarms.size(); + } + } + + // 更新任务历史 + LocalDateTime endTime = LocalDateTime.now(); + long durationSeconds = java.time.Duration.between(startTime, endTime).getSeconds(); + history.setEndTime(endTime); + history.setDurationTime(durationSeconds); + history.setProgressPercent(100); + history.setInputCount((long) queryResult.size()); + history.setOutputCount(alarmCount); + history.setStatus("COMPLETED"); + history.setUpdateTime(endTime); + taskHistoryMapper.update(history); + + result.put("processedCount", queryResult.size()); + result.put("alarmCount", alarmCount); + result.put("status", "success"); + + // 更新规则状态为等待 + ruleMapper.updateTaskStatus(rule.getRuleId(), "waiting", 1L); + + log.info("规则执行成功: ruleId={}, processedCount={}, alarmCount={}", + rule.getRuleId(), queryResult.size(), alarmCount); + + } catch (Exception e) { + log.error("规则执行失败: ruleId={}", rule.getRuleId(), e); + + // 更新任务历史 + LocalDateTime endTime = LocalDateTime.now(); + long durationSeconds = java.time.Duration.between(startTime, endTime).getSeconds(); + history.setEndTime(endTime); + history.setDurationTime(durationSeconds); + history.setStatus("FAILED"); + history.setRemark("执行失败: " + (e.getMessage().length()>480? 
e.getMessage().substring(0,480):e.getMessage())); + history.setUpdateTime(endTime); + taskHistoryMapper.update(history); + + // 更新规则状态 + ruleMapper.updateTaskStatus(rule.getRuleId(), "stopped", 1L); + + result.put("status", "failed"); + result.put("errorMsg", e.getMessage()); + } + + return result; + } + + @Override + public List> executeRules(List rules) { + List> results = new ArrayList<>(); + + for (AnalysisAnalysisRule rule : rules) { + try { + Map result = executeRule(rule); + results.add(result); + } catch (Exception e) { + log.error("执行规则失败: ruleId={}", rule.getRuleId(), e); + Map errorResult = new HashMap<>(); + errorResult.put("ruleId", rule.getRuleId()); + errorResult.put("ruleName", rule.getRuleName()); + errorResult.put("status", "failed"); + errorResult.put("errorMsg", e.getMessage()); + results.add(errorResult); + } + } + + return results; + } + + @Override + public void stopRule(String ruleId) { + try { + ruleMapper.updateTaskStatus(ruleId, "stopped", 1L); + log.info("已停止规则: ruleId={}", ruleId); + } catch (Exception e) { + log.error("停止规则失败: ruleId={}", ruleId, e); + } + } + + @Override + public String getRunMode() { + return RUN_MODE; + } + + /** + * 生成批次号 + */ + private String generateBatchNo() { + return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS")); + } + /** + * 根据窗口类型动态计算数据查询开始时间 + * + * @param dataEndTime 数据查询结束时间 + * @param groupByWindow 窗口配置 + * @return 数据查询开始时间 + */ + private LocalDateTime calculateDataStartTime(LocalDateTime dataEndTime, AnalysisGroupByWindow groupByWindow) { + // 如果没有配置窗口,使用默认30分钟 + if (groupByWindow == null) { + log.warn("未配置窗口类型,使用默认查询范围:最近30分钟"); + return dataEndTime.minusMinutes(30); + } + + String windowType = groupByWindow.getWindowType(); + + if (windowType == null || windowType.trim().isEmpty()) { + log.warn("窗口类型为空,使用默认查询范围:最近30分钟"); + return dataEndTime.minusMinutes(30); + } + + switch (windowType.toUpperCase()) { + case "TUMBLE": + // 滚动窗口:查询最近一个完整窗口的数据 + return 
calculateTumbleWindowStartTime(dataEndTime, groupByWindow); + + case "HOP": + // 滑动窗口:查询覆盖当前时间点的所有滑动窗口 + return calculateHopWindowStartTime(dataEndTime, groupByWindow); + + case "SESSION": + // 会话窗口:查询最近一个会话超时时间的数据 + return calculateSessionWindowStartTime(dataEndTime, groupByWindow); + + default: + log.warn("未知窗口类型: {},使用默认查询范围:最近30分钟", windowType); + return dataEndTime.minusMinutes(30); + } + } + + /** + * 计算滚动窗口的数据查询开始时间 + * + * 滚动窗口特点:窗口之间不重叠,数据量固定 + * 查询策略:查询最近1个窗口的数据(包含当前正在进行的窗口) + */ + private LocalDateTime calculateTumbleWindowStartTime(LocalDateTime dataEndTime, AnalysisGroupByWindow groupByWindow) { + Integer windowSize = groupByWindow.getTumbleWindowSize(); + String windowSizeUnit = groupByWindow.getTumbleWindowSizeUnit(); + + if (windowSize == null || windowSize <= 0) { + log.warn("滚动窗口大小配置无效,使用默认值:5分钟"); + windowSize = 5; + windowSizeUnit = "m"; + } + + if (windowSizeUnit == null || windowSizeUnit.trim().isEmpty()) { + windowSizeUnit = "m"; + } + + LocalDateTime dataStartTime; + switch (windowSizeUnit.toLowerCase()) { + case "s": + dataStartTime = dataEndTime.minusSeconds(windowSize); + break; + case "m": + dataStartTime = dataEndTime.minusMinutes(windowSize); + break; + case "h": + dataStartTime = dataEndTime.minusHours(windowSize); + break; + case "d": + dataStartTime = dataEndTime.minusDays(windowSize); + break; + default: + log.warn("滚动窗口单位无效: {},使用默认单位:分钟", windowSizeUnit); + dataStartTime = dataEndTime.minusMinutes(windowSize); + } + + log.info("滚动窗口查询范围: 窗口大小={}{},查询时间范围=[{}, {}]", + windowSize, windowSizeUnit, + dataStartTime.format(DATE_FORMATTER), + dataEndTime.format(DATE_FORMATTER)); + + return dataStartTime; + } + + /** + * 计算滑动窗口的数据查询开始时间 + * + * 滑动窗口特点:窗口之间重叠,每个滑动步长触发一次计算 + * 查询策略:查询覆盖当前时间点的完整窗口(窗口大小),而不是查询所有历史滑动窗口 + * 原因:避免数据量过大,实时分析只需要分析当前活跃窗口的数据 + */ + private LocalDateTime calculateHopWindowStartTime(LocalDateTime dataEndTime, AnalysisGroupByWindow groupByWindow) { + Integer windowSize = groupByWindow.getHopWindowSize(); + String 
windowSizeUnit = groupByWindow.getHopWindowSizeUnit(); + + if (windowSize == null || windowSize <= 0) { + log.warn("滑动窗口大小配置无效,使用默认值:5分钟"); + windowSize = 5; + windowSizeUnit = "m"; + } + + if (windowSizeUnit == null || windowSizeUnit.trim().isEmpty()) { + windowSizeUnit = "m"; + } + + LocalDateTime dataStartTime; + switch (windowSizeUnit.toLowerCase()) { + case "s": + dataStartTime = dataEndTime.minusSeconds(windowSize); + break; + case "m": + dataStartTime = dataEndTime.minusMinutes(windowSize); + break; + case "h": + dataStartTime = dataEndTime.minusHours(windowSize); + break; + case "d": + dataStartTime = dataEndTime.minusDays(windowSize); + break; + default: + log.warn("滑动窗口单位无效: {},使用默认单位:分钟", windowSizeUnit); + dataStartTime = dataEndTime.minusMinutes(windowSize); + } + + log.info("滑动窗口查询范围: 窗口大小={}{},查询时间范围=[{}, {}]", + windowSize, windowSizeUnit, + dataStartTime.format(DATE_FORMATTER), + dataEndTime.format(DATE_FORMATTER)); + + return dataStartTime; + } + + /** + * 计算会话窗口的数据查询开始时间 + * + * 会话窗口特点:根据用户行为动态划分,超时后结束会话 + * 查询策略:查询最近一个会话超时时间的数据,确保捕获活跃会话 + * 额外策略:为避免遗漏跨天或长时间运行的会话,额外增加1天缓冲时间 + */ + private LocalDateTime calculateSessionWindowStartTime(LocalDateTime dataEndTime, AnalysisGroupByWindow groupByWindow) { + Integer sessionTimeout = groupByWindow.getSessionWindowSize(); + String sessionTimeoutUnit = groupByWindow.getSessionWindowSizeUnit(); + + if (sessionTimeout == null || sessionTimeout <= 0) { + log.warn("会话窗口超时时间配置无效,使用默认值:30分钟"); + sessionTimeout = 30; + sessionTimeoutUnit = "m"; + } + + if (sessionTimeoutUnit == null || sessionTimeoutUnit.trim().isEmpty()) { + sessionTimeoutUnit = "m"; + } + + LocalDateTime dataStartTime; + switch (sessionTimeoutUnit.toLowerCase()) { + case "s": + dataStartTime = dataEndTime.minusSeconds(sessionTimeout); + break; + case "m": + dataStartTime = dataEndTime.minusMinutes(sessionTimeout); + break; + case "h": + dataStartTime = dataEndTime.minusHours(sessionTimeout); + break; + case "d": + dataStartTime = 
dataEndTime.minusDays(sessionTimeout); + break; + default: + log.warn("会话窗口超时单位无效: {},使用默认单位:分钟", sessionTimeoutUnit); + dataStartTime = dataEndTime.minusMinutes(sessionTimeout); + } + + // 会话窗口额外增加1天缓冲时间,避免遗漏跨天或长时间运行的会话 + dataStartTime = dataStartTime.minusDays(1); + + log.info("会话窗口查询范围: 超时时间={}{},额外缓冲1天,查询时间范围=[{}, {}]", + sessionTimeout, sessionTimeoutUnit, + dataStartTime.format(DATE_FORMATTER), + dataEndTime.format(DATE_FORMATTER)); + + return dataStartTime; + } + + + /** + * 转换查询结果为告警对象 + */ + private List convertToAlarms(AnalysisAnalysisRule rule, List> queryResult) { + List alarms = new ArrayList<>(); + + for (Map row : queryResult) { + Alarm alarm = Alarm.builder() + .id(UUID.randomUUID().toString()) + .createdAt(LocalDateTime.now()) + .updatedAt(LocalDateTime.now()) + .alarmName(rule.getRuleName()) + .engineType("realtime") + .attackResult(-1) + .focused(false) + .fall(0) + .alarmLevel("未知") + .baseFocused(false) + .isUpdated(false) + .alarmSource(1) + .dispositionAdvice("研判后处置") + .disposedState(0) + .attackDirection("other") + .etlTime(LocalDateTime.now()) + .alarmAreaId(0) + .comment(buildComment(row)) + .attackChainPhase(getIntegerArray(-1)) + .judgedState(0) + .build(); + + // 映射查询结果字段到告警对象 + if (row.containsKey("log_start_at")) { + alarm.setLogStartAt(getTimestampValue(row.get("log_start_at"))); + } + if (row.containsKey("log_end_at")) { + alarm.setLogEndAt(getTimestampValue(row.get("log_end_at"))); + } + if (row.containsKey("alarm_name")) { + alarm.setAlarmName(getStringValue(row.get("alarm_name"))); + } + if (row.containsKey("alarm_type")) { + alarm.setAlarmType(getStringValue(row.get("alarm_type"))); + } + if (row.containsKey("alarm_level")) { + alarm.setAlarmLevel(convertAlarmLevel(getIntegerValue(row.get("alarm_level")))); + } + if (row.containsKey("attack_ip")) { + alarm.setAttackIp(getStringArray(row.get("attack_ip"))); + } + if (row.containsKey("victim_ip")) { + alarm.setVictimIp(getStringArray(row.get("victim_ip"))); + } + if 
(row.containsKey("victim_web_url")) { + alarm.setVictimWebUrl(getStringArray(row.get("victim_web_url"))); + } + if (row.containsKey("attack_chain_phase")) { + alarm.setAttackChainPhase(getIntegerArray(row.get("attack_chain_phase"))); + } + if (row.containsKey("device_id")) { + alarm.setDeviceId(getIntegerArray(row.get("device_id"))); + } + if (row.containsKey("tag")) { + alarm.setTag(getStringArray(row.get("tag"))); + } + if (row.containsKey("comment")) { + alarm.setComment(getStringValue(row.get("comment"))); + } + if (row.containsKey("origin_log_ids")) { + alarm.setOriginLogIds(getStringArray(row.get("origin_log_ids"))); + } + if (row.containsKey("query_id")) { + alarm.setQueryId(getStringValue(row.get("query_id"))); + } + if (row.containsKey("attack_result")) { + alarm.setAttackResult(getIntegerValue(row.get("attack_result"))); + } + if (row.containsKey("fall")) { + alarm.setFall(getIntegerValue(row.get("fall"))); + } + if (row.containsKey("payload")) { + alarm.setPayload(getBytesValue(row.get("payload"))); + } + if (row.containsKey("operate_event")) { + alarm.setOperateEvent(getIntegerArray(row.get("operate_event"))); + } + if (row.containsKey("attack_port")) { + alarm.setAttackPort(getIntegerArray(row.get("attack_port"))); + } + if (row.containsKey("victim_port")) { + alarm.setVictimPort(getIntegerArray(row.get("victim_port"))); + } + if (row.containsKey("attack_method")) { + alarm.setAttackMethod(getStringValue(row.get("attack_method"))); + } + if (row.containsKey("business_ext")) { + alarm.setBusinessExt(getStringValue(row.get("business_ext"))); + } + if (row.containsKey("http_status")) { + alarm.setHttpStatus(getStringValue(row.get("http_status"))); + } + if (row.containsKey("dns_info")) { + alarm.setDnsInfo(getStringValue(row.get("dns_info"))); + } + if (row.containsKey("account_info")) { + alarm.setAccountInfo(getStringValue(row.get("account_info"))); + } + if (row.containsKey("attacker_info")) { + 
alarm.setAttackerInfo(getStringValue(row.get("attacker_info"))); + } + if (row.containsKey("victim_info")) { + alarm.setVictimInfo(getStringValue(row.get("victim_info"))); + } + if (row.containsKey("suspicious_action")) { + alarm.setSuspiciousAction(getStringValue(row.get("suspicious_action"))); + } + if (row.containsKey("vuln_info")) { + alarm.setVulnInfo(getStringValue(row.get("vuln_info"))); + } + if (row.containsKey("weak_pwd")) { + alarm.setWeakPwd(getStringValue(row.get("weak_pwd"))); + } + if (row.containsKey("compliance_baseline")) { + alarm.setComplianceBaseline(getStringValue(row.get("compliance_baseline"))); + } + if (row.containsKey("file_info")) { + alarm.setFileInfo(getStringValue(row.get("file_info"))); + } + if (row.containsKey("file_tags")) { + alarm.setFileTags(getStringValue(row.get("file_tags"))); + } + if (row.containsKey("endpoint_info")) { + alarm.setEndpointInfo(getStringValue(row.get("endpoint_info"))); + } + if (row.containsKey("origin_info")) { + alarm.setOriginInfo(getStringValue(row.get("origin_info"))); + } + if (row.containsKey("protocol_info")) { + alarm.setProtocolInfo(getStringValue(row.get("protocol_info"))); + } + if (row.containsKey("email_info")) { + alarm.setEmailInfo(getStringValue(row.get("email_info"))); + } + if (row.containsKey("sensitive_data")) { + alarm.setSensitiveData(getStringValue(row.get("sensitive_data"))); + } + if (row.containsKey("hit_intelligence")) { + alarm.setHitIntelligence(getIntegerValue(row.get("hit_intelligence"))); + } + if (row.containsKey("window_time")) { + alarm.setWindowTime(getStringValue(row.get("window_time"))); + } + if (row.containsKey("attack_ip_pic")) { + alarm.setAttackIpPic(getStringValue(row.get("attack_ip_pic"))); + } + if (row.containsKey("victim_ip_pic")) { + alarm.setVictimIpPic(getStringValue(row.get("victim_ip_pic"))); + } + if (row.containsKey("operation_at")) { + alarm.setOperationAt(getTimestampValue(row.get("operation_at"))); + } + if (row.containsKey("attack_direction")) { + 
alarm.setAttackDirection(getStringValue(row.get("attack_direction"))); + } + if (row.containsKey("etl_time")) { + alarm.setEtlTime(getTimestampValue(row.get("etl_time"))); + } + if (row.containsKey("log_count")) { + alarm.setLogCount(getIntegerValue(row.get("log_count"))); + } + if (row.containsKey("is_asset_hit")) { + alarm.setIsAssetHit(getIntegerValue(row.get("is_asset_hit"))); + } + if (row.containsKey("http_req_header")) { + alarm.setHttpReqHeader(getStringArray(row.get("http_req_header"))); + } + if (row.containsKey("http_req_body")) { + alarm.setHttpReqBody(getStringArray(row.get("http_req_body"))); + } + if (row.containsKey("http_resp_header")) { + alarm.setHttpRespHeader(getStringArray(row.get("http_resp_header"))); + } + if (row.containsKey("http_resp_body")) { + alarm.setHttpRespBody(getStringArray(row.get("http_resp_body"))); + } + if (row.containsKey("window_time")) { + alarm.setLogEndAt(getTimestampValue(row.get("window_time"))); + } + alarms.add(alarm); + } + + return alarms; + } + + private byte[] getBytesValue(Object value) { + if (value == null) { + return null; + } + if (value instanceof byte[]) { + return (byte[]) value; + } + return value.toString().getBytes(); + } + + // 以下为辅助方法,用于类型转换 + + private String getStringValue(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + private Long getLongValue(Object value) { + if (value == null) { + return 0L; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + return Long.parseLong(value.toString()); + } + + private Integer getIntegerValue(Object value) { + if (value == null) { + return -1; + } + if (value instanceof Number) { + return ((Number) value).intValue(); + } + return Integer.parseInt(value.toString()); + } + + private LocalDateTime getTimestampValue(Object value) { + if (value == null) { + return null; + } + if (value instanceof LocalDateTime) { + return (LocalDateTime) value; + } + + String strValue = value.toString().trim(); 
+ + // 尝试多种时间格式解析 + String[] patterns = { + "yyyy-MM-dd HH:mm:ss.SSS", // 2026-02-05 18:14:25.824 + "yyyy-MM-dd HH:mm:ss", // 2026-02-05 18:14:25 + "yyyy-MM-dd'T'HH:mm:ss.SSS", // 2026-02-05T18:14:25.824 + "yyyy-MM-dd'T'HH:mm:ss", // 2026-02-05T18:14:25 + "yyyy-MM-dd HH:mm:ss.SSSSSS", // 带微秒 + "yyyy-MM-dd HH:mm:ss.SSSSS", // 带10万分秒 + "yyyy-MM-dd HH:mm:ss.S", // 1位毫秒 + "yyyy-MM-dd HH:mm:ss.SS", // 2位毫秒 + "yyyy-MM-dd'T'HH:mm:ss.SSSSSSS", // ISO带纳秒 + "yyyy-MM-dd", // 只有日期 + "yyyy/MM/dd HH:mm:ss", // 斜杠分隔 + "yyyy/MM/dd HH:mm:ss.SSS" // 斜杠分隔+毫秒 + }; + + for (String pattern : patterns) { + try { + return LocalDateTime.parse(strValue, java.time.format.DateTimeFormatter.ofPattern(pattern)); + } catch (Exception ignored) { + // 继续尝试下一个格式 + } + } + + // 如果所有格式都失败,尝试直接解析(ISO格式) + try { + return LocalDateTime.parse(strValue); + } catch (Exception e) { + log.warn("无法解析时间字符串: {}", strValue); + return null; + } + } + + @SuppressWarnings("unchecked") + private String[] getStringArray(Object value) { + if (value == null) { + return new String[0]; + } + if (value instanceof String[]) { + return (String[]) value; + } + if (value instanceof Object[]) { + Object[] arr = (Object[]) value; + String[] result = new String[arr.length]; + for (int i = 0; i < arr.length; i++) { + result[i] = arr[i] != null ? 
arr[i].toString() : null; + } + return result; + } + // PostgreSQL数组以字符串形式返回,如 "{ip1,ip2,ip3}" + String str = value.toString(); + if (str.startsWith("{") && str.endsWith("}")) { + str = str.substring(1, str.length() - 1); + return str.split(","); + } + return new String[]{str}; + } + + @SuppressWarnings("unchecked") + private Integer[] getIntegerArray(Object value) { + if (value == null) { + return new Integer[0]; + } + String[] strArray = getStringArray(value); + Integer[] result = new Integer[strArray.length]; + for (int i = 0; i < strArray.length; i++) { + try { + result[i] = Integer.parseInt(strArray[i]); + } catch (NumberFormatException e) { + result[i] = null; + } + } + return result; + } + + @SuppressWarnings("unchecked") + private byte[][] getByteArrayArray(Object value) { + if (value == null) { + return new byte[0][]; + } + if (value instanceof byte[][]) { + return (byte[][]) value; + } + if (value instanceof Object[]) { + Object[] arr = (Object[]) value; + byte[][] result = new byte[arr.length][]; + for (int i = 0; i < arr.length; i++) { + if (arr[i] instanceof byte[]) { + result[i] = (byte[]) arr[i]; + } else { + result[i] = arr[i] != null ? 
arr[i].toString().getBytes() : null; + } + } + return result; + } + return new byte[0][]; + } + + private String convertAlarmLevel(Integer eventLevel) { + if (eventLevel == null) return "未知"; + switch (eventLevel) { + case 0: return "安全(无威胁)"; + case 1: return "低危"; + case 2: return "中危"; + case 3: return "高危"; + case 4: return "超危"; + default: return "未知"; + } + } + + /** + * 构建comment字段 + */ + private String buildComment(Map row) { + String victimIpsStr; + if (row.containsKey("victim_ip")) { + victimIpsStr = String.join(",", getStringArray(row.get("victim_ip"))); + } + else { + victimIpsStr = "未知"; + } + String alarmName=""; + if (row.containsKey("alarm_name")) { + alarmName=getStringValue(row.get("alarm_name")); + } + String AttackIps=""; + if (row.containsKey("attack_ip")) { + AttackIps=String.join(",", getStringArray(row.get("attack_ip"))); + } + return String.format( + "24小时内,检测到%s上产生%s告警:\n告警名称:%s\n攻击IP:%s\n攻击结果:%d", + victimIpsStr, + convertAlarmLevel(getIntegerValue(row.get("alarm_level"))), + alarmName, + AttackIps, + determineAttackResult(row) + ); + + } + private String convertAttackIps(String[] attackIps) { + if (attackIps == null || attackIps.length == 0) { + return null; + } + return String.join(",", attackIps); + } + /** + * 确定attack_result的值 + */ + private Integer determineAttackResult( Map row ) { + // 优先使用单个attack_result值 + if (row.containsKey("attack_result")) { + return getIntegerValue(row.get("attack_result")); + } + else + { + return -1; + } + + } + +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/RuleExecutionTimeServiceImpl.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/RuleExecutionTimeServiceImpl.java new file mode 100644 index 0000000..a0f1057 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/RuleExecutionTimeServiceImpl.java @@ -0,0 +1,281 @@ +package com.common.service.impl; + + + +import 
package com.common.service.impl;

import com.common.entity.AnalysisAnalysisRule;
import com.common.entity.AnalysisGroupByWindow;
import com.common.service.RuleExecutionTimeService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Set;

/**
 * Redis-backed manager for rule execution times.
 * Computes the next execution time from the rule's window configuration
 * (tumbling, hopping, or session window).
 */
@Slf4j
@Service
public class RuleExecutionTimeServiceImpl implements RuleExecutionTimeService {

    private static final String REDIS_KEY_PREFIX = "rule:next_execute:";
    private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    /** TTL for every execution-time entry stored in Redis. */
    private static final Duration ENTRY_TTL = Duration.ofDays(5);

    @Autowired
    private RedisTemplate<String, String> redisTemplate;

    /**
     * Reads the stored next-execution time for a rule.
     *
     * @return the parsed time, or null when absent (first run) or unparseable
     */
    @Override
    public LocalDateTime getNextExecuteTime(String ruleId) {
        String key = REDIS_KEY_PREFIX + ruleId;
        String value = redisTemplate.opsForValue().get(key);

        if (value == null) {
            log.debug("规则首次执行,无下次执行时间记录,ruleId={}", ruleId);
            return null;
        }

        try {
            return LocalDateTime.parse(value, DATE_FORMATTER);
        } catch (Exception e) {
            log.error("解析下次执行时间失败,ruleId={}, value={}", ruleId, value, e);
            return null;
        }
    }

    /** Recomputes and stores the rule's next execution time (5-day TTL). */
    @Override
    public void updateNextExecuteTime(AnalysisAnalysisRule rule, AnalysisGroupByWindow groupByWindow) {
        LocalDateTime nextTime = calculateNextExecuteTime(rule, groupByWindow);
        String ruleId = rule.getRuleId();

        String key = REDIS_KEY_PREFIX + ruleId;
        String value = nextTime.format(DATE_FORMATTER);

        redisTemplate.opsForValue().set(key, value, ENTRY_TTL);

        log.info("更新规则下次执行时间,ruleId={}, ruleName={}, windowType={}, nextExecuteTime={}",
                ruleId, rule.getRuleName(),
                groupByWindow != null ? groupByWindow.getWindowType() : "NONE",
                value);
    }

    /**
     * Initializes a rule's execution time only if none exists, so a restart
     * does not reset an already-scheduled rule.
     */
    @Override
    public void initRuleExecuteTime(AnalysisAnalysisRule rule, AnalysisGroupByWindow groupByWindow) {
        String ruleId = rule.getRuleId();
        String key = REDIS_KEY_PREFIX + ruleId;

        // BUG FIX: hasKey() returns Boolean and may be null (e.g. pipelined /
        // transactional execution); direct unboxing risked an NPE.
        if (Boolean.TRUE.equals(redisTemplate.hasKey(key))) {
            log.info("规则执行时间已存在,跳过初始化,ruleId={}", ruleId);
            return;
        }

        // Initial execution time = now + window interval.
        LocalDateTime nextTime = calculateNextExecuteTime(rule, groupByWindow);
        String value = nextTime.format(DATE_FORMATTER);

        redisTemplate.opsForValue().set(key, value, ENTRY_TTL);

        log.info("初始化规则执行时间,ruleId={}, ruleName={}, windowType={}, nextExecuteTime={}",
                ruleId, rule.getRuleName(),
                groupByWindow != null ? groupByWindow.getWindowType() : "NONE",
                value);
    }

    /** Deletes the stored execution time for one rule. */
    @Override
    public void removeRuleExecuteTime(String ruleId) {
        String key = REDIS_KEY_PREFIX + ruleId;
        redisTemplate.delete(key);
        log.debug("删除规则执行时间记录,ruleId={}", ruleId);
    }

    /** Deletes all stored rule execution times. */
    @Override
    public void clearAllRuleExecuteTime() {
        String pattern = REDIS_KEY_PREFIX + "*";
        Set<String> keys = redisTemplate.keys(pattern);

        if (keys != null && !keys.isEmpty()) {
            redisTemplate.delete(keys);
            log.info("清空所有规则执行时间,共 {} 条", keys.size());
        }
    }

    /**
     * Computes the next execution time from the window type.
     * Falls back to a 60-second interval when the window is absent or unknown.
     *
     * @param rule          the analysis rule (unused here, kept for signature stability)
     * @param groupByWindow window configuration, may be null
     */
    private LocalDateTime calculateNextExecuteTime(AnalysisAnalysisRule rule, AnalysisGroupByWindow groupByWindow) {
        LocalDateTime now = LocalDateTime.now().truncatedTo(ChronoUnit.MINUTES);

        if (groupByWindow == null) {
            log.warn("未配置窗口类型,使用默认执行间隔:60秒");
            return now.plusSeconds(60);
        }

        String windowType = groupByWindow.getWindowType();

        if (windowType == null || windowType.trim().isEmpty()) {
            log.warn("窗口类型为空,使用默认执行间隔:60秒");
            return now.plusSeconds(60);
        }

        switch (windowType.toUpperCase()) {
            case "TUMBLE":
                return calculateTumbleNextExecuteTime(now, groupByWindow);

            case "HOP":
                return calculateHopNextExecuteTime(now, groupByWindow);

            case "SESSION":
                return calculateSessionNextExecuteTime(now, groupByWindow);

            default:
                log.warn("未知窗口类型: {},使用默认执行间隔:60秒", windowType);
                return now.plusSeconds(60);
        }
    }

    /**
     * Adds an amount of time to {@code now} according to a unit code
     * (s/m/h/d, case-insensitive). Unknown units warn via the supplied
     * message template and fall back to minutes.
     */
    private LocalDateTime plusAmount(LocalDateTime now, int amount, String unit, String invalidUnitMessage) {
        switch (unit.toLowerCase()) {
            case "s":
                return now.plusSeconds(amount);
            case "m":
                return now.plusMinutes(amount);
            case "h":
                return now.plusHours(amount);
            case "d":
                return now.plusDays(amount);
            default:
                log.warn(invalidUnitMessage, unit);
                return now.plusMinutes(amount);
        }
    }

    /**
     * Tumbling window: execution interval = window size.
     * Example: a 5-minute window executes every 5 minutes.
     */
    private LocalDateTime calculateTumbleNextExecuteTime(LocalDateTime now, AnalysisGroupByWindow groupByWindow) {
        Integer windowSize = groupByWindow.getTumbleWindowSize();
        String windowSizeUnit = groupByWindow.getTumbleWindowSizeUnit();

        if (windowSize == null || windowSize <= 0) {
            log.warn("滚动窗口大小配置无效,使用默认值:5分钟");
            windowSize = 5;
            windowSizeUnit = "m";
        }

        if (windowSizeUnit == null || windowSizeUnit.trim().isEmpty()) {
            windowSizeUnit = "m";
        }

        LocalDateTime nextTime = plusAmount(now, windowSize, windowSizeUnit,
                "滚动窗口单位无效: {},使用默认单位:分钟");

        log.debug("滚动窗口下次执行时间: 窗口大小={}{},nextTime={}",
                windowSize, windowSizeUnit, nextTime.format(DATE_FORMATTER));

        return nextTime;
    }

    /**
     * Hopping window: execution interval = slide.
     * Example: 10-minute window with a 5-minute slide executes every 5 minutes.
     */
    private LocalDateTime calculateHopNextExecuteTime(LocalDateTime now, AnalysisGroupByWindow groupByWindow) {
        Integer slide = groupByWindow.getHopWindowSlide();
        String slideUnit = groupByWindow.getHopWindowSizeUnit();

        if (slide == null || slide <= 0) {
            log.warn("滑动窗口步长配置无效,使用默认值:5分钟");
            slide = 5;
            slideUnit = "m";
        }

        if (slideUnit == null || slideUnit.trim().isEmpty()) {
            slideUnit = "m";
        }

        LocalDateTime nextTime = plusAmount(now, slide, slideUnit,
                "滑动窗口步长单位无效: {},使用默认单位:分钟");

        log.debug("滑动窗口下次执行时间: 步长={}{},nextTime={}",
                slide, slideUnit, nextTime.format(DATE_FORMATTER));

        return nextTime;
    }

    /**
     * Session window: execution interval = session timeout.
     * Example: a 30-minute timeout executes every 30 minutes.
     */
    private LocalDateTime calculateSessionNextExecuteTime(LocalDateTime now, AnalysisGroupByWindow groupByWindow) {
        Integer sessionTimeout = groupByWindow.getSessionWindowSize();
        String sessionTimeoutUnit = groupByWindow.getSessionWindowSizeUnit();

        if (sessionTimeout == null || sessionTimeout <= 0) {
            log.warn("会话窗口超时时间配置无效,使用默认值:30分钟");
            sessionTimeout = 30;
            sessionTimeoutUnit = "m";
        }

        if (sessionTimeoutUnit == null || sessionTimeoutUnit.trim().isEmpty()) {
            sessionTimeoutUnit = "m";
        }

        LocalDateTime nextTime = plusAmount(now, sessionTimeout, sessionTimeoutUnit,
                "会话窗口超时单位无效: {},使用默认单位:分钟");

        log.debug("会话窗口下次执行时间: 超时时间={}{},nextTime={}",
                sessionTimeout, sessionTimeoutUnit, nextTime.format(DATE_FORMATTER));

        return nextTime;
    }
}
b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/service/impl/SqlGeneratorServiceImpl.java @@ -0,0 +1,1058 @@ +package com.common.service.impl; + +import com.common.entity.AnalysisAnalysisRule; +import com.common.entity.AnalysisField; +import com.common.entity.AnalysisFilter; +import com.common.entity.AnalysisGroupByColumn; +import com.common.entity.AnalysisGroupByHaving; +import com.common.entity.AnalysisGroupByWindow; +import com.common.entity.AnalysisWhereCondition; +import com.common.service.SqlGeneratorService; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Service; +import com.common.util.JsonbUtil; +import java.util.ArrayList; +import java.util.List; + +/** + * SQL生成服务实现 + */ +@Slf4j +@Service +public class SqlGeneratorServiceImpl implements SqlGeneratorService { + + private static final String DEFAULT_TABLE_NAME = "syslog_normal_alarm"; + + @Override + public String generateSql(AnalysisAnalysisRule rule, + List fields, + List whereConditions, + List filters, + List groupByColumns, + List havingConditions, + AnalysisGroupByWindow groupByWindow, + String startTime, + String endTime) { + String selectClause = generateSelectClause(fields, groupByWindow); + String fromClause = generateFromClause(fields, groupByWindow); + String whereClause = generateWhereClauseFromFilters(filters, startTime, endTime); + String groupByClause = generateGroupByClause(groupByColumns, groupByWindow); + String havingClause = generateHavingClauseFromConditions(havingConditions); + + return buildFullSql(selectClause, fromClause, whereClause, groupByClause, havingClause); + } + @Override + public String generateSelectClause(List fields, AnalysisGroupByWindow groupByWindow) { + if (fields == null || fields.isEmpty()) { + return "SELECT *"; + } + + StringBuilder selectBuilder = new StringBuilder("SELECT "); + + List selectFields = new ArrayList<>(); + for (AnalysisField field : fields) { + // 
根据type字段判断是否用于SELECT(measure或calc类型) + String type = field.getType(); + //if ("measure".equals(type) || "calc".equals(type) || StringUtils.isBlank(type)) { + String columnExpr = buildColumnExpression(field); + // 使用alarm_column_name作为别名 + String alias = StringUtils.isNotBlank(field.getAlarmColumnName()) ? field.getAlarmColumnName() : field.getColumnName(); + selectFields.add(columnExpr + " AS " + alias); + //selectFields.add(columnExpr ); + //} + } + // 如果有时间窗口,添加时间列 + if (groupByWindow != null && StringUtils.isNotBlank(groupByWindow.getWindowType())) { + String timeColumn = buildTimeWindowExpression(groupByWindow); + if (StringUtils.isNotBlank(timeColumn)) { + selectFields.add(timeColumn); + } + } + + /** + //补充log_start_at、log_start_at 字段 + selectFields.add("MIN(log_time) AS " + "log_start_at"); + selectFields.add("MAX(log_time) AS " + "log_end_at"); + //默认补充device_id、origin_log_ids字段 + selectFields.add("ARRAY_AGG(DISTINCT device_id) AS " + "device_id"); + selectFields.add("ARRAY_AGG(DISTINCT id) AS " + "origin_log_ids"); + **/ + + if (selectFields.isEmpty()) { + return "SELECT *"; + } + + selectBuilder.append(String.join(",\n", selectFields)); + return selectBuilder.toString(); + } + + @Override + public String generateFromClause(List fields, AnalysisGroupByWindow groupByWindow) { + String tableName = DEFAULT_TABLE_NAME; + String tableAlias = "t"; + + if (fields != null && !fields.isEmpty()) { + for (AnalysisField field : fields) { + if (StringUtils.isNotBlank(field.getTableName())) { + tableName = field.getTableName(); + } + if (StringUtils.isNotBlank(field.getTableAlias())) { + tableAlias = field.getTableAlias(); + break; + } + } + } + return "FROM " + tableName + " AS " + tableAlias; + } + + @Override + public String generateWhereClauseFromFilters(List filters, + String startTime, + String endTime) { + List whereParts = new ArrayList<>(); + + // 添加时间范围条件 + if (StringUtils.isNotBlank(startTime) && StringUtils.isNotBlank(endTime)) { + whereParts.add("log_time 
>= '" + startTime + "' AND log_time < '" + endTime + "'"); + } + + // 添加配置的WHERE条件 + if (filters != null && !filters.isEmpty()) { + for (AnalysisFilter filter : filters) { + String conditionExpr = buildFilterExpression(filter); + if (StringUtils.isNotBlank(conditionExpr)) { + whereParts.add(conditionExpr); + } + } + } + + if (whereParts.isEmpty()) { + return ""; + } + + return "WHERE " + String.join(" AND ", whereParts); + } + + @Override + @Deprecated + public String generateWhereClause(List conditions, + String startTime, + String endTime) { + List whereParts = new ArrayList<>(); + + // 添加时间范围条件 + if (StringUtils.isNotBlank(startTime) && StringUtils.isNotBlank(endTime)) { + whereParts.add("log_time >= '" + startTime + "' AND log_time < '" + endTime + "'"); + } + + // 添加配置的WHERE条件 + if (conditions != null && !conditions.isEmpty()) { + String conditionSql = buildWhereConditions(conditions); + if (StringUtils.isNotBlank(conditionSql)) { + whereParts.add("(" + conditionSql + ")"); + } + } + + if (whereParts.isEmpty()) { + return ""; + } + + return "WHERE " + String.join(" AND ", whereParts); + } + + + /** + * 构建过滤条件表达式 + */ + private String buildFilterExpression(AnalysisFilter filter) { + if (filter == null) { + return ""; + } + + String columnExpr = filter.getColumnName(); + if (StringUtils.isBlank(columnExpr)) { + log.warn("过滤条件不完整: columnName={}", filter.getColumnName()); + return ""; + } + + // 如果有函数,应用函数 + if (StringUtils.isNotBlank(filter.getFn())) { + columnExpr = buildColumnExpression(filter); + } + + String operator = filter.getOperator(); + // 使用JsonbUtil解析JSONB值 + String valueStr = JsonbUtil.parseString(filter.getValue()); + + if (StringUtils.isBlank(operator)) { + log.warn("过滤条件不完整: operator={}", operator); + return ""; + } + + switch (operator.toUpperCase()) { + case "=": + return columnExpr + " = '" + valueStr + "'"; + case "!=": + return columnExpr + " != '" + valueStr + "'"; + case ">": + return columnExpr + " > " + wrapValue(valueStr); + case "<": + 
return columnExpr + " < " + wrapValue(valueStr); + case ">=": + return columnExpr + " >= " + wrapValue(valueStr); + case "<=": + return columnExpr + " <= " + wrapValue(valueStr); + case "LIKE": + case "like": + return columnExpr + " LIKE '%" + valueStr + "%'"; + case "NOTLIKE": + case "notLike": + return columnExpr + " NOT LIKE '%" + valueStr + "%'"; + case "LIKESTART": + case "likeStart": + return columnExpr + " LIKE '" + valueStr + "%'"; + case "NOTLIKESTART": + case "notLikeStart": + return columnExpr + " NOT LIKE '" + valueStr + "%'"; + case "LIKEEND": + case "likeEnd": + return columnExpr + " LIKE '%" + valueStr + "'"; + case "NOTLIKEEND": + case "notLikeEnd": + return columnExpr + " NOT LIKE '%" + valueStr + "'"; + case "IN": + case "in": + return columnExpr + " IN (" + parseJsonArray(valueStr) + ")"; + case "NOTIN": + case "notIn": + return columnExpr + " NOT IN (" + parseJsonArray(valueStr) + ")"; + case "BETWEEN": + case "between": + String[] values = valueStr.split(","); + if (values.length == 2) { + return columnExpr + " BETWEEN '" + values[0].trim() + "' AND '" + values[1].trim() + "'"; + } + return columnExpr + " BETWEEN " + valueStr; + case "ISNOTEMPTY": + case "isNotEmpty": + return columnExpr + " IS NOT NULL AND " + columnExpr + " != ''"; + case "ISEMPTY": + case "isEmpty": + return columnExpr + " IS NULL OR " + columnExpr + " = ''"; + case "REGEXP_STR": + case "regexp_str": + return columnExpr + " ~ '" + valueStr + "'"; + case "IPCONTAINS": + case "ipContains": + return "iprange_contains(" + columnExpr + ", '" + valueStr + "')"; + case "IPNOTCONTAINS": + case "ipNotContains": + return "NOT iprange_contains(" + columnExpr + ", '" + valueStr + "')"; + case "IPRANGE": + case "ipRange": + String[] ipRange = valueStr.split(","); + if (ipRange.length == 2) { + return columnExpr + " >= '" + ipRange[0].trim() + "' AND " + columnExpr + " <= '" + ipRange[1].trim() + "'"; + } + return columnExpr + " BETWEEN " + valueStr; + case "IPNOTINRANGE": + case 
"ipNotInRange": + String[] ipNotRange = valueStr.split(","); + if (ipNotRange.length == 2) { + return "(" + columnExpr + " < '" + ipNotRange[0].trim() + "' OR " + columnExpr + " > '" + ipNotRange[1].trim() + "')"; + } + return "NOT (" + columnExpr + " BETWEEN " + valueStr + ")"; + case "INGSAFEDOMAIN": + case "inSafeDomain": + return "is_in_safe_domain(" + columnExpr + ", '" + valueStr + "')"; + case "NOTINGSAFEDOMAIN": + case "notInSafeDomain": + return "NOT is_in_safe_domain(" + columnExpr + ", '" + valueStr + "')"; + case "INBUSINESSDOMAIN": + case "inBusinessDomain": + return "is_in_business_domain(" + columnExpr + ", '" + valueStr + "')"; + case "NOTINBUSINESSDOMAIN": + case "notInBusinessDomain": + return "NOT is_in_business_domain(" + columnExpr + ", '" + valueStr + "')"; + case "ISINTRANET": + case "isIntranet": + return "is_intranet(" + columnExpr + ")"; + case "ISNOTINTRANET": + case "isNotIntranet": + return "NOT is_intranet(" + columnExpr + ")"; + default: + return columnExpr + " " + operator + " " + wrapValue(valueStr); + } + } + + @Override + public String generateGroupByClause(List columns, AnalysisGroupByWindow groupByWindow) { + List groupByFields = new ArrayList<>(); + + // 添加分组字段 + if (columns != null && !columns.isEmpty()) { + for (AnalysisGroupByColumn column : columns) { + // 构建带表别名的字段表达式 + String columnExpr = buildColumnExpression(column); + groupByFields.add(columnExpr); + } + } + + // 添加时间窗口字段 + if (groupByWindow != null && StringUtils.isNotBlank(groupByWindow.getWindowType())) { + String timeWindowExpr = buildTimeWindowGroupExpression(groupByWindow); + if (StringUtils.isNotBlank(timeWindowExpr)) { + groupByFields.add(timeWindowExpr); + } + } + + if (groupByFields.isEmpty()) { + return ""; + } + + return "GROUP BY " + String.join(", ", groupByFields); + } + + /** + * 构建字段表达式,支持表别名 + * + * @param column 分组字段配置 + * @return 字段表达式 + */ + private String buildColumnExpression(AnalysisGroupByColumn column) { + //String tableName = 
column.getTableName(); + String tableName = ""; + //String tableAlias = column.getTableAlias(); + String tableAlias =""; + String columnName = column.getColumnName(); + + // 如果有表别名,使用表别名,否则使用表名 + String qualifiedName = ""; + if (StringUtils.isNotBlank(tableAlias)) { + qualifiedName = tableAlias + "." + columnSafeWrap(columnName); + } else if (StringUtils.isNotBlank(tableName)) { + qualifiedName = tableName + "." + columnSafeWrap(columnName); + } else { + //qualifiedName = columnSafeWrap(columnName); + qualifiedName = columnName; + } + + return qualifiedName; + } + + @Override + public String generateHavingClauseFromConditions(List havingConditions) { + if (havingConditions == null || havingConditions.isEmpty()) { + return ""; + } + + List havingParts = new ArrayList<>(); + for (AnalysisGroupByHaving condition : havingConditions) { + String conditionExpr = buildHavingExpression(condition); + if (StringUtils.isNotBlank(conditionExpr)) { + havingParts.add(conditionExpr); + } + } + + if (havingParts.isEmpty()) { + return ""; + } + + return "HAVING " + String.join(" AND ", havingParts); + } + + @Override + @Deprecated + public String generateHavingClause(AnalysisAnalysisRule rule) { + // 从rule_content或rule_expression中解析HAVING条件 + // 这里简化处理,实际需要解析JSON配置 + if (StringUtils.isNotBlank(rule.getRuleExpression())) { + return "HAVING " + rule.getRuleExpression(); + } + return ""; + } + @Override + public String generateWindowClause(AnalysisGroupByWindow groupByWindow) { + if (groupByWindow == null || StringUtils.isBlank(groupByWindow.getWindowType())) { + return ""; + } + + String windowType = groupByWindow.getWindowType().toUpperCase(); + + switch (windowType) { + case "TUMBLE": + return buildTumbleWindowExpression(groupByWindow); + case "HOP": + return buildHopWindowExpression(groupByWindow); + case "SESSION": + return buildSessionWindowExpression(groupByWindow); + default: + log.warn("不支持的窗口类型: {}", windowType); + return ""; + } + } + + /** + * 构建时间窗口表达式(用于SELECT) + */ + 
private String buildTimeWindowExpression(AnalysisGroupByWindow groupByWindow) { + if (groupByWindow == null) { + return ""; + } + + String windowType = groupByWindow.getWindowType().toUpperCase(); + + switch (windowType) { + case "TUMBLE": + return buildTumbleTimeExpression(groupByWindow); + case "HOP": + return buildHopTimeExpression(groupByWindow); + case "SESSION": + return buildSessionTimeExpression(groupByWindow); + default: + return ""; + } + } + + /** + * 构建时间窗口GROUP BY表达式 + */ + private String buildTimeWindowGroupExpression(AnalysisGroupByWindow groupByWindow) { + if (groupByWindow == null) { + return ""; + } + + String windowType = groupByWindow.getWindowType().toUpperCase(); + + switch (windowType) { + case "TUMBLE": + return buildTumbleWindowGroupExpression(groupByWindow); + case "HOP": + return buildHopWindowGroupExpression(groupByWindow); + case "SESSION": + return buildSessionWindowGroupExpression(groupByWindow); + default: + return ""; + } + } + + /** + * 构建滚动窗口表达式 + */ + private String buildTumbleWindowExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getTumbleWindowSizeUnit()); + Integer size = window.getTumbleWindowSize(); + + return String.format("TUMBLE(%s, INTERVAL '%d %s') AS window_start", timeCol, size, sizeUnit); + } + + /** + * 构建滚动窗口时间表达式(用于SELECT) + */ + private String buildTumbleTimeExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getTumbleWindowSizeUnit()); + Integer size = window.getTumbleWindowSize(); + //return String.format("TUMBLE_START(%s, INTERVAL '%d %s') AS window_time", timeCol, size, sizeUnit); + return String.format("TUMBLE(%s, INTERVAL '%d %s') AS window_time", timeCol, size, sizeUnit); + } + + /** + * 构建滚动窗口GROUP BY表达式 + */ + private String buildTumbleWindowGroupExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = 
convertTimeUnit(window.getTumbleWindowSizeUnit()); + Integer size = window.getTumbleWindowSize(); + + return String.format("TUMBLE(%s, INTERVAL '%d %s')", timeCol, size, sizeUnit); + } + + /** + * 构建滑动窗口表达式 + */ + private String buildHopWindowExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getHopWindowSizeUnit()); + String slideUnit = convertTimeUnit(window.getHopWindowSlideUnit()); + Integer size = window.getHopWindowSize(); + Integer slide = window.getHopWindowSlide(); + + return String.format("HOP(%s, INTERVAL '%d %s', INTERVAL '%d %s') AS window_start", + timeCol, size, sizeUnit, slide, slideUnit); + } + + /** + * 构建滑动窗口时间表达式(用于SELECT) + */ + private String buildHopTimeExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getHopWindowSizeUnit()); + String slideUnit = convertTimeUnit(window.getHopWindowSlideUnit()); + Integer size = window.getHopWindowSize(); + Integer slide = window.getHopWindowSlide(); + + //return String.format("HOP_START(%s, INTERVAL '%d %s', INTERVAL '%d %s') AS window_time", + // timeCol, size, sizeUnit, slide, slideUnit); + return String.format("HOP(%s, INTERVAL '%d %s', INTERVAL '%d %s') AS window_time", + timeCol, size, sizeUnit, slide, slideUnit); + } + + /** + * 构建滑动窗口GROUP BY表达式 + */ + private String buildHopWindowGroupExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getHopWindowSizeUnit()); + String slideUnit = convertTimeUnit(window.getHopWindowSlideUnit()); + Integer size = window.getHopWindowSize(); + Integer slide = window.getHopWindowSlide(); + + return String.format("HOP(%s, INTERVAL '%d %s', INTERVAL '%d %s')", + timeCol, size, sizeUnit, slide, slideUnit); + } + + /** + * 构建会话窗口表达式 + */ + private String buildSessionWindowExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = 
convertTimeUnit(window.getSessionWindowSizeUnit()); + Integer size = window.getSessionWindowSize(); + + return String.format("SESSION(%s, INTERVAL '%d %s') AS window_start", timeCol, size, sizeUnit); + } + + /** + * 构建会话窗口时间表达式(用于SELECT) + */ + private String buildSessionTimeExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getSessionWindowSizeUnit()); + Integer size = window.getSessionWindowSize(); + + //return String.format("SESSION_START(%s, INTERVAL '%d %s') AS window_time", timeCol, size, sizeUnit); + return String.format("SESSION(%s, INTERVAL '%d %s') AS window_time", timeCol, size, sizeUnit); + } + + /** + * 构建会话窗口GROUP BY表达式 + */ + private String buildSessionWindowGroupExpression(AnalysisGroupByWindow window) { + String timeCol = "log_time"; + String sizeUnit = convertTimeUnit(window.getSessionWindowSizeUnit()); + Integer size = window.getSessionWindowSize(); + + return String.format("SESSION(%s, INTERVAL '%d %s')", timeCol, size, sizeUnit); + } + + /** + * 转换时间单位 + */ + private String convertTimeUnit(String unit) { + if (StringUtils.isBlank(unit)) { + return "SECOND"; + } + switch (unit.toLowerCase()) { + case "s": + case "second": + case "seconds": + return "SECOND"; + case "m": + case "minute": + case "minutes": + return "MINUTE"; + case "h": + case "hour": + case "hours": + return "HOUR"; + case "d": + case "day": + case "days": + return "DAY"; + default: + return unit.toUpperCase(); + } + } + /** + * 构建HAVING条件表达式 + */ + private String buildHavingExpression(AnalysisGroupByHaving condition) { + if (condition == null) { + return ""; + } + + String columnExpr = condition.getColumnName(); + if (StringUtils.isBlank(columnExpr)) { + log.warn("HAVING条件不完整: columnName={}", condition.getColumnName()); + return ""; + } + + // 如果有聚合函数,应用函数 + if (StringUtils.isNotBlank(condition.getFn())) { + columnExpr = buildColumnExpression(condition); + } + + String operator = condition.getOperator(); + // 
使用JsonbUtil解析JSONB值 + String valueStr = JsonbUtil.parseString(condition.getValue()); + + if (StringUtils.isBlank(operator)) { + log.warn("HAVING条件不完整: operator={}", operator); + return ""; + } + + switch (operator.toUpperCase()) { + case "=": + return columnExpr + " = '" + valueStr + "'"; + case "!=": + return columnExpr + " != '" + valueStr + "'"; + case ">": + return columnExpr + " > " + wrapValue(valueStr); + case "<": + return columnExpr + " < " + wrapValue(valueStr); + case ">=": + return columnExpr + " >= " + wrapValue(valueStr); + case "<=": + return columnExpr + " <= " + wrapValue(valueStr); + case "LIKE": + case "like": + return columnExpr + " LIKE '%" + valueStr + "%'"; + case "NOTLIKE": + case "notLike": + return columnExpr + " NOT LIKE '%" + valueStr + "%'"; + case "LIKESTART": + case "likeStart": + return columnExpr + " LIKE '" + valueStr + "%'"; + case "NOTLIKESTART": + case "notLikeStart": + return columnExpr + " NOT LIKE '" + valueStr + "%'"; + case "LIKEEND": + case "likeEnd": + return columnExpr + " LIKE '%" + valueStr + "'"; + case "NOTLIKEEND": + case "notLikeEnd": + return columnExpr + " NOT LIKE '%" + valueStr + "'"; + case "IN": + case "in": + return columnExpr + " IN (" + parseJsonArray(valueStr) + ")"; + case "NOTIN": + case "notIn": + return columnExpr + " NOT IN (" + parseJsonArray(valueStr) + ")"; + case "BETWEEN": + case "between": + String[] values = valueStr.split(","); + if (values.length == 2) { + return columnExpr + " BETWEEN '" + values[0].trim() + "' AND '" + values[1].trim() + "'"; + } + return columnExpr + " BETWEEN " + valueStr; + case "ISNOTEMPTY": + case "isNotEmpty": + return columnExpr + " IS NOT NULL AND " + columnExpr + " != ''"; + case "ISEMPTY": + case "isEmpty": + return columnExpr + " IS NULL OR " + columnExpr + " = ''"; + case "REGEXP_STR": + case "regexp_str": + return columnExpr + " ~ '" + valueStr + "'"; + case "IPCONTAINS": + case "ipContains": + return "iprange_contains(" + columnExpr + ", '" + valueStr + "')"; 
+ case "IPNOTCONTAINS": + case "ipNotContains": + return "NOT iprange_contains(" + columnExpr + ", '" + valueStr + "')"; + case "IPRANGE": + case "ipRange": + String[] ipRange = valueStr.split(","); + if (ipRange.length == 2) { + return columnExpr + " >= '" + ipRange[0].trim() + "' AND " + columnExpr + " <= '" + ipRange[1].trim() + "'"; + } + return columnExpr + " BETWEEN " + valueStr; + case "IPNOTINRANGE": + case "ipNotInRange": + String[] ipNotRange = valueStr.split(","); + if (ipNotRange.length == 2) { + return "(" + columnExpr + " < '" + ipNotRange[0].trim() + "' OR " + columnExpr + " > '" + ipNotRange[1].trim() + "')"; + } + return "NOT (" + columnExpr + " BETWEEN " + valueStr + ")"; + case "INGSAFEDOMAIN": + case "inSafeDomain": + return "is_in_safe_domain(" + columnExpr + ", '" + valueStr + "')"; + case "NOTINGSAFEDOMAIN": + case "notInSafeDomain": + return "NOT is_in_safe_domain(" + columnExpr + ", '" + valueStr + "')"; + case "INBUSINESSDOMAIN": + case "inBusinessDomain": + return "is_in_business_domain(" + columnExpr + ", '" + valueStr + "')"; + case "NOTINBUSINESSDOMAIN": + case "notInBusinessDomain": + return "NOT is_in_business_domain(" + columnExpr + ", '" + valueStr + "')"; + case "ISINTRANET": + case "isIntranet": + return "is_intranet(" + columnExpr + ")"; + case "ISNOTINTRANET": + case "isNotIntranet": + return "NOT is_intranet(" + columnExpr + ")"; + default: + return columnExpr + " " + operator + " " + wrapValue(valueStr); + } + } + + /** + * 构建列表达式(支持AnalysisFilter和AnalysisGroupByHaving) + */ + private String buildColumnExpression(AnalysisFilter filter) { + return buildColumnExpressionInternal(filter.getFn(), filter.getColumnName(), filter.getArguments()); + } + + /** + * 构建列表达式(支持AnalysisGroupByHaving) + */ + private String buildColumnExpression(AnalysisGroupByHaving having) { + return buildColumnExpressionInternal(having.getFn(), having.getColumnName(), having.getArguments()); + } + + /** + * 内部方法:构建列表达式 + */ + private String 
buildColumnExpressionInternal(String fn, String columnName, Object arguments) { + if (StringUtils.isBlank(fn)) { + return columnName; + } + + String functionName = fn.toUpperCase(); + String argsStr = parseJsonbArguments(arguments); + + switch (functionName) { + // 汇聚函数 + case "COUNT": + return "COUNT(" + columnName + ")"; + case "DEDUPLICATION_COUNT": + return "COUNT(DISTINCT " + columnName + ")"; + case "FIRST_VALUE": + return "FIRST_VALUE(" + columnName + ") OVER (ORDER BY " + columnName + ")"; + case "LAST_VALUE": + return "LAST_VALUE(" + columnName + ") OVER (ORDER BY " + columnName + ")"; + case "SORTATTACKRESULT": + return "MAX(attack_result) OVER (PARTITION BY " + columnName + ")"; + case "SUM": + return "SUM(" + columnName + ")"; + case "MAX": + return "MAX(" + columnName + ")"; + case "MIN": + return "MIN(" + columnName + ")"; + case "AVG": + return "AVG(" + columnName + ")"; + case "DUPLICATESANDSPLICE": + if (StringUtils.isNotBlank(argsStr)) { + return "STRING_AGG(DISTINCT " + columnName + ", '" + argsStr + "')"; + } + return "STRING_AGG(DISTINCT " + columnName + ", ',')"; + case "CONCAT_AGG": + if (StringUtils.isNotBlank(argsStr)) { + return "STRING_AGG(" + columnName + ", '" + argsStr + "')"; + } + return "STRING_AGG(" + columnName + ", ',')"; + case "CONCAT_AGG_ID": + if (StringUtils.isNotBlank(argsStr)) { + return "STRING_AGG(" + columnName + ", '" + argsStr + "')"; + } + return "STRING_AGG(" + columnName + ", ',')"; + case "SPLIT_DISTINCT_CONCAT": + if (StringUtils.isNotBlank(argsStr)) { + String[] splitArgs = argsStr.split(","); + if (splitArgs.length >= 3) { + String separator = splitArgs[0].trim(); + String delimiter = splitArgs[1].trim(); + String limit = splitArgs[2].trim(); + return "STRING_AGG(DISTINCT REGEXP_SPLIT(" + columnName + ", '" + delimiter + "'), '" + separator + "') LIMIT " + limit; + } + } + return columnName; + //自定添加方法 + case "MODE_WITH_GROUP": + return "MODE() WITHIN GROUP (ORDER BY " + columnName + ")"; + + // 聚合函数(兼容旧代码) + 
case "ARRAY_AGG": + + return "ARRAY_AGG(DISTINCT " + columnName + ")"; + case "STRING_AGG": + if (StringUtils.isNotBlank(argsStr)) { + return "STRING_AGG(" + columnSafeWrap(columnName) + ", " + argsStr + ")"; + } + return "STRING_AGG(DISTINCT " + columnName + ", ',')"; + + // 时间函数 + case "YEAR": + return "EXTRACT(YEAR FROM " + columnName + ")"; + case "MONTH": + return "EXTRACT(MONTH FROM " + columnName + ")"; + case "DAYOFMONTH": + return "EXTRACT(DAY FROM " + columnName + ")"; + case "HOUR": + return "EXTRACT(HOUR FROM " + columnName + ")"; + case "MINUTE": + return "EXTRACT(MINUTE FROM " + columnName + ")"; + case "SECOND": + return "EXTRACT(SECOND FROM " + columnName + ")"; + case "WEEK": + return "EXTRACT(WEEK FROM " + columnName + ")"; + + // 字符串函数 + case "SUBSTRING": + if (StringUtils.isNotBlank(argsStr)) { + return "SUBSTRING(" + columnName + ", " + argsStr + ")"; + } + return "SUBSTRING(" + columnName + ", 1)"; + case "SUBSTRING_RANGE": + if (StringUtils.isNotBlank(argsStr)) { + String[] rangeArgs = argsStr.split(","); + if (rangeArgs.length == 2) { + return "SUBSTRING(" + columnName + ", " + rangeArgs[0].trim() + ", " + rangeArgs[1].trim() + ")"; + } + } + return columnName; + case "REGEXP_EXTRACT": + if (StringUtils.isNotBlank(argsStr)) { + String[] regexArgs = argsStr.split(","); + if (regexArgs.length >= 2) { + return "REGEXP_EXTRACT(" + columnName + ", '" + regexArgs[0].trim() + "', " + regexArgs[1].trim() + ")"; + } + return "REGEXP_EXTRACT(" + columnName + ", '" + argsStr + "', 1)"; + } + return columnName; + case "SORT_KEY": + if (StringUtils.isNotBlank(argsStr)) { + String[] sortArgs = argsStr.split(","); + if (sortArgs.length == 3) { + return "CONCAT(" + columnSafeWrap(sortArgs[0].trim()) + ", '" + sortArgs[1].trim() + "', " + columnSafeWrap(sortArgs[2].trim()) + ")"; + } + } + return columnName; + + // 数值函数 + case "FLOOR": + return "FLOOR(" + columnName + ")"; + + // IP相关函数 + case "LOG4JPAYLOADEXTRACTIP": + return "extract_ip_from_log4j(" + 
columnName + ")"; + case "LOG4JPAYLOADEXTRACTDOMAIN": + return "extract_domain_from_log4j(" + columnName + ")"; + + // 其他函数 + case "TO_CHAR": + return "TO_CHAR(" + columnName + ", 'YYYYMMDD')"; + case "HOST": + return "HOST(" + columnName + ")::text"; + default: + return functionName + "(" + columnName + ")"; + } + } + + /** + * 解析JSONB格式的arguments + */ + private String parseJsonbArguments(Object arguments) { + if (arguments == null) { + return ""; + } + String argsStr = arguments.toString(); + if (argsStr.trim().startsWith("[") && argsStr.trim().endsWith("]")) { + try { + String trimmed = argsStr.trim().substring(1, argsStr.trim().length() - 1); + if (trimmed.isEmpty()) { + return ""; + } + return trimmed.replace("\"", ""); + } catch (Exception e) { + log.warn("解析arguments失败: {}", argsStr, e); + return ""; + } + } + return argsStr; + } + + /** + * 解析JSON值 + */ + private String parseJsonValue(Object value) { + if (value == null) { + return ""; + } + return JsonbUtil.parseString(value); + //return value.toString(); + } + + /** + * 解析JSON数组 + */ + private String parseJsonArray(String value) { + if (StringUtils.isBlank(value)) { + return ""; + } + if (value.trim().startsWith("[") && value.trim().endsWith("]")) { + try { + String trimmed = value.trim().substring(1, value.trim().length() - 1); + if (trimmed.isEmpty()) { + return ""; + } + return trimmed.replace("\"", "'"); + } catch (Exception e) { + log.warn("解析JSON数组失败: {}", value, e); + return value; + } + } + return value; + } + + @Override + public String buildFullSql(String selectClause, + String fromClause, + String whereClause, + String groupByClause, + String havingClause) { + StringBuilder sqlBuilder = new StringBuilder(); + sqlBuilder.append(selectClause).append("\n"); + sqlBuilder.append(fromClause).append("\n"); + + if (StringUtils.isNotBlank(whereClause)) { + sqlBuilder.append(whereClause).append("\n"); + } + + if (StringUtils.isNotBlank(groupByClause)) { + sqlBuilder.append(groupByClause).append("\n"); + 
} + + if (StringUtils.isNotBlank(havingClause)) { + sqlBuilder.append(havingClause).append("\n"); + } + + return sqlBuilder.toString(); + } + + /** + * 列名安全包装(处理类型转换) + */ + private String columnSafeWrap(String columnName) { + if (columnName.toLowerCase().contains("ip")) { + return "host(" + columnName + ")::text"; + } + if (columnName.toLowerCase().contains("port")) { + return columnName + "::int4"; + } + return columnName; + } + + /** + * 值包装(处理字符串) + */ + private String wrapValue(String value) { + if (value == null) { + return "NULL"; + } + if (value.startsWith("'") || value.equalsIgnoreCase("NULL") || + value.matches("\\d+")) { + return value; + } + return "'" + value + "'"; + } + + /** + * 构建列表达式(支持AnalysisField) + */ + private String buildColumnExpression(AnalysisField field) { + return buildColumnExpressionInternal(field.getFn(), field.getColumnName(), field.getArguments()); + } + + /** + * 递归构建WHERE条件树 + * 注意:此方法已弃用,新的表结构中括号信息不存储在where_condition表中 + */ + @Deprecated + private String buildWhereConditions(List conditions) { + if (conditions == null || conditions.isEmpty()) { + return ""; + } + + // 构建条件树结构 + List rootConditions = new ArrayList<>(); + for (AnalysisWhereCondition cond : conditions) { + if (cond.getParentCondId() == null || cond.getParentCondId() == 0) { + rootConditions.add(cond); + } + } + + if (rootConditions.isEmpty()) { + // 没有根节点,平铺所有条件 + return buildFlatConditions(conditions); + } + + // 构建树形结构 + List conditionParts = new ArrayList<>(); + for (AnalysisWhereCondition root : rootConditions) { + conditionParts.add(buildConditionNode(root, conditions)); + } + + return String.join(" ", conditionParts); + } + + /** + * 构建单个条件节点(递归) + * 注意:此方法已弃用,新的表结构中不使用bracketType + */ + @Deprecated + private String buildConditionNode(AnalysisWhereCondition node, + List allConditions) { + StringBuilder nodeBuilder = new StringBuilder(); + + // 添加逻辑运算符(如果有) + if (StringUtils.isNotBlank(node.getLogicalOp())) { + nodeBuilder.append(node.getLogicalOp()).append(" 
"); + } + + // 查找子条件 + List children = new ArrayList<>(); + for (AnalysisWhereCondition cond : allConditions) { + if (node.getCondId().equals(cond.getParentCondId())) { + children.add(cond); + } + } + + // 递归处理子条件 + for (AnalysisWhereCondition child : children) { + nodeBuilder.append(buildConditionNode(child, allConditions)); + } + + return nodeBuilder.toString(); + } + + /** + * 构建条件表达式 + * 注意:此方法已弃用,具体的条件表达式应由AnalysisFilter提供 + */ + @Deprecated + private String buildConditionExpression(AnalysisWhereCondition condition) { + // 新的表结构中,AnalysisWhereCondition不再包含具体的字段信息 + // 具体的条件表达式应从AnalysisFilter中获取 + return ""; + } + + /** + * 构建平铺条件(简单场景) + * 注意:此方法已弃用,新的表结构中使用AnalysisFilter + */ + @Deprecated + private String buildFlatConditions(List conditions) { + List conditionParts = new ArrayList<>(); + for (AnalysisWhereCondition cond : conditions) { + if (StringUtils.isNotBlank(cond.getLogicalOp()) && !conditionParts.isEmpty()) { + conditionParts.add(cond.getLogicalOp()); + } + } + return String.join(" ", conditionParts); + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/common/util/JsonbUtil.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/util/JsonbUtil.java new file mode 100644 index 0000000..a3d0ca5 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/common/util/JsonbUtil.java @@ -0,0 +1,161 @@ +package com.common.util; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * JSONB字段解析工具类 + * 用于处理PostgreSQL JSONB类型字段的解析 + */ +public class JsonbUtil { + + private static final ObjectMapper objectMapper = new ObjectMapper(); + + /** + * 解析JSONB字段值为字符串 + * 适用于JSONB存储的是带引号的字符串,如 "127.0.0.1" + * + * @param value JSONB字段的值 + * @return 解析后的字符串 + */ + public static String parseString(Object value) { + if (value == null) { + return null; + } + + // 如果已经是字符串,直接返回 + if (value instanceof String) { + String strValue = (String) value; + // 去除可能存在的引号 + if (strValue.startsWith("\"") && 
strValue.endsWith("\"")) { + return strValue.substring(1, strValue.length() - 1); + } + return strValue; + } + + // 其他类型转为字符串 + return value.toString(); + } + + /** + * 解析JSONB字段值为Integer + * + * @param value JSONB字段的值 + * @return 解析后的Integer + */ + public static Integer parseInteger(Object value) { + if (value == null) { + return null; + } + + if (value instanceof Integer) { + return (Integer) value; + } + + if (value instanceof Number) { + return ((Number) value).intValue(); + } + + if (value instanceof String) { + String strValue = parseString(value); + try { + return Integer.parseInt(strValue); + } catch (NumberFormatException e) { + return null; + } + } + + return null; + } + + /** + * 解析JSONB字段值为Long + * + * @param value JSONB字段的值 + * @return 解析后的Long + */ + public static Long parseLong(Object value) { + if (value == null) { + return null; + } + + if (value instanceof Long) { + return (Long) value; + } + + if (value instanceof Number) { + return ((Number) value).longValue(); + } + + if (value instanceof String) { + String strValue = parseString(value); + try { + return Long.parseLong(strValue); + } catch (NumberFormatException e) { + return null; + } + } + + return null; + } + + /** + * 解析JSONB字段值为Boolean + * + * @param value JSONB字段的值 + * @return 解析后的Boolean + */ + public static Boolean parseBoolean(Object value) { + if (value == null) { + return null; + } + + if (value instanceof Boolean) { + return (Boolean) value; + } + + if (value instanceof String) { + String strValue = parseString(value); + return Boolean.parseBoolean(strValue); + } + + return null; + } + + /** + * 从JSONB Object中解析字符串值 + * 适用于复杂的JSON对象结构 + * + * @param jsonbValue JSONB字段的值(可能是Map或字符串) + * @param fieldName JSON对象中的字段名 + * @return 解析后的字符串 + */ + @SuppressWarnings("unchecked") + public static String parseStringFromJson(Object jsonbValue, String fieldName) { + if (jsonbValue == null) { + return null; + } + + try { + if (jsonbValue instanceof java.util.Map) { + java.util.Map map = 
(java.util.Map) jsonbValue; + Object fieldValue = map.get(fieldName); + return parseString(fieldValue); + } + + // 如果是JSON字符串,尝试解析 + if (jsonbValue instanceof String) { + String strValue = (String) jsonbValue; + if (strValue.startsWith("{")) { + java.util.Map map = objectMapper.readValue(strValue, java.util.Map.class); + Object fieldValue = map.get(fieldName); + return parseString(fieldValue); + } + } + } catch (Exception e) { + // 解析失败,返回null + return null; + } + + return null; + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/java/com/controllers/AnalysisRuleController.java b/haobang-security-xdr/syslog-consumer/src/main/java/com/controllers/AnalysisRuleController.java new file mode 100644 index 0000000..bb14ca0 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/java/com/controllers/AnalysisRuleController.java @@ -0,0 +1,136 @@ +package com.controllers; + +import com.common.entity.AnalysisAnalysisRule; +import com.common.entity.AnalysisTaskHistory; +import com.common.mapper.AnalysisTaskHistoryMapper; +import com.common.service.AnalysisRuleService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * 分析规则控制器 + */ +@Slf4j +@RestController +@RequestMapping("/api/analysis") +public class AnalysisRuleController { + + @Autowired + private AnalysisRuleService analysisRuleService; + + @Autowired + private AnalysisTaskHistoryMapper taskHistoryMapper; + + /** + * 手动触发实时分析 + */ + @PostMapping("/realtime/execute") + public Map executeRealtimeAnalysis() { + Map result = new HashMap<>(); + try { + List> results = analysisRuleService.executeRealtimeAnalysis(); + result.put("code", 200); + result.put("message", "实时分析执行成功"); + result.put("data", results); + } catch (Exception e) { + log.error("执行实时分析失败", e); + result.put("code", 500); + result.put("message", "执行实时分析失败: 
" + e.getMessage()); + } + return result; + } + + /** + * 手动触发离线分析 + */ + @PostMapping("/offline/execute") + public Map executeOfflineAnalysis() { + Map result = new HashMap<>(); + try { + List> results = analysisRuleService.executeOfflineAnalysis(); + result.put("code", 200); + result.put("message", "离线分析执行成功"); + result.put("data", results); + } catch (Exception e) { + log.error("执行离线分析失败", e); + result.put("code", 500); + result.put("message", "执行离线分析失败: " + e.getMessage()); + } + return result; + } + + /** + * 查询活动规则 + */ + @GetMapping("/rules") + public Map getActiveRules(@RequestParam String runMode) { + Map result = new HashMap<>(); + try { + List rules = analysisRuleService.getActiveRules(runMode); + result.put("code", 200); + result.put("message", "查询成功"); + result.put("data", rules); + } catch (Exception e) { + log.error("查询活动规则失败", e); + result.put("code", 500); + result.put("message", "查询活动规则失败: " + e.getMessage()); + } + return result; + } + + /** + * 停止规则 + */ + @PostMapping("/rules/{ruleId}/stop") + public Map stopRule(@PathVariable String ruleId) { + Map result = new HashMap<>(); + try { + analysisRuleService.stopRule(ruleId); + result.put("code", 200); + result.put("message", "规则已停止"); + } catch (Exception e) { + log.error("停止规则失败", e); + result.put("code", 500); + result.put("message", "停止规则失败: " + e.getMessage()); + } + return result; + } + + /** + * 查询规则执行历史 + */ + @GetMapping("/rules/{ruleId}/history") + public Map getRuleHistory( + @PathVariable String ruleId, + @RequestParam(defaultValue = "10") int limit) { + Map result = new HashMap<>(); + try { + List historyList = taskHistoryMapper.selectRecentByRuleId(ruleId, limit); + result.put("code", 200); + result.put("message", "查询成功"); + result.put("data", historyList); + } catch (Exception e) { + log.error("查询规则执行历史失败", e); + result.put("code", 500); + result.put("message", "查询规则执行历史失败: " + e.getMessage()); + } + return result; + } + + /** + * 健康检查 + */ + @GetMapping("/health") + public Map 
health() { + Map result = new HashMap<>(); + result.put("code", 200); + result.put("message", "服务正常"); + result.put("timestamp", System.currentTimeMillis()); + return result; + } +} diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/application-dev.properties b/haobang-security-xdr/syslog-consumer/src/main/resources/application-dev.properties index 684d278..16ee85e 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/resources/application-dev.properties +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/application-dev.properties @@ -25,9 +25,12 @@ influxdb.batch.size=1000 influxdb.flush.interval=1000 influxdb.retry.attempts=3 influxdb.retry.delay=1000 - +# InfluxDB 2.7 ӳʱ +influxdb.connection.timeout=30s +influxdb.connection.read-timeout=30s +influxdb.connection.write-timeout=60s # Application Configuration -app.worker.threads=4 +app.worker.threads=8 app.max.queue.size=10000 app.metrics.enabled=true @@ -123,3 +126,24 @@ spring.elasticsearch.socket-timeout=30s etl.batch.page-size=1000 etl.batch.insert-batch-size=500 etl.schedule.cron=0 0 2 * * ? 
+ + +# ============================================ +# HikariCP Connection Pool Configuration +# ============================================ +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.max-lifetime=900000 +spring.datasource.hikari.connection-test-query=SELECT 1 +spring.datasource.hikari.validation-timeout=5000 +spring.datasource.hikari.leak-detection-threshold=30000 +spring.datasource.hikari.pool-name=HikariPool-SyslogConsumer +spring.datasource.hikari.auto-commit=false +spring.datasource.hikari.schema=public + +# +analysis.realtime.enabled= true +# 룩 - Ĭ10 +analysis.realtime.check-interval-seconds: 10 \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod-zc.properties b/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod-zc.properties index e63b756..80372c2 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod-zc.properties +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod-zc.properties @@ -25,9 +25,12 @@ influxdb.batch.size=1000 influxdb.flush.interval=1000 influxdb.retry.attempts=3 influxdb.retry.delay=1000 - +# InfluxDB 2.7 ӳʱ +influxdb.connection.timeout=30s +influxdb.connection.read-timeout=30s +influxdb.connection.write-timeout=60s # Application Configuration -app.worker.threads=4 +app.worker.threads=8 app.max.queue.size=10000 app.metrics.enabled=true @@ -122,3 +125,19 @@ spring.elasticsearch.socket-timeout=30s etl.batch.page-size=1000 etl.batch.insert-batch-size=500 etl.schedule.cron=0 0 2 * * ? 
+ + +# ============================================ +# HikariCP Connection Pool Configuration +# ============================================ +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.max-lifetime=900000 +spring.datasource.hikari.connection-test-query=SELECT 1 +spring.datasource.hikari.validation-timeout=5000 +spring.datasource.hikari.leak-detection-threshold=30000 +spring.datasource.hikari.pool-name=HikariPool-SyslogConsumer +spring.datasource.hikari.auto-commit=false +spring.datasource.hikari.schema=public \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod.properties b/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod.properties index f4428fe..e4703f5 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod.properties +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/application-prod.properties @@ -25,9 +25,12 @@ influxdb.batch.size=1000 influxdb.flush.interval=1000 influxdb.retry.attempts=3 influxdb.retry.delay=1000 - +# InfluxDB 2.7 ӳʱ +influxdb.connection.timeout=30s +influxdb.connection.read-timeout=30s +influxdb.connection.write-timeout=60s # Application Configuration -app.worker.threads=4 +app.worker.threads=8 app.max.queue.size=10000 app.metrics.enabled=true @@ -123,4 +126,26 @@ spring.elasticsearch.socket-timeout=30s # ETL etl.batch.page-size=1000 etl.batch.insert-batch-size=500 -etl.schedule.cron=0 0 2 * * ? \ No newline at end of file +etl.schedule.cron=0 0 2 * * ? 
+ + +# ============================================ +# HikariCP Connection Pool Configuration +# ============================================ +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.max-lifetime=900000 +spring.datasource.hikari.connection-test-query=SELECT 1 +spring.datasource.hikari.validation-timeout=5000 +spring.datasource.hikari.leak-detection-threshold=30000 +spring.datasource.hikari.pool-name=HikariPool-SyslogConsumer +spring.datasource.hikari.auto-commit=false +spring.datasource.hikari.schema=public + + +# +analysis.realtime.enabled= true +# 룩 - Ĭ10 +analysis.realtime.check-interval-seconds: 10 \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/application-test.properties b/haobang-security-xdr/syslog-consumer/src/main/resources/application-test.properties index 13c29b2..4cfb03b 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/resources/application-test.properties +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/application-test.properties @@ -27,7 +27,7 @@ influxdb.retry.attempts=3 influxdb.retry.delay=1000 # Application Configuration -app.worker.threads=4 +app.worker.threads=8 app.max.queue.size=10000 app.metrics.enabled=true diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/application.properties b/haobang-security-xdr/syslog-consumer/src/main/resources/application.properties index 684d278..e4703f5 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/resources/application.properties +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/application.properties @@ -7,7 +7,7 @@ server.error.include-message=always server.error.include-binding-errors=always #run.environment: dev|test|pro -server.run.environment=dev +server.run.environment=pro # Syslog Server Configuration @@ -17,31 +17,33 @@ 
syslog.max.frame.length=65536 syslog.buffer.size=1000 # InfluxDB 2.7 Configuration -influxdb.url=http://192.168.222.131:8086 -influxdb.token=3Tvu-IZWtaY03UDkbUDlufD0kxn85keo9LhYQcv2Cxk0LJmXqqHkNVrO664DbaJAYwoGI7UIg904KqZC7Q_ZFA== -influxdb.org=yelang +influxdb.url=http://192.168.4.26:8087 +influxdb.token=LFjXZyRxTf1V84oN-wwjhSjS4qIK-ZMoHzQJB67ir3qHNSBVJbMcTkPuNmM0cNxvzFEDWLYNzrz1VJKMitY5hw== +influxdb.org=influxdb influxdb.bucket=yelangbucket influxdb.batch.size=1000 influxdb.flush.interval=1000 influxdb.retry.attempts=3 influxdb.retry.delay=1000 - +# InfluxDB 2.7 ӳʱ +influxdb.connection.timeout=30s +influxdb.connection.read-timeout=30s +influxdb.connection.write-timeout=60s # Application Configuration -app.worker.threads=4 +app.worker.threads=8 app.max.queue.size=10000 app.metrics.enabled=true #database Configuration -spring.datasource.url=jdbc:postgresql://117.72.68.72:54329/ecosys +spring.datasource.url=jdbc:postgresql://192.168.4.26:5432/ecosys spring.datasource.username=postgres -spring.datasource.password=TnLanWaidYSwTSG5 +spring.datasource.password=caZ2TcmXNSW8L2Ap spring.datasource.driver-class-name=org.postgresql.Driver # mybatis Configuration mybatis.mapper-locations=classpath:mapper/*.xml mybatis.type-aliases-package=com.common.entity - #mybatis handler mybatis.configuration.default-statement-timeout=30 mybatis.configuration.default-fetch-size=1000 @@ -49,20 +51,20 @@ mybatis.configuration.map-underscore-to-camel-case=true mybatis.type-handlers-package=com.Modules.etl.handler mybatis-plus.configuration.map-underscore-to-camel-case=true mybatis-plus.type-handlers-package=com.Modules.etl.handler + # kafka Configuration -spring.kafka.consumer.bootstrap-servers=192.168.222.130:9092 -spring.kafka.consumer.group-id=test-group-app +spring.kafka.consumer.bootstrap-servers=192.168.4.26:9092 +spring.kafka.consumer.group-id=agent-syslog-group spring.kafka.consumer.auto-offset-reset=latest spring.kafka.consumer.enable-auto-commit=false 
spring.kafka.consumer.auto-commit-interval=1000 -spring.kafka.consumer.topic=test-topic +spring.kafka.consumer.topic=agent-syslog-topic + spring.kafka.consumer.max-poll-records=1000 spring.kafka.consumer.properties.max.poll.interval.ms=300000 spring.kafka.consumer.properties.session.timeout.ms=45000 -#spring.kafka.consumer.key-deserializer: org.apache.kafka.common.serialization.StringDeserializer -#spring.kafka.consumer.value-deserializer: org.apache.kafka.common.serialization.StringDeserializer spring.kafka.consumer.fetch-min-size= 1048576 spring.kafka.listener.ack-mode= manual spring.kafka.listener.concurrency= 2 @@ -81,21 +83,23 @@ partition.check.tomorrow.enabled=true partition.check.future.days=7 partition.auto.create=true -# -spring.redis.host=localhost + +# +spring.redis.host=192.168.4.26 spring.redis.port=6379 # 루û룬ʡԣ -spring.redis.password= +spring.redis.password=123456 spring.redis.database=0 -spring.redis.timeout=2000 +spring.redis.timeout=5000 +#spring.redis.password=${REDIS_PASSWORD:default_prod_password} -spring.redis.lettuce.pool.max-active=8 -spring.redis.lettuce.pool.max-wait=-1 -spring.redis.lettuce.pool.max-idle=8 -spring.redis.lettuce.pool.min-idle=0 -# ʱ϶̣ -spring.cache.redis.time-to-live=600000 +spring.redis.lettuce.pool.max-active=20 +spring.redis.lettuce.pool.max-wait=5000 +spring.redis.lettuce.pool.max-idle=10 +spring.redis.lettuce.pool.min-idle=5 +# ʱϳ +spring.cache.redis.time-to-live=3600000 # Ӧô app.processor.thread-pool.core-pool-size=10 @@ -123,3 +127,25 @@ spring.elasticsearch.socket-timeout=30s etl.batch.page-size=1000 etl.batch.insert-batch-size=500 etl.schedule.cron=0 0 2 * * ? 
+ + +# ============================================ +# HikariCP Connection Pool Configuration +# ============================================ +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.max-lifetime=900000 +spring.datasource.hikari.connection-test-query=SELECT 1 +spring.datasource.hikari.validation-timeout=5000 +spring.datasource.hikari.leak-detection-threshold=30000 +spring.datasource.hikari.pool-name=HikariPool-SyslogConsumer +spring.datasource.hikari.auto-commit=false +spring.datasource.hikari.schema=public + + +# +analysis.realtime.enabled= true +# 룩 - Ĭ10 +analysis.realtime.check-interval-seconds: 10 \ No newline at end of file diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisAnalysisRuleMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisAnalysisRuleMapper.xml new file mode 100644 index 0000000..fbe747b --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisAnalysisRuleMapper.xml @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + rule_id, create_user, rule_name, analysis_method, run_mode, rule_output, + organization, task_status, create_dept, del_flag, create_time, update_time, + create_by, update_by, remark, tenant_id, rule_desc, rule_type, rule_status, + rule_content, rule_expression, priority, tags, version, subset_id + + + + + + + + + + + + + + UPDATE analysis_analysis_rule + SET task_status = #{taskStatus}, + update_time = NOW() + + ,update_by = #{updateBy} + + WHERE rule_id = #{ruleId, jdbcType=OTHER}::uuid + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisFieldMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisFieldMapper.xml new file mode 100644 index 0000000..9416648 --- /dev/null 
+++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisFieldMapper.xml @@ -0,0 +1,72 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, rule_id, type, data_source, database, table_name, table_alias, + column_name, column_desc, data_type, fn, arguments, placeholder, + base_type, category_id, create_dept, del_flag, create_time, + update_time, create_by, update_by, remark, tenant_id,alarm_column_name,alarm_column_desc + + + + + + + + + + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisFilterMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisFilterMapper.xml new file mode 100644 index 0000000..b8f9dd6 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisFilterMapper.xml @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, rule_id, data_source, database, table_name, table_alias, + column_name, column_desc, data_type, fn, arguments, operator, value, + base_type, category_id, create_dept, del_flag, create_time, + update_time, create_by, update_by, remark, tenant_id, cond_id, seq_num + + + + + + + + + + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByColumnMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByColumnMapper.xml new file mode 100644 index 0000000..64048ae --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByColumnMapper.xml @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, group_by_id, data_source, database, table_name, table_alias, + column_name, column_desc, data_type, base_type, category_id, + create_dept, del_flag, create_time, update_time, create_by, update_by, + remark, tenant_id, rule_id, group_id, field_id, sort + + + + + + + + + + + + diff --git 
a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByHavingMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByHavingMapper.xml new file mode 100644 index 0000000..ca3cdb1 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByHavingMapper.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, group_by_id, data_source, database, table_name, table_alias, + column_name, column_desc, data_type, fn, arguments, operator, value, + base_type, category_id, create_dept, del_flag, create_time, + update_time, create_by, update_by, remark, tenant_id + + + + + + + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByMapper.xml new file mode 100644 index 0000000..1e426e8 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByMapper.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + id, rule_id, group_type, window_type, + create_dept, del_flag, create_time, update_time, create_by, update_by, + remark, tenant_id + + + + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByWindowMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByWindowMapper.xml new file mode 100644 index 0000000..78bb10c --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisGroupByWindowMapper.xml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, group_by_id, window_type, + tumble_window_time_type, tumble_window_size, tumble_window_size_unit, + hop_window_time_type, hop_window_size, hop_window_size_unit, + hop_window_slide, hop_window_slide_unit, hop_window_alarm_once_per_window, + session_window_time_type, session_window_size, 
session_window_size_unit, + create_dept, del_flag, create_time, update_time, create_by, update_by, + remark, tenant_id + + + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisTaskHistoryMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisTaskHistoryMapper.xml new file mode 100644 index 0000000..10eb841 --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisTaskHistoryMapper.xml @@ -0,0 +1,83 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + id, rule_id, start_time, end_time, duration_time, progress_percent, + input_count, output_count, status, create_dept, del_flag, + create_time, update_time, create_by, update_by, remark, tenant_id + + + + + INSERT INTO analysis_task_history ( + + ) VALUES ( + #{id}, #{ruleId}::uuid, #{startTime}, #{endTime}, #{durationTime}, #{progressPercent}, + #{inputCount}, #{outputCount}, #{status}, #{createDept}, #{delFlag}, + #{createTime}, #{updateTime}, #{createBy}, #{updateBy}, #{remark}, #{tenantId} + ) + + + + + UPDATE analysis_task_history + SET end_time = #{endTime}, + duration_time = #{durationTime}, + progress_percent = #{progressPercent}, + input_count = #{inputCount}, + output_count = #{outputCount}, + status = #{status}, + update_time = NOW() + + ,remark = #{remark} + + WHERE id = #{id} + + + + + + + + + diff --git a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisWhereConditionMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisWhereConditionMapper.xml new file mode 100644 index 0000000..541bebb --- /dev/null +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AnalysisWhereConditionMapper.xml @@ -0,0 +1,58 @@ + + + + + + + + + + + + + + + + + + + + + + cond_id, rule_id, logical_op, seq_num, parent_cond_id, + create_dept, del_flag, create_time, update_time, + create_by, update_by, remark, tenant_id + + + + + + + + + + + + diff --git 
a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AppLogMapper.xml b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AppLogMapper.xml index 1feee39..54ec894 100644 --- a/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AppLogMapper.xml +++ b/haobang-security-xdr/syslog-consumer/src/main/resources/mapper/AppLogMapper.xml @@ -1,6 +1,6 @@ - + INSERT INTO applog (