初次提交代码

This commit is contained in:
2026-01-11 15:33:22 +08:00
commit 6603c6f4a1
455 changed files with 32175 additions and 0 deletions

View File

@@ -0,0 +1,80 @@
package com.common.mapper;
import com.common.entity.Alarm;
import org.apache.ibatis.annotations.*;
import java.util.List;
@Mapper
public interface AlarmMapper {
/**
 * Batch-inserts alarm records using a MyBatis &lt;script&gt; foreach.
 * <p>
 * Array-valued columns (IPs, ports, log ids, HTTP headers/bodies, chain phases)
 * are serialized via the custom ArrayString/ArrayInteger type handlers.
 * NOTE(review): the type-handler package "com.Modules.etl.handler" has an
 * unconventional capitalized segment — confirm it matches the actual package.
 *
 * @param alarmList alarms to insert; must be non-empty (an empty list yields
 *                  "INSERT ... VALUES" with no tuples, which is a SQL error)
 */
@Insert({"<script>",
"INSERT INTO alarm (",
"id, created_at, alarm_name, alarm_level, alarm_type, ",
"alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, ",
"device_id, comment,origin_log_ids,log_start_at, log_end_at, http_status, ",
"attack_port, victim_port, attack_method, etl_time, log_count, ",
"attack_chain_phase, disposition_advice, attack_direction, ",
"judged_state, disposed_state, attack_result, fall, payload, " ,
"http_req_header , http_req_body,http_resp_header , http_resp_body ",
") VALUES ",
"<foreach collection='list' item='item' separator=','>",
"( #{item.id}, #{item.createdAt}, #{item.alarmName}, #{item.alarmLevel}, ",
"#{item.alarmType}, #{item.alarmMajorType}, #{item.alarmMinorType}, #{item.alarmAreaId}, ",
"#{item.attackIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.victimIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.comment}, " ,
"#{item.originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.logStartAt}, #{item.logEndAt}, #{item.httpStatus}, ",
"#{item.attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.attackMethod}, #{item.etlTime}, #{item.logCount}, ",
"#{item.attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.dispositionAdvice}, #{item.attackDirection}, ",
"#{item.judgedState}, #{item.disposedState}, #{item.attackResult}, #{item.fall}, ",
"#{item.payload}, ",
"#{item.httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler})",
"</foreach>",
"</script>"})
void batchInsert(@Param("list") List<Alarm> alarmList);
/**
 * Inserts a single alarm record.
 * <p>
 * Same column list and type-handler mapping as {@link #batchInsert(List)};
 * keep the two statements in sync when adding columns.
 *
 * @param alarm the alarm to insert; {@code id} is supplied by the caller
 *              (no generated-key option is configured)
 */
@Insert("INSERT INTO alarm (" +
"id, created_at, alarm_name, alarm_level, alarm_type, " +
"alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, " +
"device_id, comment,origin_log_ids, log_start_at, log_end_at, http_status, " +
"attack_port, victim_port, attack_method, etl_time, log_count, " +
"attack_chain_phase, disposition_advice, attack_direction, " +
"judged_state, disposed_state, attack_result, fall, payload, " +
"http_req_header , http_req_body,http_resp_header , http_resp_body " +
") VALUES (" +
"#{id}, #{createdAt}, #{alarmName}, #{alarmLevel}, " +
"#{alarmType}, #{alarmMajorType}, #{alarmMinorType}, #{alarmAreaId}, " +
"#{attackIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{victimIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{comment}, " +
"#{originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{logStartAt}, #{logEndAt}, #{httpStatus}, " +
"#{attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{attackMethod}, #{etlTime}, #{logCount}, " +
"#{attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{dispositionAdvice}, #{attackDirection}, " +
"#{judgedState}, #{disposedState}, #{attackResult}, #{fall}, #{payload}, " +
"#{httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler} " +
")")
void insert(Alarm alarm);
}

View File

@@ -0,0 +1,84 @@
package com.common.mapper;
import com.common.entity.Alarm;
import com.common.entity.AlarmVisit;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
public interface AlarmVisitMapper {
/**
 * Batch-inserts alarm-visit records using a MyBatis &lt;script&gt; foreach.
 * <p>
 * NOTE(review): this statement is a near-duplicate of AlarmMapper.batchInsert
 * (only the table name differs) — consider sharing an SQL provider to keep
 * the column lists from drifting apart.
 *
 * @param alarmList records to insert; must be non-empty (an empty list yields
 *                  "INSERT ... VALUES" with no tuples, which is a SQL error)
 */
@Insert({"<script>",
"INSERT INTO alarm_visit (",
"id, created_at, alarm_name, alarm_level, alarm_type, ",
"alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, ",
"device_id, comment,origin_log_ids,log_start_at, log_end_at, http_status, ",
"attack_port, victim_port, attack_method, etl_time, log_count, ",
"attack_chain_phase, disposition_advice, attack_direction, ",
"judged_state, disposed_state, attack_result, fall, payload, " ,
"http_req_header , http_req_body,http_resp_header , http_resp_body ",
") VALUES ",
"<foreach collection='list' item='item' separator=','>",
"(#{item.id}, #{item.createdAt}, #{item.alarmName}, #{item.alarmLevel}, ",
"#{item.alarmType}, #{item.alarmMajorType}, #{item.alarmMinorType}, #{item.alarmAreaId}, ",
"#{item.attackIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.victimIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.comment}, " ,
"#{item.originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.logStartAt}, #{item.logEndAt}, #{item.httpStatus}, ",
"#{item.attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.attackMethod}, #{item.etlTime}, #{item.logCount}, ",
"#{item.attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, ",
"#{item.dispositionAdvice}, #{item.attackDirection}, ",
"#{item.judgedState}, #{item.disposedState}, #{item.attackResult}, #{item.fall}, ",
"#{item.payload}, ",
"#{item.httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, ",
"#{item.httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}) ",
"</foreach>",
"</script>"})
void batchInsert(@Param("list") List<AlarmVisit> alarmList);
/**
 * Inserts a single alarm-visit record.
 * <p>
 * Same column list and type-handler mapping as {@link #batchInsert(List)};
 * keep the two statements in sync when adding columns.
 *
 * @param alarm the record to insert; {@code id} is supplied by the caller
 */
@Insert("INSERT INTO alarm_visit (" +
"id, created_at, alarm_name, alarm_level, alarm_type, " +
"alarm_major_type, alarm_minor_type,alarm_area_id, attack_ip, victim_ip, " +
"device_id, comment,origin_log_ids, log_start_at, log_end_at, http_status, " +
"attack_port, victim_port, attack_method, etl_time, log_count, " +
"attack_chain_phase, disposition_advice, attack_direction, " +
"judged_state, disposed_state, attack_result, fall, payload, " +
"http_req_header , http_req_body,http_resp_header , http_resp_body " +
") VALUES (" +
"#{id}, #{createdAt}, #{alarmName}, #{alarmLevel}, " +
"#{alarmType}, #{alarmMajorType}, #{alarmMinorType}, #{alarmAreaId}, " +
"#{attackIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{victimIp, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{deviceId, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{comment}, " +
"#{originLogIds, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{logStartAt}, #{logEndAt}, #{httpStatus}, " +
"#{attackPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{victimPort, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{attackMethod}, #{etlTime}, #{logCount}, " +
"#{attackChainPhase, typeHandler=com.Modules.etl.handler.ArrayIntegerTypeHandler}, " +
"#{dispositionAdvice}, #{attackDirection}, " +
"#{judgedState}, #{disposedState}, #{attackResult}, #{fall}, #{payload}, " +
"#{httpReqHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{httpReqBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{httpRespHeaders, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler}, " +
"#{httpRespBodys, typeHandler=com.Modules.etl.handler.ArrayStringTypeHandler} " +
")")
void insert(AlarmVisit alarm);
}

View File

@@ -0,0 +1,41 @@
package com.common.mapper;
import com.common.entity.AppLogEntity;
import org.apache.ibatis.annotations.*;
import java.time.LocalDateTime;
import java.util.List;
@Mapper
public interface AppLogMapper {
/**
 * Inserts one application-log row; the generated primary key is written
 * back into {@code appLog.id}.
 *
 * @param appLog the log entity to persist
 * @return number of rows inserted (1 on success)
 */
@Insert("INSERT INTO applog (es_index, es_type, es_id, es_score, dt_time, collect_time, log_type, " +
"trace_id, method, app_name, ip, class_name, env, content, thread_name, log_level, seq, " +
"indexed_at, log_date, created_at, updated_at) " +
"VALUES (#{esIndex}, #{esType}, #{esId}, #{esScore}, #{dtTime}, #{collectTime}, #{logType}, " +
"#{traceId}, #{method}, #{appName}, #{ip}, #{className}, #{env}, #{content}, #{threadName}, " +
"#{logLevel}, #{seq}, #{indexedAt}, #{logDate}, #{createdAt}, #{updatedAt})")
@Options(useGeneratedKeys = true, keyProperty = "id")
int insert(AppLogEntity appLog);
/**
 * Batch insert; the SQL for this method is defined in the XML mapper.
 *
 * @param appLogs entities to insert
 * @return number of rows inserted
 */
int batchInsert(@Param("list") List<AppLogEntity> appLogs);
/**
 * Checks whether a row with the given ES document id exists.
 * <p>
 * Uses {@code SELECT EXISTS(...)} rather than {@code COUNT(1)}: a raw count
 * greater than 1 cannot be reliably cast to {@code boolean} by the JDBC
 * driver, whereas EXISTS always yields a true boolean.
 *
 * @param esId Elasticsearch document id
 * @return true if at least one matching row exists
 */
@Select("SELECT EXISTS(SELECT 1 FROM applog WHERE es_id = #{esId})")
boolean existsByEsId(String esId);
/**
 * Checks whether a row with the given ES document id and index exists.
 *
 * @param esId    Elasticsearch document id
 * @param esIndex Elasticsearch index name
 * @return true if at least one matching row exists
 */
@Select("SELECT EXISTS(SELECT 1 FROM applog WHERE es_id = #{esId} AND es_index = #{esIndex})")
boolean existsByEsIdAndIndex(@Param("esId") String esId, @Param("esIndex") String esIndex);
}

View File

@@ -0,0 +1,192 @@
package com.common.mapper;
import com.common.entity.DeviceCollectTask;
import org.apache.ibatis.annotations.*;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Update;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import java.time.LocalDateTime;
import java.util.List;
@Mapper
public interface DeviceCollectTaskMapper extends BaseMapper<DeviceCollectTask>{
/**
 * Looks up one collect task by primary key.
 * NOTE(review): this shadows/overloads BaseMapper.selectById(Serializable)
 * with an Integer parameter — MyBatis-Plus also injects a statement with the
 * same id; confirm the two do not conflict at startup.
 */
@Select("SELECT * FROM device_collect_task WHERE id = #{id}")
DeviceCollectTask selectById(Integer id);
/**
 * Returns every collect task.
 */
@Select("SELECT * FROM device_collect_task")
List<DeviceCollectTask> selectAll();
/**
 * Returns all collect tasks for one device.
 */
@Select("SELECT * FROM device_collect_task WHERE device_id = #{deviceId}")
List<DeviceCollectTask> selectByDeviceId(Integer deviceId);
/**
 * Returns all collect tasks with the given collection method code.
 */
@Select("SELECT * FROM device_collect_task WHERE method = #{method}")
List<DeviceCollectTask> selectByMethod(Integer method);
/**
 * Fuzzy-matches tasks whose name contains the given substring.
 */
@Select("SELECT * FROM device_collect_task WHERE task_name LIKE CONCAT('%', #{taskName}, '%')")
List<DeviceCollectTask> selectByTaskNameLike(String taskName);
/**
 * Returns tasks that have succeeded at least once
 * (i.e. last_success_time has ever been set).
 */
@Select("SELECT * FROM device_collect_task WHERE last_success_time IS NOT NULL")
List<DeviceCollectTask> selectSuccessTasks();
/**
 * Returns tasks that have failed at least once
 * (i.e. last_failed_time has ever been set).
 */
@Select("SELECT * FROM device_collect_task WHERE last_failed_time IS NOT NULL")
List<DeviceCollectTask> selectFailedTasks();
/**
 * Multi-condition query; SQL is defined in the XML mapper.
 */
List<DeviceCollectTask> selectByCondition(DeviceCollectTask condition);
/**
 * Inserts a collect task; created_at/updated_at are set to NOW() by the
 * statement and the generated key is written back into {@code task.id}.
 * NOTE(review): same signature as the MyBatis-Plus injected insert —
 * this annotated statement replaces/conflicts with it; verify intent.
 */
@Insert("INSERT INTO device_collect_task (created_at, updated_at, device_id, method, task_name, " +
"first_time, last_success_time, last_failed_time, detail_id, epm, epm_peak, " +
"process_architecture, task_count, recent_discover_time, epm_upper_limit) " +
"VALUES (NOW(), NOW(), #{deviceId}, #{method}, #{taskName}, #{firstTime}, " +
"#{lastSuccessTime}, #{lastFailedTime}, #{detailId}, #{epm}, #{epmPeak}, " +
"#{processArchitecture}, #{taskCount}, #{recentDiscoverTime}, #{epmUpperLimit})")
@Options(useGeneratedKeys = true, keyProperty = "id")
int insert(DeviceCollectTask task);
/**
 * Full-row update by primary key; refreshes updated_at to NOW().
 */
@Update("UPDATE device_collect_task SET " +
"updated_at = NOW(), " +
"device_id = #{deviceId}, " +
"method = #{method}, " +
"task_name = #{taskName}, " +
"first_time = #{firstTime}, " +
"last_success_time = #{lastSuccessTime}, " +
"last_failed_time = #{lastFailedTime}, " +
"detail_id = #{detailId}, " +
"epm = #{epm}, " +
"epm_peak = #{epmPeak}, " +
"process_architecture = #{processArchitecture}, " +
"task_count = #{taskCount}, " +
"recent_discover_time = #{recentDiscoverTime}, " +
"epm_upper_limit = #{epmUpperLimit} " +
"WHERE id = #{id}")
int update(DeviceCollectTask task);
/**
 * Deletes one collect task by primary key.
 */
@Delete("DELETE FROM device_collect_task WHERE id = #{id}")
int deleteById(Integer id);
/**
 * Deletes every collect task belonging to one device.
 */
@Delete("DELETE FROM device_collect_task WHERE device_id = #{deviceId}")
int deleteByDeviceId(Integer deviceId);
/**
 * Marks a task run as successful: stamps last_success_time/updated_at
 * and increments task_count (treating NULL as 0).
 */
@Update("UPDATE device_collect_task SET " +
"last_success_time = NOW(), " +
"updated_at = NOW(), " +
"task_count = COALESCE(task_count, 0) + 1 " +
"WHERE id = #{id}")
int updateSuccessStatus(Integer id);
/**
 * Marks a task run as failed: stamps last_failed_time/updated_at.
 */
@Update("UPDATE device_collect_task SET " +
"last_failed_time = NOW(), " +
"updated_at = NOW() " +
"WHERE id = #{id}")
int updateFailedStatus(Integer id);
/**
 * Updates the current EPM (events per minute) and keeps epm_peak at the
 * running maximum (NULL peak treated as 0).
 */
@Update("UPDATE device_collect_task SET " +
"epm = #{epm}, " +
"epm_peak = GREATEST(COALESCE(epm_peak, 0), #{epm}), " +
"updated_at = NOW() " +
"WHERE id = #{id}")
int updateEpm(@Param("id") Integer id, @Param("epm") Integer epm);
/**
 * Counts collect tasks belonging to one device.
 */
@Select("SELECT COUNT(*) FROM device_collect_task WHERE device_id = #{deviceId}")
int countByDeviceId(Integer deviceId);
/**
 * Returns the most recently updated collect task of a device.
 */
@Select("SELECT * FROM device_collect_task WHERE device_id = #{deviceId} ORDER BY updated_at DESC LIMIT 1")
DeviceCollectTask selectLatestByDeviceId(Integer deviceId);
/**
 * Batch-updates task timestamps, one UPDATE per task joined with ';'.
 * NOTE(review): multi-statement SQL requires the JDBC connection to allow
 * multiple queries (e.g. allowMultiQueries=true on MySQL); the ::TIMESTAMP
 * casts are PostgreSQL syntax — confirm which database this targets.
 * first_time is only set when it is still NULL (first-seen semantics).
 */
@Update("<script>" +
"<foreach collection='tasks' item='task' separator=';'>" +
"UPDATE device_collect_task SET " +
" first_time = CASE " +
" WHEN first_time IS NULL AND #{task.firstTime}::TIMESTAMP IS NOT NULL THEN #{task.firstTime}::TIMESTAMP " +
" ELSE first_time " +
" END, " +
" last_success_time = #{task.lastSuccessTime}::TIMESTAMP, " +
" last_failed_time = #{task.lastFailedTime}::TIMESTAMP, " +
" updated_at = #{task.updatedAt}::TIMESTAMP " +
"WHERE id = #{task.id}" +
"</foreach>" +
"</script>")
int batchUpdateTimes(@Param("tasks") List<DeviceCollectTask> tasks);
/**
 * Updates one task's timestamps; first_time is only set when still NULL.
 * NOTE(review): id arrives as a String here while every other method uses
 * Integer — verify callers pass a numeric string.
 */
@Update("UPDATE device_collect_task " +
"SET first_time = CASE " +
" WHEN first_time IS NULL AND #{firstTime}::TIMESTAMP IS NOT NULL THEN #{firstTime}::TIMESTAMP " +
" ELSE first_time " +
" END, " +
" last_success_time = #{lastSuccessTime}::TIMESTAMP, " +
" last_failed_time = #{lastFailTime}::TIMESTAMP, " +
" updated_at = #{updateTime}::TIMESTAMP " +
"WHERE id = #{deviceCollectId}")
int updateTaskTime(@Param("deviceCollectId") String deviceCollectId,
@Param("firstTime") LocalDateTime firstTime,
@Param("lastSuccessTime") LocalDateTime lastSuccessTime,
@Param("lastFailTime") LocalDateTime lastFailTime,
@Param("updateTime") LocalDateTime updateTime);
/**
 * Returns every collect task.
 * NOTE(review): duplicate of {@link #selectAll()} — consider removing one.
 */
@Select("SELECT * FROM device_collect_task")
List<DeviceCollectTask> selectAllTasks();
}

View File

@@ -0,0 +1,124 @@
package com.common.mapper;
import com.common.entity.DeviceCollectTaskTime;
import com.common.entity.DeviceReceiveLog;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
@Mapper
public interface DeviceReceiveLogMapper {
/**
 * Inserts one receive-log row; SQL defined in the XML mapper.
 *
 * @return number of rows inserted
 */
int insert(DeviceReceiveLog log);
/**
 * Batch-inserts receive-log rows; SQL defined in the XML mapper.
 *
 * @return number of rows inserted
 */
int batchInsert(@Param("list") List<DeviceReceiveLog> logs);
/**
 * Looks up one row by primary key (XML-mapped).
 */
DeviceReceiveLog selectById(@Param("id") Long id);
/**
 * Returns all rows for one device (XML-mapped).
 */
List<DeviceReceiveLog> selectByDeviceId(@Param("deviceId") Integer deviceId);
/**
 * Returns all rows for one collect probe (XML-mapped).
 */
List<DeviceReceiveLog> selectByCollectId(@Param("collectId") Integer collectId);
/**
 * Returns all rows for one device IP address (XML-mapped).
 */
List<DeviceReceiveLog> selectByDeviceIp(@Param("deviceIp") String deviceIp);
/**
 * Returns rows within a time range (XML-mapped; boundary inclusivity is
 * defined by the XML statement).
 */
List<DeviceReceiveLog> selectByTimeRange(
@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime);
/**
 * Multi-condition query (XML-mapped).
 */
List<DeviceReceiveLog> selectByCondition(DeviceReceiveLog condition);
/**
 * Counts rows matching the given condition (XML-mapped).
 */
Long countByCondition(DeviceReceiveLog condition);
/**
 * Deletes old rows within a time range (XML-mapped).
 *
 * @return number of rows deleted
 */
int deleteByTimeRange(
@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime);
/**
 * Returns the most recent N rows (XML-mapped).
 */
List<DeviceReceiveLog> selectRecent(@Param("limit") Integer limit);
/**
 * Counts rows grouped by device (XML-mapped).
 */
List<Map<String, Object>> countByDeviceGroup();
/**
 * For each collect task, the latest successful push time since midnight
 * (push_success = true, created_at >= CURRENT_DATE).
 */
@Select("SELECT device_collect_id, MAX(created_at) AS last_success_time " +
"FROM device_receive_log " +
"WHERE push_success = true " +
"AND created_at >= CURRENT_DATE " +
"GROUP BY device_collect_id")
List<DeviceCollectTaskTime> selectDailySuccessTimes();
/**
 * For each collect task, the latest failed push time since midnight
 * (push_success = false, created_at >= CURRENT_DATE).
 */
@Select("SELECT device_collect_id, MAX(created_at) AS last_fail_time " +
"FROM device_receive_log " +
"WHERE push_success = false " +
"AND created_at >= CURRENT_DATE " +
"GROUP BY device_collect_id")
List<DeviceCollectTaskTime> selectDailyFailTimes();
/**
 * For each collect task, the earliest successful push time over all history.
 */
@Select("SELECT device_collect_id, MIN(created_at) AS first_success_time " +
"FROM device_receive_log " +
"WHERE push_success = true " +
"GROUP BY device_collect_id")
List<DeviceCollectTaskTime> selectFirstSuccessTimes();
/**
 * For each collect task, first and last successful push time within
 * [startTime, endTime) — start inclusive, end exclusive.
 */
@Select("SELECT device_collect_id, MIN(created_at) AS first_time, " +
"MAX(created_at) AS last_success_time " +
"FROM device_receive_log " +
"WHERE push_success = true " +
"AND created_at >= #{startTime} " +
"AND created_at < #{endTime} " +
"GROUP BY device_collect_id")
List<DeviceCollectTaskTime> selectSuccessTimesByRange(
@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime);
}

View File

@@ -0,0 +1,70 @@
package com.common.mapper;
import com.common.entity.DmColumn;
import org.apache.ibatis.annotations.*;
import java.util.List;
import java.util.Map;
@Mapper
public interface DmColumnMapper {
// Returns every column definition that has not been soft-deleted.
@Select("SELECT * FROM dm_column WHERE deleted_at IS NULL")
List<DmColumn> findAll();
// Look up by primary key.
// NOTE(review): the @Select below is commented out, so this method must be
// mapped in the XML mapper — confirm the XML statement exists, otherwise
// this method fails at runtime with "statement not found".
// @Select("SELECT * FROM dm_column WHERE id = #{id} AND deleted_at IS NULL")
DmColumn findById(Long id);
// Exact match on the (presumably unique) column name, excluding soft-deleted rows.
@Select("SELECT * FROM dm_column WHERE name = #{name} AND deleted_at IS NULL")
DmColumn findByName(String name);
// Fuzzy match on display name, excluding soft-deleted rows.
@Select("SELECT * FROM dm_column WHERE display_name LIKE CONCAT('%', #{displayName}, '%') AND deleted_at IS NULL")
List<DmColumn> findByDisplayName(String displayName);
// Filter by the built-in flag, excluding soft-deleted rows.
@Select("SELECT * FROM dm_column WHERE is_built_in = #{isBuiltIn} AND deleted_at IS NULL")
List<DmColumn> findByIsBuiltIn(Boolean isBuiltIn);
// Inserts a full column definition; created_at/updated_at are stamped with
// NOW() and the generated key is written back into dmColumn.id.
@Insert("INSERT INTO dm_column (created_at, updated_at, name, display_name, storage_data_type, " +
"business_data_type, is_built_in, is_hidden, is_not_normalizable, is_required, category_id, " +
"custom_asset_category_id, is_virtual, table_id, asset_table_id, column_set_id, base_type, " +
"user_task_id, created_by_id, create_dept, create_by, create_time, update_by, update_time) " +
"VALUES (NOW(), NOW(), #{name}, #{displayName}, #{storageDataType}, #{businessDataType}, " +
"#{isBuiltIn}, #{isHidden}, #{isNotNormalizable}, #{isRequired}, #{categoryId}, " +
"#{customAssetCategoryId}, #{isVirtual}, #{tableId}, #{assetTableId}, #{columnSetId}, " +
"#{baseType}, #{userTaskId}, #{createdById}, #{createDept}, #{createBy}, #{createTime}, " +
"#{updateBy}, #{updateTime})")
@Options(useGeneratedKeys = true, keyProperty = "id")
int insert(DmColumn dmColumn);
// Batch insert of a reduced column set (name/display_name/flags only).
// Relies on MyBatis's default "list" name for a single List parameter;
// adding @Param("list") would make that explicit.
@Insert("<script>" +
"INSERT INTO dm_column (created_at, updated_at, name, display_name, is_built_in, is_hidden) " +
"VALUES " +
"<foreach collection='list' item='item' separator=','>" +
"(NOW(), NOW(), #{item.name}, #{item.displayName}, #{item.isBuiltIn}, #{item.isHidden})" +
"</foreach>" +
"</script>")
int batchInsert(List<DmColumn> dmColumns);
// Multi-condition query; SQL defined in the XML mapper.
List<DmColumn> findByCondition(DmColumn condition);
// Counts non-soft-deleted column definitions.
@Select("SELECT COUNT(*) FROM dm_column WHERE deleted_at IS NULL")
Long count();
/**
 * Returns all data-normalization fields (XML-mapped).
 *
 * @return normalization field rows as column/value maps
 */
List<Map<String, Object>> selectAllNormal();
}

View File

@@ -0,0 +1,46 @@
package com.common.mapper;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import com.common.entity.DmNormalizeRule;
import java.util.List;
import java.util.Map;
@Mapper
public interface DmNormalizeRuleMapper {
/**
 * Looks up one normalization rule by primary key (XML-mapped).
 *
 * @param id primary key
 * @return the rule, or null when not found
 */
DmNormalizeRule selectById(@Param("id") Long id);
/**
 * Returns normalization rules for one device (XML-mapped).
 * NOTE(review): the parameter is named "id" but semantically it is a
 * device id — renaming to deviceId would be clearer.
 *
 * @param id device id
 * @return rule rows as column/value maps
 */
List<Map<String, Object>> selectByDeviceId(@Param("id") Long id);
/**
 * Updates a rule by primary key (XML-mapped).
 *
 * @param rule rule carrying the id and new field values
 * @return number of rows updated
 */
int updateById(DmNormalizeRule rule);
/**
 * Tenant-isolated lookup by primary key (XML-mapped).
 *
 * @param id       primary key
 * @param tenantId tenant identifier used for row isolation
 * @return the rule, or null when not found for that tenant
 */
DmNormalizeRule selectByIdAndTenant(@Param("id") Long id, @Param("tenantId") String tenantId);
/**
 * Tenant-isolated update by primary key (XML-mapped); the rule object is
 * expected to carry the tenant id.
 *
 * @param rule rule carrying id, tenant id, and new field values
 * @return number of rows updated
 */
int updateByIdAndTenant(DmNormalizeRule rule);
}

View File

@@ -0,0 +1,18 @@
// SecExceptionAlgorithmMapper.java
package com.common.mapper;
import com.common.entity.SecExceptionAlgorithm;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
public interface SecExceptionAlgorithmMapper {
// Returns algorithms that are enabled (status = 1) and not soft-deleted (del_flag = '0').
@Select("SELECT * FROM sec_exception_algorithm WHERE status = 1 AND del_flag = '0'")
List<SecExceptionAlgorithm> findEnabledAlgorithms();
// Looks up one algorithm by primary key; note this ignores status/del_flag,
// so it can return disabled or soft-deleted rows.
@Select("SELECT * FROM sec_exception_algorithm WHERE id = #{id}")
SecExceptionAlgorithm findById(@Param("id") Long id);
}

View File

@@ -0,0 +1,88 @@
package com.common.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.common.entity.SyslogNonNormalMessage;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
/**
* 非标日志Mapper接口
*/
@Mapper
public interface SyslogNonNormalMessageMapper extends BaseMapper<SyslogNonNormalMessage> {
/**
 * Batch-inserts non-standard logs (XML-mapped).
 *
 * @param list logs to insert
 * @return number of rows inserted
 */
int batchInsert(@Param("list") List<SyslogNonNormalMessage> list);
/**
 * Returns non-standard logs whose log_time falls in [startTime, endTime]
 * (BETWEEN is inclusive on both ends) and that are not soft-deleted.
 *
 * @param startTime range start (inclusive)
 * @param endTime   range end (inclusive)
 * @return matching logs
 */
@Select("SELECT * FROM syslog_non_normal_message WHERE log_time BETWEEN #{startTime} AND #{endTime} AND del_flag = '0'")
List<SyslogNonNormalMessage> findByTimeRange(@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime);
/**
 * Counts non-deleted logs for one device and rule result.
 *
 * @param deviceId   device id
 * @param ruleResult rule evaluation result
 * @return a single-row map containing the "count" column
 */
@Select("SELECT COUNT(*) as count FROM syslog_non_normal_message " +
"WHERE device_id = #{deviceId} AND rule_result = #{ruleResult} AND del_flag = '0'")
Map<String, Object> countByDeviceAndResult(@Param("deviceId") Integer deviceId,
@Param("ruleResult") String ruleResult);
/**
 * Soft-deletes one log by setting del_flag = '1'.
 * <p>
 * Fix: this statement is an UPDATE, so it must be annotated with
 * {@code @Update} — the original {@code @Select} would make MyBatis try to
 * map an update as a result set and fail (or silently not commit).
 *
 * @param id log id
 * @return number of rows updated
 */
@Update("UPDATE syslog_non_normal_message SET del_flag = '1', update_time = NOW() WHERE id = #{id}")
int logicalDelete(@Param("id") String id);
/**
 * Batch lookup by id AND created-at pairs (XML-mapped).
 *
 * @param ids        id list
 * @param createdAts created-at list, positionally paired with {@code ids}
 * @return matching logs
 */
List<SyslogNonNormalMessage> getMessagesByIdsAndCreatedAts(
@Param("ids") List<String> ids,
@Param("createdAts") List<String> createdAts);
/**
 * Batch lookup by id list (XML-mapped).
 *
 * @param ids id list
 * @return matching logs
 */
List<SyslogNonNormalMessage> getMessagesByIds(@Param("ids") List<String> ids);
/**
 * Batch-updates the del_flag column (XML-mapped).
 *
 * @param messages logs carrying the new del_flag values
 * @return number of rows updated
 */
int updateBatchDelFlag(@Param("messages") List<SyslogNonNormalMessage> messages);
/**
 * Batch-updates full records (XML-mapped).
 *
 * @param messages logs to update
 * @return number of rows updated
 */
int updateBatch(@Param("messages") List<SyslogNonNormalMessage> messages);
}

View File

@@ -0,0 +1,84 @@
package com.common.mapper;
import com.common.entity.GroupedSyslogData;
import com.common.entity.SyslogNormalData;
import com.common.entity.SyslogNormalAlarm;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
@Mapper
public interface SyslogNormalAlarmMapper {
/**
 * Dynamic insert driven by a column-name -> value map (XML-mapped).
 *
 * @param dataMap column/value pairs to insert
 * @return number of rows inserted
 */
int insertDynamic(@Param("dataMap") Map<String, Object> dataMap);
/**
 * Dynamic insert driven by the entity (XML-mapped).
 *
 * @param entity alarm row to insert
 * @return number of rows inserted
 */
int insertByEntity(SyslogNormalAlarm entity);
/**
 * Batch insert of column/value maps (XML-mapped).
 *
 * @param dataList rows to insert
 * @return number of rows inserted
 */
int batchInsert(@Param("dataList") List<Map<String, Object>> dataList);
/**
 * Counts the number of (day, src_ip, dest_ip, event-name) groups in
 * [startTime, endTime) for alarms with event_level >= 1, excluding loopback
 * source IPs.
 * <p>
 * Fix: PostgreSQL requires every FROM-clause subquery to carry an alias
 * ("subquery in FROM must have an alias"); the original statement had none
 * and would fail at execution time — {@code AS grouped} added.
 *
 * @param startTime window start (inclusive)
 * @param endTime   window end (exclusive)
 * @return number of groups
 */
@Select("SELECT count(1) as group_count from ( select " +
"to_char(log_time, 'YYYYMMDD') || '_' || " +
"dest_ip || '_' || " +
"origin_event_name " +
"FROM syslog_normal_alarm " +
"WHERE log_time >= #{startTime} AND log_time < #{endTime} " +
"AND event_level >= 1 AND src_ip NOT IN ('127.0.0.1', '127.0.0.2') " +
"GROUP BY to_char(log_time, 'YYYYMMDD'),src_ip, dest_ip, origin_event_name ) AS grouped" )
Long count24HoursGroupedData(@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime);
/**
 * Pages through the grouped alarm data described by
 * {@link #count24HoursGroupedData}: per group it aggregates distinct
 * attacker/victim IPs and ports, device ids, origin log ids, HTTP artifacts,
 * payload samples, min/max log time, and the row count.
 *
 * @param startTime window start (inclusive)
 * @param endTime   window end (exclusive)
 * @param offset    row offset into the grouped result
 * @param pageSize  maximum number of groups to return
 * @return one {@code GroupedSyslogData} per group
 */
@Select("SELECT " +
"to_char(log_time, 'YYYYMMDD') as log_date, " +
" MIN(origin_event_type) AS first_event_type, " +
"ARRAY_AGG(DISTINCT host(src_ip)::text) as attack_ips, " +
"origin_event_name, " +
"MIN(log_time) as min_log_time, " +
"MAX(log_time) as max_log_time, " +
"COUNT(1) as log_count, " +
"ARRAY_AGG(DISTINCT host(dest_ip)::text) as victim_ips, " +
"ARRAY_AGG(DISTINCT device_id) as device_ids, " +
"ARRAY_AGG(DISTINCT id) as origin_log_ids, " +
"MAX(event_level) as max_event_level, " +
"MAX(origin_event_type) as event_type, " +
"MIN(event_type) as min_event_type, " +
"ARRAY_AGG(DISTINCT src_port::int4) as attack_ports, " +
"ARRAY_AGG(DISTINCT dest_port::int4) as victim_ports, " +
"ARRAY_AGG(DISTINCT http_resp_codes::text) as http_status_codes, " +
"ARRAY_AGG(DISTINCT payload::BYTEA) as payload_samples, " +
"ARRAY_AGG(DISTINCT http_req_header) as httpReqHeaders, " +
"ARRAY_AGG(DISTINCT http_req_body) as httpReqBodys, " +
"ARRAY_AGG(DISTINCT http_resp_header) as httpRespHeaders, " +
"ARRAY_AGG(DISTINCT http_resp_body) as httpRespBodys, " +
"STRING_AGG(DISTINCT COALESCE(host(dest_ip)::text, ''), ',') as victim_ips_str " +
"FROM syslog_normal_alarm " +
"WHERE log_time >= #{startTime} AND log_time < #{endTime} " +
"AND event_level >= 1 AND src_ip NOT IN ('127.0.0.1', '127.0.0.2') " +
"GROUP BY to_char(log_time, 'YYYYMMDD'), src_ip, dest_ip, origin_event_name " +
"ORDER BY log_date, dest_ip, origin_event_name " +
"LIMIT #{pageSize} OFFSET #{offset}")
List<GroupedSyslogData> select24HoursGroupedDataByPage(
@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime,
@Param("offset") int offset,
@Param("pageSize") int pageSize);
}

View File

@@ -0,0 +1,101 @@
package com.common.mapper;
import com.common.entity.GroupedSyslogData;
import com.common.entity.SyslogNormalData;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Map;
import org.apache.ibatis.annotations.Select;
import java.time.LocalDateTime;
import java.util.List;
@Mapper
public interface SyslogNormalDataMapper {
/**
 * Dynamic insert driven by a column-name -> value map (XML-mapped).
 *
 * @param dataMap column/value pairs to insert
 * @return number of rows inserted
 */
int insertDynamic(@Param("dataMap") Map<String, Object> dataMap);
/**
 * Dynamic insert driven by the entity (XML-mapped).
 *
 * @param entity data row to insert
 * @return number of rows inserted
 */
int insertByEntity(SyslogNormalData entity);
/**
 * Batch insert of column/value maps (XML-mapped).
 *
 * @param dataList rows to insert
 * @return number of rows inserted
 */
int batchInsert(@Param("dataList") java.util.List<Map<String, Object>> dataList);
/**
 * Counts the number of (day, src_ip, dest_ip, event-name) groups in
 * [startTime, endTime) for successful (HTTP 200) access-log rows, excluding
 * loopback source IPs.
 * <p>
 * Fix: PostgreSQL requires every FROM-clause subquery to carry an alias
 * ("subquery in FROM must have an alias"); the original statement had none
 * and would fail at execution time — {@code AS grouped} added.
 *
 * @param startTime window start (inclusive)
 * @param endTime   window end (exclusive)
 * @return number of groups
 */
@Select("SELECT count(1) as group_count from ( select " +
"to_char(log_time, 'YYYYMMDD') || '_' || " +
"dest_ip || '_' || " +
"origin_event_name " +
"FROM syslog_normal_data " +
"WHERE log_time >= #{startTime} AND log_time < #{endTime} " +
"AND http_resp_codes =200 and origin_event_type <> '' and origin_event_name='访问日志' AND src_ip NOT IN ('127.0.0.1', '127.0.0.2') " +
"GROUP BY to_char(log_time, 'YYYYMMDD'),src_ip, dest_ip, origin_event_name ) AS grouped" )
Long count24HoursGroupedData(@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime);
/**
 * Pages through the grouped access-log data described by
 * {@link #count24HoursGroupedData}: per group it aggregates distinct
 * attacker/victim IPs and ports, device ids, origin log ids, HTTP artifacts,
 * payload samples, min/max log time, and the row count.
 *
 * @param startTime window start (inclusive)
 * @param endTime   window end (exclusive)
 * @param offset    row offset into the grouped result
 * @param pageSize  maximum number of groups to return
 * @return one {@code GroupedSyslogData} per group
 */
@Select("SELECT " +
"to_char(log_time, 'YYYYMMDD') as log_date, " +
" MIN(origin_event_type) AS first_event_type, " +
"ARRAY_AGG(DISTINCT host(src_ip)::text) as attack_ips, " +
"origin_event_name, " +
"MIN(log_time) as min_log_time, " +
"MAX(log_time) as max_log_time, " +
"COUNT(1) as log_count, " +
"ARRAY_AGG(DISTINCT host(dest_ip)::text) as victim_ips, " +
"ARRAY_AGG(DISTINCT device_id) as device_ids, " +
"ARRAY_AGG(DISTINCT id) as origin_log_ids, " +
"MAX(event_level) as max_event_level, " +
"MAX(origin_event_type) as event_type, " +
"MIN(event_type) as min_event_type, " +
"ARRAY_AGG(DISTINCT src_port::int4) as attack_ports, " +
"ARRAY_AGG(DISTINCT dest_port::int4) as victim_ports, " +
"ARRAY_AGG(DISTINCT http_resp_codes::text) as http_status_codes, " +
"ARRAY_AGG(DISTINCT payload::BYTEA) as payload_samples, " +
"ARRAY_AGG(DISTINCT http_req_header) as httpReqHeaders, " +
"ARRAY_AGG(DISTINCT http_req_body) as httpReqBodys, " +
"ARRAY_AGG(DISTINCT http_resp_header) as httpRespHeaders, " +
"ARRAY_AGG(DISTINCT http_resp_body) as httpRespBodys, " +
"STRING_AGG(DISTINCT COALESCE(host(dest_ip)::text, ''), ',') as victim_ips_str " +
"FROM syslog_normal_data " +
"WHERE log_time >= #{startTime} AND log_time < #{endTime} " +
"AND http_resp_codes =200 and origin_event_type <> '' and origin_event_name='访问日志' AND src_ip NOT IN ('127.0.0.1', '127.0.0.2') " +
"GROUP BY to_char(log_time, 'YYYYMMDD'), src_ip, dest_ip, origin_event_name " +
"ORDER BY log_date, dest_ip, origin_event_name " +
"LIMIT #{pageSize} OFFSET #{offset}")
List<GroupedSyslogData> select24HoursGroupedDataByPage(
@Param("startTime") LocalDateTime startTime,
@Param("endTime") LocalDateTime endTime,
@Param("offset") int offset,
@Param("pageSize") int pageSize);
/**
 * Returns required fields for rows after the given time (XML-mapped).
 * The parameter stays a LocalDateTime; any conversion is handled by a
 * type handler in the XML mapping.
 */
List<SyslogNormalData> findRequiredFieldsAfterTime(@Param("startTime") LocalDateTime startTime);
/**
 * Paged variant of {@link #findRequiredFieldsAfterTime} (XML-mapped).
 *
 * @param startTime lower bound on created time
 * @param limit     page size
 * @param offset    row offset
 */
List<SyslogNormalData> findRequiredFieldsByPage(
@Param("startTime") LocalDateTime startTime,
@Param("limit") int limit,
@Param("offset") int offset
);
/**
 * Returns full rows created at or after {@code startTime}, oldest first.
 */
@Select("SELECT * FROM syslog_normal_data WHERE created_at >= #{startTime, jdbcType=TIMESTAMP} ORDER BY created_at ASC")
List<SyslogNormalData> findAfterTime(@Param("startTime") LocalDateTime startTime);
/**
 * Looks up one row by primary key.
 */
@Select("SELECT * FROM syslog_normal_data WHERE id = #{id}")
SyslogNormalData findById(@Param("id") String id);
}

View File

@@ -0,0 +1,108 @@
package com.common.mapper;
import com.common.entity.XdrHoneypot;
import org.apache.ibatis.annotations.*;
import java.util.List;
/**
* XDR Honeypot Mapper 接口
*/
public interface XdrHoneypotMapper {
/**
* 插入单条记录
*/
@Insert("INSERT INTO xdr_honeypot (vcsource, dstart_time, dtime, risk_level, vcconnection, " +
"file_info, extra, vctype, agent_sn, agent_name, honeypot_id, honeypot_name, " +
"src_ip, src_port, src_mac, dest_ip, dest_port, proxy_ip, node) " +
"VALUES (#{vcsource}, #{dstartTime}, #{dtime}, #{riskLevel}, #{vcconnection}, " +
"#{fileInfo}, #{extra}, #{vctype}, #{agentSn}, #{agentName}, #{honeypotId}, #{honeypotName}, " +
"#{srcIp}, #{srcPort}, #{srcMac}, #{destIp}, #{destPort}, #{proxyIp}, #{node})")
@Options(useGeneratedKeys = true, keyProperty = "id")
int insert(XdrHoneypot xdrHoneypot);
/**
* 批量插入记录
*/
int batchInsert(List<XdrHoneypot> xdrHoneypotList);
/**
* 根据ID查询记录
*/
@Select("SELECT * FROM xdr_honeypot WHERE id = #{id}")
@Results({
@Result(property = "id", column = "id"),
@Result(property = "vcsource", column = "vcsource"),
@Result(property = "dstartTime", column = "dstart_time"),
@Result(property = "dtime", column = "dtime"),
@Result(property = "riskLevel", column = "risk_level"),
@Result(property = "vcconnection", column = "vcconnection"),
@Result(property = "fileInfo", column = "file_info"),
@Result(property = "extra", column = "extra"),
@Result(property = "vctype", column = "vctype"),
@Result(property = "agentSn", column = "agent_sn"),
@Result(property = "agentName", column = "agent_name"),
@Result(property = "honeypotId", column = "honeypot_id"),
@Result(property = "honeypotName", column = "honeypot_name"),
@Result(property = "srcIp", column = "src_ip"),
@Result(property = "srcPort", column = "src_port"),
@Result(property = "srcMac", column = "src_mac"),
@Result(property = "destIp", column = "dest_ip"),
@Result(property = "destPort", column = "dest_port"),
@Result(property = "proxyIp", column = "proxy_ip"),
@Result(property = "node", column = "node")
})
XdrHoneypot selectById(Long id);
/**
* 查询所有记录
*/
@Select("SELECT * FROM xdr_honeypot ORDER BY id DESC")
@Results({
@Result(property = "dstartTime", column = "dstart_time"),
@Result(property = "riskLevel", column = "risk_level"),
@Result(property = "fileInfo", column = "file_info"),
@Result(property = "agentSn", column = "agent_sn"),
@Result(property = "agentName", column = "agent_name"),
@Result(property = "honeypotId", column = "honeypot_id"),
@Result(property = "honeypotName", column = "honeypot_name"),
@Result(property = "srcIp", column = "src_ip"),
@Result(property = "srcPort", column = "src_port"),
@Result(property = "srcMac", column = "src_mac"),
@Result(property = "destIp", column = "dest_ip"),
@Result(property = "destPort", column = "dest_port"),
@Result(property = "proxyIp", column = "proxy_ip")
})
List<XdrHoneypot> selectAll();
/**
* 根据源IP查询记录
*/
@Select("SELECT * FROM xdr_honeypot WHERE src_ip = #{srcIp} ORDER BY dtime DESC")
List<XdrHoneypot> selectBySrcIp(String srcIp);
/**
* 根据风险等级查询记录
*/
@Select("SELECT * FROM xdr_honeypot WHERE risk_level = #{riskLevel} ORDER BY dtime DESC")
List<XdrHoneypot> selectByRiskLevel(String riskLevel);
/**
* 更新记录
*/
@Update("UPDATE xdr_honeypot SET risk_level = #{riskLevel}, extra = #{extra} WHERE id = #{id}")
int update(XdrHoneypot xdrHoneypot);
/**
* 根据ID删除记录
*/
@Delete("DELETE FROM xdr_honeypot WHERE id = #{id}")
int delete(Long id);
/**
* 统计总记录数
*/
@Select("SELECT COUNT(*) FROM xdr_honeypot")
long count();
}