Commit adc9b3bb  Author: 925993793@qq.com

Event service feature development and debugging

Parent 3c59f720
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId> <artifactId>spring-boot-starter-parent</artifactId>
<version>2.3.5.RELEASE</version> <version>2.7.1</version>
<relativePath/> <!-- lookup parent from repository --> <relativePath/> <!-- lookup parent from repository -->
</parent> </parent>
<groupId>com.zzsn</groupId> <groupId>com.zzsn</groupId>
...@@ -15,6 +15,7 @@ ...@@ -15,6 +15,7 @@
<description>Demo project for Spring Boot</description> <description>Demo project for Spring Boot</description>
<properties> <properties>
<java.version>1.8</java.version> <java.version>1.8</java.version>
<elasticsearch.version>7.8.1</elasticsearch.version>
</properties> </properties>
<dependencies> <dependencies>
<dependency> <dependency>
...@@ -63,11 +64,6 @@ ...@@ -63,11 +64,6 @@
<artifactId>fastjson2</artifactId> <artifactId>fastjson2</artifactId>
<version>2.0.25</version> <version>2.0.25</version>
</dependency> </dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.83</version>
</dependency>
<!-- hutool工具类--> <!-- hutool工具类-->
<dependency> <dependency>
...@@ -131,34 +127,16 @@ ...@@ -131,34 +127,16 @@
<artifactId>esdk-obs-java-bundle</artifactId> <artifactId>esdk-obs-java-bundle</artifactId>
<version>3.22.12</version> <version>3.22.12</version>
</dependency> </dependency>
<dependency>
<groupId>com.github.tobato</groupId>
<artifactId>fastdfs-client</artifactId>
<version>1.26.1-RELEASE</version>
</dependency>
<!-- es--> <!-- es-->
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId> <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
<version>2.2.6.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-elasticsearch</artifactId>
<version>4.0.5.RELEASE</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.springfox</groupId> <groupId>io.springfox</groupId>
<artifactId>springfox-swagger-common</artifactId> <artifactId>springfox-swagger-common</artifactId>
<version>3.0.0</version> <version>3.0.0</version>
</dependency> </dependency>
<dependency>
<groupId>org.zwobble.mammoth</groupId>
<artifactId>mammoth</artifactId>
<version>1.5.0</version>
</dependency>
<dependency> <dependency>
<groupId>org.springframework.kafka</groupId> <groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId> <artifactId>spring-kafka</artifactId>
...@@ -175,17 +153,6 @@ ...@@ -175,17 +153,6 @@
<artifactId>cron-utils</artifactId> <artifactId>cron-utils</artifactId>
<version>9.1.5</version> <version>9.1.5</version>
</dependency> </dependency>
<!-- mini文件存储服务 -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>20.0</version>
</dependency>
<dependency>
<groupId>io.minio</groupId>
<artifactId>minio</artifactId>
<version>8.5.2</version>
</dependency>
<!-- 动态数据源 --> <!-- 动态数据源 -->
<dependency> <dependency>
...@@ -204,6 +171,18 @@ ...@@ -204,6 +171,18 @@
<artifactId>aspose-words</artifactId> <artifactId>aspose-words</artifactId>
<version>19.1</version> <version>19.1</version>
</dependency> </dependency>
<!-- https://mvnrepository.com/artifact/org.fusesource.hawtbuf/hawtbuf -->
<dependency>
<groupId>org.fusesource.hawtbuf</groupId>
<artifactId>hawtbuf</artifactId>
<version>1.11</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>
......
package com.zzsn.event.config;
import com.obs.services.ObsClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* Description: Huawei Cloud OBS configuration
* Author: EDY
* Date: 2023/10/9
*/
@Configuration
public class ObsConfig {
@Value("${obs.endPoint}")
String endPoint;
@Value("${obs.ak}")
private String ak;
@Value("${obs.sk}")
private String sk;
@Bean
public ObsClient obsClient (){
// Create the ObsClient instance
return new ObsClient(ak, sk, endPoint);
}
}
package com.zzsn.event.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.SchedulingConfigurer;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;
import java.util.concurrent.*;
/**
* Defines the thread pool used for scheduled tasks.
* Only different scheduled methods run on different threads; successive runs of the same scheduled task still go through a single thread.
*
* @author lkg
* @date 2024/4/12
*/
@Configuration
public class ScheduleConfig implements SchedulingConfigurer {
@Override
public void configureTasks(ScheduledTaskRegistrar scheduledTaskRegistrar) {
scheduledTaskRegistrar.setScheduler(Executors.newScheduledThreadPool(20));
}
}
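To make the threading note above concrete, here is a minimal, hypothetical demo component (not part of this commit; the class name and intervals are invented, and @EnableScheduling is assumed to be active elsewhere in the application). With the 20-thread scheduler registered above, two independent @Scheduled methods can fire in parallel, while an overrunning execution of slowJob only delays slowJob's own next run and never overlaps with it.

package com.zzsn.event.task;

import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class ScheduleDemoTask {

    // Long-running job: an execution that overruns its period only postpones this method's own next run.
    @Scheduled(fixedRate = 5000)
    public void slowJob() {
        log.info("slowJob running on {}", Thread.currentThread().getName());
        try {
            Thread.sleep(8000); // simulate work that takes longer than the 5s period
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    // Independent job: keeps firing on other pool threads even while slowJob is busy.
    @Scheduled(fixedRate = 1000)
    public void fastJob() {
        log.info("fastJob running on {}", Thread.currentThread().getName());
    }
}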
...@@ -22,6 +22,10 @@ public class Constants { ...@@ -22,6 +22,10 @@ public class Constants {
public static final String COLLECT_INDEX = "basedata"; public static final String COLLECT_INDEX = "basedata";
//专题事件脉络展示 伪事件脉络 的资讯数量阈值 //专题事件脉络展示 伪事件脉络 的资讯数量阈值
public static final int FAKE_NUM = 6; public static final int FAKE_NUM = 6;
//kafka topic for dispatching keyword crawl commands
public static final String KEYWORDS_SEND_DATA = "keyWordsCrawl";
//kafka 发送分析命令 主题 //kafka 发送分析命令 主题
public static final String EVENT_VIEWPOINT_SEND_DATA = "event_viewpoint_send_data"; public static final String EVENT_VIEWPOINT_SEND_DATA = "event_viewpoint_send_data";
//kafka 发送 事件脉络所需信息 主题 //kafka 发送 事件脉络所需信息 主题
...@@ -36,6 +40,8 @@ public class Constants { ...@@ -36,6 +40,8 @@ public class Constants {
public static final String EVENT_CONTEXT_RECEIVE_TOPIC = "event_topic_result_data"; public static final String EVENT_CONTEXT_RECEIVE_TOPIC = "event_topic_result_data";
//kafka 接收 伪事件脉络结果 主题 //kafka 接收 伪事件脉络结果 主题
public static final String FAKE_EVENT_CONTEXT_RECEIVE_TOPIC = "fake_event_context-result_data"; public static final String FAKE_EVENT_CONTEXT_RECEIVE_TOPIC = "fake_event_context-result_data";
//kafka topic for receiving event analysis report results
public static final String EVENT_REPORT_RECEIVE_TOPIC = "event_report_result_data";
//重复数索引库 //重复数索引库
public final static String ES_REPEAT_OLD = "repeathold"; public final static String ES_REPEAT_OLD = "repeathold";
......
...@@ -4,11 +4,12 @@ import cn.hutool.core.date.DateField; ...@@ -4,11 +4,12 @@ import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DateTime; import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUnit; import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil; import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers; import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.zzsn.event.constant.Constants; import com.zzsn.event.constant.Constants;
import com.zzsn.event.constant.Result; import com.zzsn.event.constant.Result;
import com.zzsn.event.entity.EventAnalysisReport;
import com.zzsn.event.entity.LabelEntity; import com.zzsn.event.entity.LabelEntity;
import com.zzsn.event.entity.SubjectAnalysis; import com.zzsn.event.entity.SubjectAnalysis;
import com.zzsn.event.service.*; import com.zzsn.event.service.*;
...@@ -16,7 +17,6 @@ import com.zzsn.event.util.CalculateUtil; ...@@ -16,7 +17,6 @@ import com.zzsn.event.util.CalculateUtil;
import com.zzsn.event.util.HttpUtil; import com.zzsn.event.util.HttpUtil;
import com.zzsn.event.util.RedisUtil; import com.zzsn.event.util.RedisUtil;
import com.zzsn.event.vo.CountVO; import com.zzsn.event.vo.CountVO;
import com.zzsn.event.vo.PropagationPathVo;
import com.zzsn.event.vo.SubjectDataVo; import com.zzsn.event.vo.SubjectDataVo;
import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiOperation;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
...@@ -24,14 +24,13 @@ import org.apache.commons.lang3.ObjectUtils; ...@@ -24,14 +24,13 @@ import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.*;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.time.LocalDate; import java.math.RoundingMode;
import java.time.temporal.ChronoUnit; import java.util.ArrayList;
import java.util.*; import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors; import java.util.stream.Collectors;
...@@ -55,6 +54,8 @@ public class EventAnalysisController { ...@@ -55,6 +54,8 @@ public class EventAnalysisController {
@Autowired @Autowired
private SubjectAnalysisService subjectAnalysisService; private SubjectAnalysisService subjectAnalysisService;
@Autowired @Autowired
private EventAnalysisReportService eventAnalysisReportService;
@Autowired
private EsService esService; private EsService esService;
@Autowired @Autowired
private RedisUtil redisUtil; private RedisUtil redisUtil;
...@@ -83,7 +84,7 @@ public class EventAnalysisController { ...@@ -83,7 +84,7 @@ public class EventAnalysisController {
long hours = DateUtil.between(DateUtil.parseDateTime(startTime), DateUtil.parseDateTime(endTime), DateUnit.HOUR); long hours = DateUtil.between(DateUtil.parseDateTime(startTime), DateUtil.parseDateTime(endTime), DateUnit.HOUR);
map.put("duration", String.valueOf(hours)); map.put("duration", String.valueOf(hours));
Object count = map.get("totalCount"); Object count = map.get("totalCount");
String divide = CalculateUtil.divide(String.valueOf(count), String.valueOf(hours), 2); String divide = CalculateUtil.divide(String.valueOf(count), String.valueOf(hours), 0, RoundingMode.UP);
map.put("spread", divide); map.put("spread", divide);
String mainReport = esStatisticsService.mainReport(subjectId); String mainReport = esStatisticsService.mainReport(subjectId);
map.put("mainReport", mainReport); map.put("mainReport", mainReport);
...@@ -101,7 +102,7 @@ public class EventAnalysisController { ...@@ -101,7 +102,7 @@ public class EventAnalysisController {
@GetMapping("/hotList") @GetMapping("/hotList")
public Result<?> hotList(@RequestParam(name = "subjectId") String subjectId, public Result<?> hotList(@RequestParam(name = "subjectId") String subjectId,
@RequestParam(name = "size", defaultValue = "10") Integer size) { @RequestParam(name = "size", defaultValue = "10") Integer size) {
String[] fetchFields = new String[]{"id", "title", "origin", "publishDate", "sourceAddress"}; String[] fetchFields = new String[]{"id","subjectId", "title", "origin", "publishDate", "sourceAddress"};
List<SubjectDataVo> pageList = esService.pageList(subjectId, null, null, fetchFields, 2, 1, size); List<SubjectDataVo> pageList = esService.pageList(subjectId, null, null, fetchFields, 2, 1, size);
if (CollectionUtils.isNotEmpty(pageList)) { if (CollectionUtils.isNotEmpty(pageList)) {
List<String> idList = new ArrayList<>(); List<String> idList = new ArrayList<>();
...@@ -289,6 +290,34 @@ public class EventAnalysisController { ...@@ -289,6 +290,34 @@ public class EventAnalysisController {
} }
/** /**
* Get the details of an event analysis report
*
* @param eventId event id
* @author lkg
* @date 2024/4/12
*/
@GetMapping("/reportInfo")
public Result<?> reportInfo(@RequestParam String eventId){
LambdaQueryWrapper<EventAnalysisReport> queryWrapper = Wrappers.lambdaQuery();
queryWrapper.eq(EventAnalysisReport::getEventId,eventId);
EventAnalysisReport one = eventAnalysisReportService.getOne(queryWrapper);
return Result.OK(one);
}
/**
* Edit an event analysis report
*
* @param report analysis report information
* @author lkg
* @date 2024/4/12
*/
@PostMapping("/reportEdit")
public Result<?> edit(@RequestBody EventAnalysisReport report){
eventAnalysisReportService.edit(report);
return Result.OK();
}
/**
* 获取趋势图数据的实际时间范围 * 获取趋势图数据的实际时间范围
* *
* @param startTime 专题开始时间 * @param startTime 专题开始时间
......
package com.zzsn.event.controller; package com.zzsn.event.controller;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.core.metadata.IPage;
import com.zzsn.event.constant.Result; import com.zzsn.event.constant.Result;
...@@ -17,7 +17,6 @@ import com.zzsn.event.xxljob.service.IXxlJobInfoService; ...@@ -17,7 +17,6 @@ import com.zzsn.event.xxljob.service.IXxlJobInfoService;
import io.swagger.annotations.Api; import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
...@@ -176,28 +175,26 @@ public class EventManageController { ...@@ -176,28 +175,26 @@ public class EventManageController {
/** /**
* 1.6 通过id查询 * 1.6 通过id查询
* *
* @param id * @param id 事件id
* @return * @return
*/ */
@ApiOperation(value = "事件-通过id查询", notes = "事件-通过id查询") @ApiOperation(value = "事件-通过id查询", notes = "事件-通过id查询")
@GetMapping(value = "/queryById") @GetMapping(value = "/queryById")
public Result<?> queryById(@RequestParam(name = "id") String id) { public Result<?> queryById(@RequestParam(name = "id") String id) {
Event event = eventService.getById(id); EventVO eventVO = eventService.queryInfo(id);
String relationEvents = event.getRelationEvents(); String relationEvents = eventVO.getRelationEvents();
if (null != relationEvents) { if (null != relationEvents) {
List<String> split = Arrays.asList(relationEvents.split(",")); List<String> split = Arrays.asList(relationEvents.split(","));
List<Event> list = eventService.list(new LambdaQueryWrapper<Event>().in(Event::getId, split)); List<EventVO> relationEventList = eventService.eventList(split);
event.setRelatedEventList(list); eventVO.setRelatedEventList(relationEventList);
} }
EventTag one = eventTagService.getOne(new LambdaQueryWrapper<EventTag>() EventTag one = eventTagService.getOne(new LambdaQueryWrapper<EventTag>()
.eq(EventTag::getEventId, event.getId()) .eq(EventTag::getEventId, id)
.last(" limit 1")); .last(" limit 1"));
event.setEventTag(one); eventVO.setEventTag(one);
AddEventParam eventParam = new AddEventParam(); List<RegionVO> regionList = eventRegionMapService.regionList(id);
BeanUtils.copyProperties(event, eventParam); eventVO.setRegionList(regionList);
List<RegionVO> regionList = eventRegionMapService.regionList(event.getId()); return Result.OK(eventVO);
eventParam.setRegionList(regionList);
return Result.OK(eventParam);
} }
/** /**
......
package com.zzsn.event.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModel;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
/**
* Event analysis report
* @TableName event_analysis_report
*/
@Data
@TableName("event_analysis_report")
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="EventAnalysisReport对象", description="事件分析报告")
public class EventAnalysisReport implements Serializable {
/**
* Primary key id
*/
@TableId(type = IdType.ASSIGN_ID)
private String id;
/**
* Event id
*/
@TableField("event_id")
private String eventId;
/**
* Report name
*/
@TableField("report_name")
private String reportName;
/**
* Report content
*/
@TableField("content")
private String content;
/**
* File path
*/
@TableField("file_path")
private String filePath;
/**
* Time the record was saved
*/
@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern="yyyy-MM-dd HH:mm:ss")
@TableField("create_time")
private Date createTime;
}
package com.zzsn.event.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import org.jeecgframework.poi.excel.annotation.Excel;
import java.io.Serializable;
/**
* @Description: Subject - search engine mapping table
* @Author: jeecg-boot
* @Date: 2022-06-21
* @Version: V1.0
*/
@Data
@TableName("subject_search_engines_map")
@Accessors(chain = true)
@EqualsAndHashCode(callSuper = false)
@ApiModel(value="subject_search_engines_map对象", description="专题与搜索引擎关联表")
public class SubjectSearchEnginesMap implements Serializable {
private static final long serialVersionUID = 1L;
/** Primary key */
@TableId(type = IdType.ASSIGN_ID)
@ApiModelProperty(value = "主键")
private String id;
/** Subject id */
@Excel(name = "专题id", width = 15)
@ApiModelProperty(value = "专题id")
private String subjectId;
/** Search engine id */
@Excel(name = "搜索引擎id", width = 15)
@ApiModelProperty(value = "搜索引擎id")
private String searchEngineId;
}
package com.zzsn.event.kafka.consumer; package com.zzsn.event.kafka;
import com.alibaba.fastjson2.JSON; import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers; import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.zzsn.event.constant.Constants; import com.zzsn.event.constant.Constants;
import com.zzsn.event.entity.Event; import com.zzsn.event.entity.Event;
import com.zzsn.event.entity.EventAnalysisReport;
import com.zzsn.event.entity.SubjectAnalysis; import com.zzsn.event.entity.SubjectAnalysis;
import com.zzsn.event.service.EventAnalysisReportService;
import com.zzsn.event.service.IEventService; import com.zzsn.event.service.IEventService;
import com.zzsn.event.service.SubjectAnalysisService; import com.zzsn.event.service.SubjectAnalysisService;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
...@@ -31,6 +34,8 @@ public class KafkaConsumer { ...@@ -31,6 +34,8 @@ public class KafkaConsumer {
private SubjectAnalysisService subjectAnalysisService; private SubjectAnalysisService subjectAnalysisService;
@Autowired @Autowired
private IEventService eventService; private IEventService eventService;
@Autowired
private EventAnalysisReportService eventAnalysisReportService;
/** /**
* 获取-观点分析-分析结果数据,并入库 * 获取-观点分析-分析结果数据,并入库
...@@ -40,7 +45,6 @@ public class KafkaConsumer { ...@@ -40,7 +45,6 @@ public class KafkaConsumer {
@KafkaListener(topics = {Constants.VIEWPOINT_RECEIVE_TOPIC}) @KafkaListener(topics = {Constants.VIEWPOINT_RECEIVE_TOPIC})
public void viewPointAnalysis(ConsumerRecord<String, String> record) { public void viewPointAnalysis(ConsumerRecord<String, String> record) {
String value = record.value(); String value = record.value();
log.info("viewpointMessage:{}",value);
if (StringUtils.isNotEmpty(value)) { if (StringUtils.isNotEmpty(value)) {
String subjectId = null; String subjectId = null;
try { try {
...@@ -121,6 +125,21 @@ public class KafkaConsumer { ...@@ -121,6 +125,21 @@ public class KafkaConsumer {
} }
} }
/**
* Receive the analysis report data for an event
*
* @param record the received kafka record
* @author lkg
* @date 2024/4/12
*/
@KafkaListener(topics = {Constants.EVENT_REPORT_RECEIVE_TOPIC})
public void eventReport(ConsumerRecord<String, String> record) {
String value = record.value();
EventAnalysisReport eventAnalysisReport = JSONObject.parseObject(value, EventAnalysisReport.class);
eventAnalysisReportService.modify(eventAnalysisReport.getEventId(),eventAnalysisReport.getFilePath());
log.info("id为-{}-的事件,分析报告更新完成",eventAnalysisReport.getEventId());
}
private boolean exist(SubjectAnalysis subjectAnalyse){ private boolean exist(SubjectAnalysis subjectAnalyse){
LambdaQueryWrapper<SubjectAnalysis> queryWrapper = Wrappers.lambdaQuery(); LambdaQueryWrapper<SubjectAnalysis> queryWrapper = Wrappers.lambdaQuery();
queryWrapper.eq(SubjectAnalysis::getCategory,2).eq(SubjectAnalysis::getDataId,subjectAnalyse.getDataId()) queryWrapper.eq(SubjectAnalysis::getCategory,2).eq(SubjectAnalysis::getDataId,subjectAnalyse.getDataId())
......
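For reference, a minimal sketch of the publishing side that the new eventReport listener implies: a JSON payload that fastjson2 can bind to EventAnalysisReport, carrying at least eventId and filePath, sent to Constants.EVENT_REPORT_RECEIVE_TOPIC. The KafkaTemplate wiring, class name and method name below are assumptions; only the topic constant and the two fields follow from the consumer code.

package com.zzsn.event.kafka;

import com.alibaba.fastjson2.JSONObject;
import com.zzsn.event.constant.Constants;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

// Hypothetical publishing side (in reality the analysis service produces this message).
@Component
public class EventReportPublisher {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    public void publishReport(String eventId, String obsFilePath) {
        JSONObject payload = new JSONObject();
        payload.put("eventId", eventId);      // event the report belongs to
        payload.put("filePath", obsFilePath); // OBS object key of the generated report document
        kafkaTemplate.send(Constants.EVENT_REPORT_RECEIVE_TOPIC, payload.toJSONString());
    }
}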
package com.zzsn.event.kafka.producer;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.messaging.MessageChannel;
/**
* @author zs
* @Description message channels
* @since 2021/6/10
*/
public interface IInfosourceSource {
/**
* Subject channel
*/
@Output
MessageChannel subjectModel();
/**
* Keyword channel
*
* @return org.springframework.messaging.MessageChannel
*/
@Output
MessageChannel keyWordsCrawl();
}
package com.zzsn.event.kafka.producer;
import com.alibaba.fastjson2.JSON;
import com.zzsn.event.vo.KeyWordsDTO;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.stereotype.Component;
@Component
@Slf4j
@EnableBinding(IInfosourceSource.class)
public class ProduceInfo {
@Autowired
private IInfosourceSource source;
/**
* Dispatch a keyword crawl task
* @param keyWordsDTO keyword information source
*/
public void sendKeyWordsInfoSourceMsg(KeyWordsDTO keyWordsDTO) {
try{
String msg = JSON.toJSONString(keyWordsDTO);
Message<String> message = MessageBuilder.withPayload(msg).build();
source.keyWordsCrawl().send(message);
}catch (Exception e){
e.printStackTrace();
log.error("关键词:"+ keyWordsDTO.getWordsCode() + "推送kafka失败");
}
}
}
package com.zzsn.event.mapper;
import com.zzsn.event.entity.EventAnalysisReport;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
* @author lenovo
* @description Mapper for database operations on table event_analysis_report (event analysis report)
* @createDate 2024-04-12 11:13:13
* @Entity com.zzsn.event.entity.EventAnalysisReport
*/
@Mapper
public interface EventAnalysisReportMapper extends BaseMapper<EventAnalysisReport> {
}
...@@ -21,6 +21,15 @@ public interface EventMapper extends BaseMapper<Event> { ...@@ -21,6 +21,15 @@ public interface EventMapper extends BaseMapper<Event> {
List<SubjectKafkaVo> eventSubjectList(); List<SubjectKafkaVo> eventSubjectList();
/** /**
* Get event details
*
* @param eventId event id
* @author lkg
* @date 2024/4/12
*/
EventVO queryInfo(@Param("eventId") String eventId);
/**
* 分页列表-后台管理 * 分页列表-后台管理
* *
* @param eventName 事件名称 * @param eventName 事件名称
...@@ -131,4 +140,13 @@ public interface EventMapper extends BaseMapper<Event> { ...@@ -131,4 +140,13 @@ public interface EventMapper extends BaseMapper<Event> {
* @date 2024/4/11 * @date 2024/4/11
*/ */
List<ModelVO> modelList(); List<ModelVO> modelList();
/**
* Get a collection of event information
*
* @param eventIdList list of event ids
* @author lkg
* @date 2024/4/12
*/
List<EventVO> eventList(@Param("eventIdList") List<String> eventIdList);
} }
package com.zzsn.event.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.zzsn.event.entity.SubjectSearchEnginesMap;
import com.zzsn.event.vo.SearchEnginesVo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @Description: Subject - search engine mapping table
* @Author: jeecg-boot
* @Date: 2022-06-21
* @Version: V1.0
*/
@Mapper
public interface SubjectSearchEnginesMapMapper extends BaseMapper<SubjectSearchEnginesMap> {
void deleteBySubjectId(@Param("subjectId") String subjectId);
List<SearchEnginesVo> bindSearchEngineList(@Param("searchEnginesVo") SearchEnginesVo searchEnginesVo);
List<String> querySearchList(@Param("subjectId") String subjectId);
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.zzsn.event.mapper.EventAnalysisReportMapper">
<resultMap id="BaseResultMap" type="com.zzsn.event.entity.EventAnalysisReport">
<id property="id" column="id" jdbcType="VARCHAR"/>
<result property="evnetId" column="evnet_id" jdbcType="VARCHAR"/>
<result property="reportName" column="report_name" jdbcType="VARCHAR"/>
<result property="content" column="content" jdbcType="VARCHAR"/>
<result property="filePath" column="file_path" jdbcType="VARCHAR"/>
<result property="createTime" column="create_time" jdbcType="TIMESTAMP"/>
</resultMap>
<sql id="Base_Column_List">
id,event_id,report_name,
content,file_path,create_time
</sql>
</mapper>
...@@ -15,6 +15,13 @@ ...@@ -15,6 +15,13 @@
from event s from event s
</select> </select>
<select id="queryInfo" resultType="com.zzsn.event.vo.EventVO">
select t.id,t.event_name,t.event_icon,t.start_time,t.end_time,t.publish_date,t.event_describe,t.event_label,
t.face_public,t.relation_events,t.event_type,c.type_name,IFNULL(r.id,false) as hasReport
from event t inner join event_category c on t.event_type = c.id
left join event_analysis_report r on t.id = r.event_id
where t.id = #{eventId}
</select>
<select id="pageList" resultType="com.zzsn.event.vo.EventManageVO"> <select id="pageList" resultType="com.zzsn.event.vo.EventManageVO">
select t2.type_name,t1.id,t1.event_icon,t1.event_name,t1.event_label, select t2.type_name,t1.id,t1.event_icon,t1.event_name,t1.event_label,
...@@ -149,9 +156,9 @@ ...@@ -149,9 +156,9 @@
</select> </select>
<select id="processList" resultType="com.zzsn.event.vo.SubjectKafkaVo"> <select id="processList" resultType="com.zzsn.event.vo.SubjectKafkaVo">
select s.id,s.event_name as subject_name,s.start_time as time_enable,s.end_time as select s.id,s.event_name as subject_name,s.start_time as time_enable,s.end_time as time_disable,
time_disable,s.incre_ana_rule, s.incre_ana_rule, s.total_ana_rule,s.time_ana_rule,s.analysis_time,s.event_time,s.face_public,
s.total_ana_rule,s.time_ana_rule,s.analysis_time,s.event_time s.publish_status,s.event_describe
from event s from event s
where 1=1 where 1=1
<if test="disableDate != null"> <if test="disableDate != null">
...@@ -194,4 +201,14 @@ ...@@ -194,4 +201,14 @@
<select id="modelList" resultType="com.zzsn.event.vo.ModelVO"> <select id="modelList" resultType="com.zzsn.event.vo.ModelVO">
select id,model_name,type from model where pid = '0' and type is not null select id,model_name,type from model where pid = '0' and type is not null
</select> </select>
<select id="eventList" resultType="com.zzsn.event.vo.EventVO">
select t.id,t.event_name from event t where 1=1
<if test="eventIdList != null and eventIdList.size()>0">
and t.id in
<foreach collection="eventIdList" open="(" separator="," close=")" item="item">
#{item}
</foreach>
</if>
</select>
</mapper> </mapper>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.zzsn.event.mapper.SubjectSearchEnginesMapMapper">
<select id="bindSearchEngineList" resultType="com.zzsn.event.vo.SearchEnginesVo">
SELECT b.subject_id as subjectId, a.* FROM search_engines a
LEFT JOIN subject_search_engines_map b ON a.id = b.search_engine_id and b.subject_id = #{searchEnginesVo.subjectId}
where 1 = 1
<if test="searchEnginesVo.type!=null">
and a.type = #{searchEnginesVo.type}
</if>
</select>
<delete id="deleteBySubjectId" >
delete from subject_search_engines_map
WHERE subject_id = #{subjectId}
</delete>
<select id="querySearchList" resultType="String">
SELECT a.dictionary_code FROM search_engines a
INNER JOIN subject_search_engines_map b ON a.id = b.search_engine_id and b.subject_id = #{subjectId}
</select>
</mapper>
...@@ -118,7 +118,7 @@ public class EsService { ...@@ -118,7 +118,7 @@ public class EsService {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
//创建查询对象 //创建查询对象
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.matchQuery("subjectId", subjectId)); boolQuery.must(QueryBuilders.termQuery("subjectId.keyword", subjectId));
if (StringUtils.isNotBlank(startDate) || StringUtils.isNotBlank(endDate)) { if (StringUtils.isNotBlank(startDate) || StringUtils.isNotBlank(endDate)) {
if (StringUtils.isNotBlank(startDate)) { if (StringUtils.isNotBlank(startDate)) {
boolQuery.filter(QueryBuilders.rangeQuery("publishDate").gte(EsDateUtil.esFieldDateFormat(startDate))); boolQuery.filter(QueryBuilders.rangeQuery("publishDate").gte(EsDateUtil.esFieldDateFormat(startDate)));
...@@ -328,7 +328,7 @@ public class EsService { ...@@ -328,7 +328,7 @@ public class EsService {
for (SearchHit hit : searchHits) { for (SearchHit hit : searchHits) {
String index = hit.getIndex(); String index = hit.getIndex();
String queryInfo = hit.getSourceAsString(); String queryInfo = hit.getSourceAsString();
SubjectDataVo info = com.alibaba.fastjson.JSON.parseObject(queryInfo, SubjectDataVo.class); SubjectDataVo info = JSON.parseObject(queryInfo, SubjectDataVo.class);
info.setPublishDate(EsDateUtil.esFieldDateMapping(info.getPublishDate())); info.setPublishDate(EsDateUtil.esFieldDateMapping(info.getPublishDate()));
info.setIndex(index); info.setIndex(index);
list.add(info); list.add(info);
...@@ -442,8 +442,9 @@ public class EsService { ...@@ -442,8 +442,9 @@ public class EsService {
searchSourceBuilder.size(0); searchSourceBuilder.size(0);
//创建查询对象 //创建查询对象
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
String[] arr = new String[]{"title"}; // String[] arr = new String[]{"title"};
boolQuery.must(QueryBuilders.multiMatchQuery(title, arr)); // boolQuery.must(QueryBuilders.multiMatchQuery(title, arr));
boolQuery.must(QueryBuilders.matchQuery("title",title));
boolQuery.filter(QueryBuilders.rangeQuery("publishDate").gt(EsDateUtil.esFieldDateFormat(publishDate))); boolQuery.filter(QueryBuilders.rangeQuery("publishDate").gt(EsDateUtil.esFieldDateFormat(publishDate)));
TermsAggregationBuilder aggregationBuilder = AggregationBuilders.terms("group_origin") TermsAggregationBuilder aggregationBuilder = AggregationBuilders.terms("group_origin")
.field("origin.keyword") .field("origin.keyword")
...@@ -556,9 +557,11 @@ public class EsService { ...@@ -556,9 +557,11 @@ public class EsService {
if (searchHits != null) { if (searchHits != null) {
SearchHit[] hits = searchHits.getHits(); SearchHit[] hits = searchHits.getHits();
for (SearchHit hit : hits) { for (SearchHit hit : hits) {
String index = hit.getIndex();
String sourceAsString = hit.getSourceAsString(); String sourceAsString = hit.getSourceAsString();
SubjectDataVo subjectDataVo = JSON.parseObject(sourceAsString, SubjectDataVo.class); SubjectDataVo subjectDataVo = JSON.parseObject(sourceAsString, SubjectDataVo.class);
subjectDataVo.setPublishDate(EsDateUtil.esFieldDateMapping(subjectDataVo.getPublishDate())); subjectDataVo.setPublishDate(EsDateUtil.esFieldDateMapping(subjectDataVo.getPublishDate()));
subjectDataVo.setIndex(index);
list.add(subjectDataVo); list.add(subjectDataVo);
} }
} }
......
package com.zzsn.event.service;
import com.zzsn.event.entity.EventAnalysisReport;
import com.baomidou.mybatisplus.extension.service.IService;
/**
* @author lenovo
* @description Service for database operations on table event_analysis_report (event analysis report)
* @createDate 2024-04-12 11:13:13
*/
public interface EventAnalysisReportService extends IService<EventAnalysisReport> {
/**
* Add or replace an analysis report - received via kafka
*
* @param eventId event id
* @param filePath file path
* @author lkg
* @date 2024/4/12
*/
void modify(String eventId,String filePath);
/**
* Edit an event report
*
* @param report analysis report information
* @author lkg
* @date 2024/4/12
*/
void edit(EventAnalysisReport report);
}
...@@ -2,10 +2,8 @@ package com.zzsn.event.service; ...@@ -2,10 +2,8 @@ package com.zzsn.event.service;
import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.IService; import com.baomidou.mybatisplus.extension.service.IService;
import com.zzsn.event.constant.Result;
import com.zzsn.event.entity.Event; import com.zzsn.event.entity.Event;
import com.zzsn.event.vo.*; import com.zzsn.event.vo.*;
import org.apache.ibatis.annotations.Param;
import org.springframework.web.multipart.MultipartFile; import org.springframework.web.multipart.MultipartFile;
import java.util.Date; import java.util.Date;
...@@ -24,6 +22,15 @@ public interface IEventService extends IService<Event> { ...@@ -24,6 +22,15 @@ public interface IEventService extends IService<Event> {
List<SubjectKafkaVo> eventSubjectList(); List<SubjectKafkaVo> eventSubjectList();
/** /**
* Get event details
*
* @param eventId event id
* @author lkg
* @date 2024/4/12
*/
EventVO queryInfo(String eventId);
/**
* 分页列表-后台管理 * 分页列表-后台管理
* *
* @param eventName 事件名称 * @param eventName 事件名称
...@@ -110,4 +117,13 @@ public interface IEventService extends IService<Event> { ...@@ -110,4 +117,13 @@ public interface IEventService extends IService<Event> {
* @date 2024/4/11 * @date 2024/4/11
*/ */
List<ModelVO> modelList(); List<ModelVO> modelList();
/**
* Get a collection of event information
*
* @param eventIdList list of event ids
* @author lkg
* @date 2024/4/12
*/
List<EventVO> eventList(List<String> eventIdList);
} }
package com.zzsn.event.service; package com.zzsn.event.service;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.IService; import com.baomidou.mybatisplus.extension.service.IService;
import com.zzsn.event.vo.KeyWordsPage; import com.zzsn.event.vo.KeyWordsPage;
import com.zzsn.event.xxljob.entity.KeyWords; import com.zzsn.event.xxljob.entity.KeyWords;
import java.util.List; import java.util.List;
/** /**
......
package com.zzsn.event.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.zzsn.event.entity.SubjectSearchEnginesMap;
import com.zzsn.event.vo.SearchEnginesVo;
import java.util.List;
/**
* @Description: Subject - search engine mapping table
* @Author: jeecg-boot
* @Date: 2022-06-21
* @Version: V1.0
*/
public interface ISubjectSearchEnginesMapService extends IService<SubjectSearchEnginesMap> {
void deleteBySubjectId(String subjectId);
List<SearchEnginesVo> bindSearchEngineList(SearchEnginesVo searchEnginesVo);
/**
* Query the search engine codes bound to a subject by its id
* @param subjectId
* @return
*/
List<String> querySearchList(String subjectId);
}
package com.zzsn.event.service;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.zzsn.event.entity.SubjectSearchEnginesMap;
import com.zzsn.event.mapper.SubjectSearchEnginesMapMapper;
import com.zzsn.event.vo.SearchEnginesVo;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* @Description: Subject - search engine mapping table
* @Author: jeecg-boot
* @Date: 2022-06-21
* @Version: V1.0
*/
@Service
public class SubjectSearchEnginesMapServiceImpl extends ServiceImpl<SubjectSearchEnginesMapMapper, SubjectSearchEnginesMap> implements ISubjectSearchEnginesMapService {
@Override
public void deleteBySubjectId(String subjectId){
baseMapper.deleteBySubjectId(subjectId);
}
@Override
public List<SearchEnginesVo> bindSearchEngineList(SearchEnginesVo searchEnginesVo){
return baseMapper.bindSearchEngineList(searchEnginesVo);
}
@Override
public List<String> querySearchList(String subjectId){
return baseMapper.querySearchList(subjectId);
}
}
package com.zzsn.event.service.impl; package com.zzsn.event.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.core.toolkit.Wrappers; import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.zzsn.event.constant.Constants; import com.zzsn.event.constant.Constants;
import com.zzsn.event.entity.Event; import com.zzsn.event.entity.Event;
...@@ -12,7 +13,6 @@ import com.zzsn.event.service.SubjectAnalysisService; ...@@ -12,7 +13,6 @@ import com.zzsn.event.service.SubjectAnalysisService;
import com.zzsn.event.util.DateUtil; import com.zzsn.event.util.DateUtil;
import com.zzsn.event.vo.PropagationPathVo; import com.zzsn.event.vo.PropagationPathVo;
import com.zzsn.event.vo.SubjectDataVo; import com.zzsn.event.vo.SubjectDataVo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
...@@ -133,9 +133,10 @@ public class AnalysisServiceImpl implements AnalysisService { ...@@ -133,9 +133,10 @@ public class AnalysisServiceImpl implements AnalysisService {
} }
//获取发布时间最早的前N条资讯(来源不重复) //获取发布时间最早的前N条资讯(来源不重复)
private List<SubjectDataVo> topN(List<SubjectDataVo> list,Integer num){ private List<SubjectDataVo> topN(List<SubjectDataVo> list,Integer num){
list.sort(Comparator.comparing(SubjectDataVo::getPublishDate)); List<SubjectDataVo> collect = list.stream().filter(e -> StringUtils.isNotEmpty(e.getOrigin())).
sorted(Comparator.comparing(SubjectDataVo::getPublishDate)).collect(Collectors.toList());
TreeSet<SubjectDataVo> subjectDataVos = new TreeSet<>(Comparator.comparing(SubjectDataVo::getOrigin)); TreeSet<SubjectDataVo> subjectDataVos = new TreeSet<>(Comparator.comparing(SubjectDataVo::getOrigin));
for (SubjectDataVo subjectDataVo : list) { for (SubjectDataVo subjectDataVo : collect) {
subjectDataVos.add(subjectDataVo); subjectDataVos.add(subjectDataVo);
if (subjectDataVos.size() == num) { if (subjectDataVos.size() == num) {
break; break;
......
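A small self-contained sketch of what the reworked topN above now does, using invented data and a simplified stand-in for SubjectDataVo: items with an empty origin are filtered out, the remainder is sorted by publish date, and a TreeSet keyed on origin keeps only the earliest item per source until num distinct sources have been collected.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.TreeSet;
import java.util.stream.Collectors;

public class TopNDemo {

    // Simplified stand-in for SubjectDataVo, for illustration only.
    static class Item {
        final String origin;
        final String publishDate;
        Item(String origin, String publishDate) { this.origin = origin; this.publishDate = publishDate; }
    }

    static List<Item> topN(List<Item> list, int num) {
        List<Item> sorted = list.stream()
                .filter(e -> e.origin != null && !e.origin.isEmpty())
                .sorted(Comparator.comparing((Item e) -> e.publishDate))
                .collect(Collectors.toList());
        // The TreeSet compares by origin, so add() silently drops later items from an already-seen source.
        TreeSet<Item> distinctByOrigin = new TreeSet<>(Comparator.comparing((Item e) -> e.origin));
        for (Item item : sorted) {
            distinctByOrigin.add(item);
            if (distinctByOrigin.size() == num) {
                break;
            }
        }
        return new ArrayList<>(distinctByOrigin);
    }

    public static void main(String[] args) {
        List<Item> data = Arrays.asList(
                new Item("SourceA", "2024-04-01 09:00:00"),   // kept: earliest item from SourceA
                new Item("SourceA", "2024-04-01 12:00:00"),   // dropped: SourceA already represented
                new Item("", "2024-04-01 07:00:00"),          // dropped: empty origin
                new Item("SourceB", "2024-04-01 10:00:00"));  // kept: second distinct source
        topN(data, 2).forEach(i -> System.out.println(i.origin + " " + i.publishDate));
    }
}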
...@@ -50,6 +50,9 @@ public class EsStatisticsServiceImpl implements EsStatisticsService { ...@@ -50,6 +50,9 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
@Override @Override
public Map<String, String> totalAndMax(String subjectId, String startTime, String endTime, Integer type) { public Map<String, String> totalAndMax(String subjectId, String startTime, String endTime, Integer type) {
Map<String, String> map = new HashMap<>(); Map<String, String> map = new HashMap<>();
long totalCount = 0L;
long max = 0L;
String maxTime = null;
SearchRequest searchRequest = new SearchRequest(Constants.ES_DATA_FOR_SUBJECT); SearchRequest searchRequest = new SearchRequest(Constants.ES_DATA_FOR_SUBJECT);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, null, startTime, endTime); SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, null, startTime, endTime);
searchSourceBuilder.size(0); searchSourceBuilder.size(0);
...@@ -68,20 +71,21 @@ public class EsStatisticsServiceImpl implements EsStatisticsService { ...@@ -68,20 +71,21 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
searchRequest.source(searchSourceBuilder); searchRequest.source(searchSourceBuilder);
try { try {
SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT); SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
long value = response.getHits().getTotalHits().value; totalCount = response.getHits().getTotalHits().value;
map.put("totalCount", String.valueOf(value));
Aggregations aggregations = response.getAggregations(); Aggregations aggregations = response.getAggregations();
ParsedDateHistogram groupHour = aggregations.get("group_hour"); ParsedDateHistogram groupHour = aggregations.get("group_hour");
List<? extends Histogram.Bucket> buckets = groupHour.getBuckets(); List<? extends Histogram.Bucket> buckets = groupHour.getBuckets();
if (CollectionUtils.isNotEmpty(buckets)) { if (CollectionUtils.isNotEmpty(buckets)) {
Histogram.Bucket bucket = buckets.get(0); Histogram.Bucket bucket = buckets.get(0);
long count = bucket.getDocCount(); max = bucket.getDocCount();
map.put("max", String.valueOf(count)); maxTime = bucket.getKeyAsString();
map.put("maxTime", bucket.getKeyAsString());
} }
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
map.put("totalCount", String.valueOf(totalCount));
map.put("max", String.valueOf(max));
map.put("maxTime", maxTime);
return map; return map;
} }
......
package com.zzsn.event.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.obs.services.model.PutObjectResult;
import com.zzsn.event.entity.EventAnalysisReport;
import com.zzsn.event.service.EventAnalysisReportService;
import com.zzsn.event.mapper.EventAnalysisReportMapper;
import com.zzsn.event.util.DocUtil;
import com.zzsn.event.util.ObsUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.InputStream;
import java.util.Date;
import java.util.UUID;
/**
* @author lenovo
* @description Service implementation for database operations on table event_analysis_report (event analysis report)
* @createDate 2024-04-12 11:13:13
*/
@Service
public class EventAnalysisReportServiceImpl extends ServiceImpl<EventAnalysisReportMapper, EventAnalysisReport> implements EventAnalysisReportService{
@Autowired
private ObsUtil obsUtil;
@Override
public void modify(String eventId, String filePath) {
try {
LambdaQueryWrapper<EventAnalysisReport> queryWrapper = Wrappers.lambdaQuery();
queryWrapper.eq(EventAnalysisReport::getEventId,eventId);
int count = this.count(queryWrapper);
if (count > 0) {
this.remove(queryWrapper);
}
EventAnalysisReport report = new EventAnalysisReport();
report.setEventId(eventId);
report.setFilePath(filePath);
report.setCreateTime(new Date());
InputStream inputStream = obsUtil.getObjectStream(filePath);
String content = DocUtil.convertDocStream2Html(inputStream);
report.setContent(content);
this.save(report);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void edit(EventAnalysisReport report) {
try {
String content = report.getContent();
String path = report.getFilePath();
String fileName = path.substring(path.lastIndexOf("/") + 1);
if (StringUtils.isNotBlank(content)) {
byte[] bytes = DocUtil.convertDocHtml2Doc(content);
PutObjectResult putObjectResult = obsUtil.uploadFile("event/report/" + fileName, bytes);
String filePath = putObjectResult.getObjectKey();
report.setFilePath(filePath);
}
this.updateById(report);
} catch (Exception e) {
e.printStackTrace();
}
}
}
package com.zzsn.event.service.impl; package com.zzsn.event.service.impl;
import cn.hutool.core.map.MapUtil; import cn.hutool.core.map.MapUtil;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers; import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.zzsn.event.constant.Constants; import com.zzsn.event.constant.Constants;
import com.zzsn.event.constant.Result;
import com.zzsn.event.entity.Event; import com.zzsn.event.entity.Event;
import com.zzsn.event.entity.EventRegionMap; import com.zzsn.event.entity.EventRegionMap;
import com.zzsn.event.entity.EventTag; import com.zzsn.event.entity.EventTag;
import com.zzsn.event.enums.CodePrefixEnum; import com.zzsn.event.enums.CodePrefixEnum;
import com.zzsn.event.kafka.producer.ProduceInfo;
import com.zzsn.event.mapper.EventMapper; import com.zzsn.event.mapper.EventMapper;
import com.zzsn.event.service.*; import com.zzsn.event.service.*;
import com.zzsn.event.util.*; import com.zzsn.event.util.CodeGenerateUtil;
import com.zzsn.event.util.CronUtil;
import com.zzsn.event.util.HanlpUtil;
import com.zzsn.event.util.RedisUtil;
import com.zzsn.event.vo.*; import com.zzsn.event.vo.*;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
...@@ -66,14 +66,10 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -66,14 +66,10 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
@Autowired @Autowired
private IKeyWordsService iKeyWordsService; private IKeyWordsService iKeyWordsService;
@Autowired @Autowired
private ISubjectSearchEnginesMapService iSubjectSearchEnginesMapService;
@Autowired
private ISubjectInfoSourceMapService iSubjectInfoSourceMapService; private ISubjectInfoSourceMapService iSubjectInfoSourceMapService;
@Autowired @Autowired
private ISubjectKeywordsMapService iSubjectKeywordsMapService; private ISubjectKeywordsMapService iSubjectKeywordsMapService;
@Autowired @Autowired
private ProduceInfo produceInfo;
@Autowired
private IEventTagService eventTagService; private IEventTagService eventTagService;
@Autowired @Autowired
private EventRegionMapService eventRegionMapService; private EventRegionMapService eventRegionMapService;
...@@ -87,6 +83,8 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -87,6 +83,8 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
@Value("${files.storage}") @Value("${files.storage}")
String filesStorage; String filesStorage;
@Value("${img.preview:}")
String imgPreview;
@Override @Override
...@@ -100,9 +98,25 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -100,9 +98,25 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
} }
@Override @Override
public EventVO queryInfo(String eventId) {
EventVO eventVO = baseMapper.queryInfo(eventId);
String eventIcon = eventVO.getEventIcon();
if (StringUtils.isNotEmpty(eventIcon)) {
eventVO.setEventIcon(imgPreview + eventIcon);
}
return eventVO;
}
@Override
public IPage<EventManageVO> pageList(String eventName, Integer eventType, String startTime, String endTime, String order, String orderType, Integer pageNo, Integer pageSize) { public IPage<EventManageVO> pageList(String eventName, Integer eventType, String startTime, String endTime, String order, String orderType, Integer pageNo, Integer pageSize) {
int offset = (pageNo - 1) * pageSize; int offset = (pageNo - 1) * pageSize;
List<EventManageVO> pageList = baseMapper.pageList(eventName, eventType, startTime, endTime, order, orderType, offset, pageSize); List<EventManageVO> pageList = baseMapper.pageList(eventName, eventType, startTime, endTime, order, orderType, offset, pageSize);
for (EventManageVO eventManageVO : pageList) {
String eventIcon = eventManageVO.getEventIcon();
if (StringUtils.isNotEmpty(eventIcon)) {
eventManageVO.setEventIcon(imgPreview + eventIcon);
}
}
//获取总条数 //获取总条数
Integer count = baseMapper.totalCount(eventName, eventType, startTime, endTime); Integer count = baseMapper.totalCount(eventName, eventType, startTime, endTime);
IPage<EventManageVO> pageData = new Page<>(pageNo, pageSize, count); IPage<EventManageVO> pageData = new Page<>(pageNo, pageSize, count);
...@@ -119,6 +133,12 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -119,6 +133,12 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
} }
List<EventFrontVO> pageList = baseMapper.frontPageList(eventName, eventType,labelField,labelName,type,order,orderType, offset, pageSize); List<EventFrontVO> pageList = baseMapper.frontPageList(eventName, eventType,labelField,labelName,type,order,orderType, offset, pageSize);
if (CollectionUtils.isNotEmpty(pageList)) { if (CollectionUtils.isNotEmpty(pageList)) {
for (EventFrontVO eventFrontVO : pageList) {
String eventIcon = eventFrontVO.getEventIcon();
if (StringUtils.isNotEmpty(eventIcon)) {
eventFrontVO.setEventIcon(imgPreview + eventIcon);
}
}
//获取专题资讯的首发来源 //获取专题资讯的首发来源
Map<String, String> map = getFirstMap(pageList); Map<String, String> map = getFirstMap(pageList);
if (MapUtil.isNotEmpty(map)) { if (MapUtil.isNotEmpty(map)) {
...@@ -221,15 +241,6 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -221,15 +241,6 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
eventRegionMapService.remove(queryWrapper); eventRegionMapService.remove(queryWrapper);
List<RegionVO> regionList = addEventParam.getRegionList(); List<RegionVO> regionList = addEventParam.getRegionList();
addMap(eventId, regionList); addMap(eventId, regionList);
//查询出该专题绑定的关键词组
List<KeyWordsPage> keyWordsPages = bindKeyWordsList(eventId);
List<String> keyWordIds = new ArrayList<>();
for (KeyWordsPage keyWordsPage : keyWordsPages) {
keyWordIds.add(keyWordsPage.getId());
}
//更新redis中关键词时间
updateRedisKeyWordsDate(addEventParam.getId(), keyWordIds);
} }
@Override @Override
...@@ -272,6 +283,11 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -272,6 +283,11 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
} }
@Override @Override
public List<EventVO> eventList(List<String> eventIdList) {
return baseMapper.eventList(eventIdList);
}
@Override
public List<EventExcelVO> frontList(List<String> eventIdList,Integer size) { public List<EventExcelVO> frontList(List<String> eventIdList,Integer size) {
return baseMapper.frontList(eventIdList,size); return baseMapper.frontList(eventIdList,size);
} }
...@@ -328,32 +344,4 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements ...@@ -328,32 +344,4 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
return filePath; return filePath;
} }
//更新专题绑定的关键词组的缓存信息
private void updateRedisKeyWordsDate(String subjectId, List<String> keyWordsIds) {
for (String keyWordsId : keyWordsIds) {
KeyWordsDTO keyWordsDTO = iSubjectKeywordsMapService.selectMinByKeyWordsId(keyWordsId);
KeyWordsDTO redisKeyWordsDTO = (KeyWordsDTO) redisUtil.get(Constants.KEY_WORDS_TO_REDIS_PREFIX + keyWordsDTO.getWordsCode());
int count = iSubjectKeywordsMapService.selectCountByKeyWordsId(keyWordsId);
KeyWordsDTO keyWordsDTO1;
if (count <= 0) {
keyWordsDTO1 = iSubjectKeywordsMapService.selectMaxByKeyWordsId(keyWordsId);
redisKeyWordsDTO.setStartTime(keyWordsDTO.getStartTime());
redisKeyWordsDTO.setEndTime(keyWordsDTO1.getEndTime());
} else {
if (redisKeyWordsDTO == null) {
redisKeyWordsDTO = keyWordsDTO;
} else {
redisKeyWordsDTO.setEndTime(null);
redisKeyWordsDTO.setStartTime(keyWordsDTO.getStartTime());
}
}
//查询出该专题绑定了哪些搜索引擎
List<String> stringList = iSubjectSearchEnginesMapService.querySearchList(subjectId);
redisKeyWordsDTO.setSearchEngines(stringList);
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + redisKeyWordsDTO.getWordsCode(), redisKeyWordsDTO);
//立即执行一次
produceInfo.sendKeyWordsInfoSourceMsg(redisKeyWordsDTO);
}
}
} }
package com.zzsn.event.task; package com.zzsn.event.task;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson2.JSON; import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONWriter; import com.alibaba.fastjson2.JSONWriter;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
...@@ -30,6 +29,7 @@ import org.springframework.scheduling.annotation.Scheduled; ...@@ -30,6 +29,7 @@ import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.*; import java.util.*;
import java.util.concurrent.CompletableFuture;
/** /**
* @author lkg * @author lkg
...@@ -202,13 +202,16 @@ public class AnalysisTask { ...@@ -202,13 +202,16 @@ public class AnalysisTask {
* 定时生成传播路径 * 定时生成传播路径
* 每天凌晨1点执行一次 * 每天凌晨1点执行一次
*/ */
@Scheduled(cron = "0 0 0 * * ?") @Scheduled(cron = "0 0 1 * * ?")
public void propagationPath() { public void propagationPath() {
Date today = new Date(); Date today = new Date();
Date deadlineDate = DateUtil.addDate(today, -1); Date deadlineDate = DateUtil.addDate(today, -1);
List<SubjectKafkaVo> subjects = eventService.eventSubjectList(); List<SubjectKafkaVo> subjects = eventService.eventSubjectList();
for (SubjectKafkaVo subject : subjects) { for (SubjectKafkaVo subject : subjects) {
CompletableFuture.runAsync(()->{
String subjectId = subject.getId(); String subjectId = subject.getId();
int count = esService.count(subjectId, null, null);
if (count > 0) {
String key = Constants.SUBJECT_ANALYSIS_PRE + Constants.PROPAGATION_KEY + subjectId; String key = Constants.SUBJECT_ANALYSIS_PRE + Constants.PROPAGATION_KEY + subjectId;
Date timeDisable = subject.getTimeDisable(); Date timeDisable = subject.getTimeDisable();
//已经结束的事件专题,永久缓存 //已经结束的事件专题,永久缓存
...@@ -218,16 +221,18 @@ public class AnalysisTask { ...@@ -218,16 +221,18 @@ public class AnalysisTask {
PropagationPathVo pathVo = analysisService.propagationPath(subjectId); PropagationPathVo pathVo = analysisService.propagationPath(subjectId);
if (ObjectUtils.isNotEmpty(pathVo)) { if (ObjectUtils.isNotEmpty(pathVo)) {
redisUtil.set(key, pathVo); redisUtil.set(key, pathVo);
log.info("专题-{},传播路径数据【永久】缓存成功!", subject.getSubjectName());
} }
log.info("专题id为-{}-的专题-传播路径数据:{}-缓存成功!", subjectId, JSONObject.toJSON(pathVo));
} }
} else {//已经结束的事件专题,缓存有效期一天 } else {//已经结束的事件专题,缓存有效期一天
PropagationPathVo pathVo = analysisService.propagationPath(subjectId); PropagationPathVo pathVo = analysisService.propagationPath(subjectId);
if (ObjectUtils.isNotEmpty(pathVo)) { if (ObjectUtils.isNotEmpty(pathVo)) {
redisUtil.set(key, pathVo, 3600 * 24); redisUtil.set(key, pathVo, 3600 * 24);
log.info("专题-{},传播路径数据缓存成功!", subject.getSubjectName());
}
} }
} }
log.info("专题id为-{}-的专题-传播路径数据-缓存成功!", subjectId); });
} }
} }
} }
package com.zzsn.event.util;
import com.aspose.words.*;
import org.apache.commons.lang3.SystemUtils;
import org.fusesource.hawtbuf.DataByteArrayOutputStream;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
/**
* Aspose-based Word/HTML conversion utility
*
* @author liuyang
* @since 20201014
*/
public class DocUtil {
private static final String fontsPath = "/usr/share/fonts";
/**
* Load the Aspose license
*
*/
public static void getLicense() {
try {
Resource resource = new ClassPathResource("license.xml");
InputStream is = resource.getInputStream();
License aposeLic = new License();
aposeLic.setLicense(is);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Convert a Word document stream into HTML content
*
* @param inputStream
* @return
* @throws Exception
*/
public static String convertDocStream2Html(InputStream inputStream) throws Exception {
getLicense();
Document doc = new Document(inputStream);
HtmlSaveOptions saveOptions = new HtmlSaveOptions(SaveFormat.HTML);
saveOptions.setExportHeadersFootersMode(ExportHeadersFootersMode.NONE); // see the HtmlSaveOptions API docs for the remaining options
saveOptions.setCssStyleSheetType(CssStyleSheetType.INLINE);
//Whether font resources are exported to HTML, MHTML or EPUB. Default is false.
// saveOptions.setFontsFolderAlias("font");
// saveOptions.setFontSavingCallback(fontSavingArgs -> fontSavingArgs.getFontStream());
//Embed image resources in the HTML using Base64 encoding. Default is false.
saveOptions.setExportImagesAsBase64(true);
//Whether page setup is exported to HTML, MHTML or EPUB. Default is false.
saveOptions.setExportPageSetup(true);
//Whether font sizes are output in relative units when saving to HTML, MHTML or EPUB. Default is false.
// saveOptions.setExportRelativeFontSize(true);
//Controls how text input form fields are saved to HTML or MHTML. Default is false.
saveOptions.setExportTextInputFormFieldAsText(true);
//If true, pretty-formats the output where applicable. Default is false.
// saveOptions.setPrettyFormat(true);
//Whether to use the high-quality (slower) rendering algorithm (inherited from SaveOptions).
saveOptions.setUseHighQualityRendering(true);
// saveOptions.setDocumentSplitCriteria(DocumentSplitCriteria.HEADING_PARAGRAPH);
//Controls how table, row and cell widths are exported to HTML, MHTML or EPUB. Default is HtmlElementSizeOutputMode.ALL.
saveOptions.setTableWidthOutputMode(HtmlElementSizeOutputMode.RELATIVE_ONLY);
saveOptions.setExportTocPageNumbers(true);
//Whether negative left and right paragraph indents are normalized when saving to HTML, MHTML or EPUB. Default is false.
saveOptions.setAllowNegativeIndent(true);
//Whether Aspose.Words scales images to the bounding shape size when exporting to HTML, MHTML or EPUB. Default is true.
saveOptions.setScaleImageToShapeSize(true);
ByteArrayOutputStream htmlStream = new ByteArrayOutputStream();
String htmlText = "";
try {
doc.save(htmlStream, saveOptions);
htmlText = htmlStream.toString("UTF-8");
htmlStream.close();
} catch (Exception e) {
e.printStackTrace();
}
return htmlText;
}
/**
* Convert HTML content to a DOCX byte array.
*
* @param content HTML content
* @return DOCX file bytes
* @throws Exception if the conversion fails
*/
public static byte[] convertDocHtml2Doc(String content) throws Exception {
getLicense();
Document doc = new Document();
if (SystemUtils.IS_OS_LINUX) {
FontSettings fontSettings = new FontSettings();
//true means subfolders are scanned recursively
fontSettings.setFontsFolder(fontsPath, true);
//attach the settings to the document, otherwise they never take effect
doc.setFontSettings(fontSettings);
}
DocumentBuilder builder = new DocumentBuilder(doc);
builder.insertHtml(content);
//ByteArrayOutputStream.toByteArray() returns exactly the written bytes,
//whereas hawtbuf's DataByteArrayOutputStream.getData() exposes the (possibly larger) internal buffer
ByteArrayOutputStream out = new ByteArrayOutputStream();
doc.save(out, SaveFormat.DOCX);
return out.toByteArray();
}
}
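A minimal usage sketch for the two converters above; the file names are placeholders and license.xml is assumed to be on the classpath, as getLicense() expects:

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

public class DocUtilDemo {
    public static void main(String[] args) throws Exception {
        // Word -> HTML
        try (InputStream in = Files.newInputStream(Paths.get("report.docx"))) {
            String html = DocUtil.convertDocStream2Html(in);
            System.out.println("converted " + html.length() + " characters of HTML");
        }
        // HTML -> DOCX
        byte[] docx = DocUtil.convertDocHtml2Doc("<h1>Title</h1><p>Body paragraph</p>");
        Files.write(Paths.get("out.docx"), docx);
    }
}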
...@@ -2,9 +2,9 @@ package com.zzsn.event.util; ...@@ -2,9 +2,9 @@ package com.zzsn.event.util;
import cn.hutool.core.collection.CollectionUtil; import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.ObjectUtil; import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature; import com.alibaba.fastjson2.JSONWriter;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
...@@ -72,17 +72,9 @@ import java.util.*; ...@@ -72,17 +72,9 @@ import java.util.*;
@Component @Component
public class EsOpUtil<T> { public class EsOpUtil<T> {
public RestHighLevelClient getClient() {
return client;
}
@Autowired @Autowired
RestHighLevelClient client; RestHighLevelClient client;
public Boolean isClinet() {
return false;
}
/** /**
* 获取节点相关信息 * 获取节点相关信息
* *
...@@ -332,12 +324,12 @@ public class EsOpUtil<T> { ...@@ -332,12 +324,12 @@ public class EsOpUtil<T> {
* @return * @return
*/ */
public String docSavaByEntity(String index, String id, Object object) { public String docSavaByEntity(String index, String id, Object object) {
return docSaveByJson(index, id, JSON.toJSONString(object, SerializerFeature.WriteMapNullValue)); return docSaveByJson(index, id, JSON.toJSONString(object, JSONWriter.Feature.WriteMapNullValue));
} }
public void docSavaByEntityAsync(String index, String id, Object object) { public void docSavaByEntityAsync(String index, String id, Object object) {
docSaveByJsonAsync(index, id, JSON.toJSONString(object, SerializerFeature.WriteMapNullValue)); docSaveByJsonAsync(index, id, JSON.toJSONString(object, JSONWriter.Feature.WriteMapNullValue));
} }
/** /**
...@@ -525,62 +517,19 @@ public class EsOpUtil<T> { ...@@ -525,62 +517,19 @@ public class EsOpUtil<T> {
searchRequest.source(searchSourceBuilder); searchRequest.source(searchSourceBuilder);
SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
SearchHits hits = searchResponse.getHits(); SearchHits hits = searchResponse.getHits();
Long total = hits.getTotalHits().value; long total = hits.getTotalHits().value;
if (total > 0) { if (total > 0) {
log.info("isExistSubjectAndArticle[index:{}][id:{}][subject:{}]重复,库中已存在", index, id, subjectId); log.info("isExistSubjectAndArticle[index:{}][id:{}][subject:{}]重复,库中已存在", index, id, subjectId);
return true; return true;
} }
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
log.info("isExistSubjectAndArticle[index:{}][id:{}][subject:{}]发生异常,异常信息为[{}]", index, id, subjectId); log.info("isExistSubjectAndArticle[index:{}][id:{}][subject:{}]发生异常", index, id, subjectId);
return false; return false;
} }
return false; return false;
} }
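Since isExistSubjectAndArticle only needs to know whether any hit exists, retrieving full hits is heavier than necessary; the 7.x high-level client can return just the count. A sketch of a count-based variant, assuming the same client and log fields and that the documents carry id and subjectId fields (the exact query of the original method lies outside this hunk):

import java.io.IOException;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

// Hypothetical count-based existence check (method and field names are illustrative).
public boolean existsByIdAndSubject(String index, String id, String subjectId) {
    try {
        SearchSourceBuilder source = new SearchSourceBuilder().query(QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("id", id))
                .must(QueryBuilders.termQuery("subjectId", subjectId)));
        CountRequest countRequest = new CountRequest(index);
        countRequest.source(source);
        CountResponse response = client.count(countRequest, RequestOptions.DEFAULT);
        return response.getCount() > 0;
    } catch (IOException e) {
        log.warn("existsByIdAndSubject failed, index=[{}], id=[{}], ex=[{}]", index, id, e.getMessage());
        return false;
    }
}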
/**
* 按条件查询数据
*
* @param index
* @param start
* @param size
* @param queryBuilder
* @return
*/
public Map<String, Object> searchByQuery(String index, int start, int size, QueryBuilder queryBuilder) {
try {
Map<String, Object> resultMap = new HashMap<>(100);
SearchRequest searchRequest = new SearchRequest(index);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
if (queryBuilder != null) {
searchSourceBuilder.query(queryBuilder);
}
if (start >= 0 && size >= 0) {
searchSourceBuilder.from(start);
searchSourceBuilder.size(size);
}
searchRequest.source(searchSourceBuilder);
SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
SearchHits hits = searchResponse.getHits();
Long total = hits.getTotalHits().value;
resultMap.put("total", total);
SearchHit[] searchHits = hits.getHits();
List<Map<String, Object>> mapList = new ArrayList<>(searchHits.length);
for (SearchHit hit : searchHits) {
//存储的字段
Map<String, Object> sourceAsMap = hit.getSourceAsMap();
//得分
sourceAsMap.put("score", hit.getScore());
mapList.add(sourceAsMap);
}
resultMap.put("data", mapList);
return resultMap;
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
public boolean existBySourceAddress(String index, String sourceAddress) { public boolean existBySourceAddress(String index, String sourceAddress) {
SearchRequest searchRequest = new SearchRequest(index); SearchRequest searchRequest = new SearchRequest(index);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
...@@ -664,7 +613,7 @@ public class EsOpUtil<T> { ...@@ -664,7 +613,7 @@ public class EsOpUtil<T> {
private String getIdOrCode(Map<String,String> columValue){ private String getIdOrCode(Map<String,String> columValue){
StringBuffer script = new StringBuffer(); StringBuffer script = new StringBuffer();
columValue.forEach((colum,value)->{ columValue.forEach((colum,value)->{
script.append("ctx._source."+colum+" = '"+value+"';"); script.append("ctx._source.").append(colum).append(" = '").append(value).append("';");
}); });
return script.toString(); return script.toString();
} }
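getIdOrCode above builds the painless script by concatenating raw values, so a value containing a single quote breaks the script, and every distinct value forces a fresh script compilation. Passing the values as params avoids both problems. A sketch of the parameterized form and of how such a script is typically attached to an update-by-query call; the index handling and query are assumptions, since the caller of getIdOrCode is not shown in this hunk:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

// Hypothetical parameterized variant: values travel as params instead of being inlined.
public void updateColumnsById(String index, String docId, Map<String, String> columnValue) throws IOException {
    StringBuilder source = new StringBuilder();
    Map<String, Object> params = new HashMap<>(columnValue.size());
    columnValue.forEach((column, value) -> {
        source.append("ctx._source.").append(column).append(" = params.").append(column).append(";");
        params.put(column, value);
    });
    Script script = new Script(ScriptType.INLINE, "painless", source.toString(), params);
    UpdateByQueryRequest request = new UpdateByQueryRequest(index);
    request.setQuery(QueryBuilders.idsQuery().addIds(docId));
    request.setScript(script);
    client.updateByQuery(request, RequestOptions.DEFAULT);
}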
...@@ -897,22 +846,4 @@ public class EsOpUtil<T> { ...@@ -897,22 +846,4 @@ public class EsOpUtil<T> {
return pageData; return pageData;
} }
} }
/**
* 根据index,id索引文件
*
* @param index 索引名称
* @param id 资讯id
*/
public Map<String, Object> searchDoc(String index, String id) {
Map<String, Object> map = new HashMap<>();
try {
GetRequest searchRequest = new GetRequest(index, id);
GetResponse documentFields = client.get(searchRequest, RequestOptions.DEFAULT);
map = documentFields.getSourceAsMap();
} catch (IOException e) {
log.warn("查询doc异常,index=[{}],id=[{}], ex=[{}]", index, id, e.getMessage());
}
return map;
}
} }
package com.zzsn.event.util; package com.zzsn.event.util;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson2.JSONObject;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.http.*; import org.apache.http.*;
import org.apache.http.auth.AuthScope; import org.apache.http.auth.AuthScope;
......
package com.zzsn.event.util; package com.zzsn.event.util;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson2.JSONObject;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.util.HashMap; import java.util.HashMap;
......
...@@ -8,10 +8,7 @@ import org.springframework.data.redis.core.ScanOptions; ...@@ -8,10 +8,7 @@ import org.springframework.data.redis.core.ScanOptions;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.HashSet; import java.util.*;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
/** /**
...@@ -77,7 +74,7 @@ public class RedisUtil { ...@@ -77,7 +74,7 @@ public class RedisUtil {
if (key.length == 1) { if (key.length == 1) {
redisTemplate.delete(key[0]); redisTemplate.delete(key[0]);
} else { } else {
redisTemplate.delete(CollectionUtils.arrayToList(key)); redisTemplate.delete((Collection<String>) CollectionUtils.arrayToList(key));
} }
} }
} }
...@@ -552,46 +549,5 @@ public class RedisUtil { ...@@ -552,46 +549,5 @@ public class RedisUtil {
} }
} }
/**
* 获取指定前缀的一系列key
* 使用scan命令代替keys, Redis是单线程处理,keys命令在KEY数量较多时,
* 操作效率极低【时间复杂度为O(N)】,该命令一旦执行会严重阻塞线上其它命令的正常请求
* @param keyPrefix key统一的前缀
*/
private Set<String> keys(String keyPrefix) {
String realKey = keyPrefix + "*";
try {
return redisTemplate.execute((RedisCallback<Set<String>>) connection -> {
Set<String> binaryKeys = new HashSet<>();
Cursor<byte[]> cursor = connection.scan(new ScanOptions.ScanOptionsBuilder().match(realKey).count(Integer.MAX_VALUE).build());
while (cursor.hasNext()) {
binaryKeys.add(new String(cursor.next()));
}
return binaryKeys;
});
} catch (Throwable e) {
e.printStackTrace();
}
return null;
}
/**
* 删除指定前缀的一系列key
* @param keyPrefix key统一的前缀
*/
public void removeAll(String keyPrefix) {
try {
Set<String> keys = keys(keyPrefix);
if (!CollectionUtils.isEmpty(keys)) {
redisTemplate.delete(keys);
}
} catch (Throwable e) {
e.printStackTrace();
}
}
} }
...@@ -12,6 +12,6 @@ public class AddEventParam extends Event { ...@@ -12,6 +12,6 @@ public class AddEventParam extends Event {
private Integer extractHotWords; private Integer extractHotWords;
// time unit minute // time unit minute
private Integer period; private Integer period;
/** event region information */
private List<RegionVO> regionList; List<RegionVO> regionList;
} }
...@@ -15,7 +15,7 @@ public class EventManageVO { ...@@ -15,7 +15,7 @@ public class EventManageVO {
private String id; private String id;
private String eventIcon; private String eventIcon;
private String eventName; private String eventName;
private String eventType; private String typeName;
private String eventLabel; private String eventLabel;
private Integer facePublic; private Integer facePublic;
private Integer publishStatus; private Integer publishStatus;
......
package com.zzsn.event.vo;
import com.zzsn.event.entity.EventTag;
import lombok.Data;
import java.util.List;
/**
* Event view object.
*
* @author lkg
* @date 2024/4/12
*/
@Data
public class EventVO {
/** event id */
private String id;
/** event icon */
private String eventIcon;
/** event name */
private String eventName;
/** start time */
private String startTime;
/** end time */
private String endTime;
/** publish time */
private String publishDate;
/** event description */
private String eventDescribe;
/** event label */
private String eventLabel;
/** related events */
private String relationEvents;
/** whether public (0 - not public; 1 - public) */
private Integer facePublic;
/** event category id */
private String eventType;
/** event category name */
private String typeName;
/** whether an analysis report has been generated */
private Boolean hasReport;
/** details of related events */
private List<EventVO> relatedEventList;
/** event tag information */
private EventTag eventTag;
/** event region information */
private List<RegionVO> regionList;
}
...@@ -16,10 +16,16 @@ public class SubjectKafkaVo { ...@@ -16,10 +16,16 @@ public class SubjectKafkaVo {
private String id; private String id;
/*专题名称*/ /*专题名称*/
private String subjectName; private String subjectName;
/*event description*/
private String eventDescribe;
/*专题开始时间*/ /*专题开始时间*/
private Date timeEnable; private Date timeEnable;
/*专题结束*/ /*专题结束*/
private Date timeDisable; private Date timeDisable;
/*whether public (0 - not public; 1 - public)*/
private Integer facePublic;
/*publish status (0 - unpublished; 1 - published)*/
private Integer publishStatus;
/*专题增量分析规则*/ /*专题增量分析规则*/
private Integer increAnaRule; private Integer increAnaRule;
/*专题总量分析规则*/ /*专题总量分析规则*/
......
...@@ -33,8 +33,6 @@ import java.util.List; ...@@ -33,8 +33,6 @@ import java.util.List;
@DS("multi-datasource1") @DS("multi-datasource1")
public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobInfo> implements IXxlJobInfoService { public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobInfo> implements IXxlJobInfoService {
@Autowired @Autowired
IXxlJobInfoService xxlJobInfoService;
@Autowired
IXxlJobGroupService xxlJobGroupService; IXxlJobGroupService xxlJobGroupService;
//关键词执行器 //关键词执行器
...@@ -99,7 +97,7 @@ public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobI ...@@ -99,7 +97,7 @@ public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobI
} }
// 关联的信息源编码 // 关联的信息源编码
xxlJobInfo.setInfoSourceCode(keyWordsDTO.getWordsCode()); xxlJobInfo.setInfoSourceCode(keyWordsDTO.getWordsCode());
xxlJobInfoService.save(xxlJobInfo); save(xxlJobInfo);
} }
...@@ -178,7 +176,7 @@ public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobI ...@@ -178,7 +176,7 @@ public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobI
} }
// 关联的信息源编码 // 关联的信息源编码
xxlJobInfo.setInfoSourceCode(event.getEventCode()); xxlJobInfo.setInfoSourceCode(event.getEventCode());
xxlJobInfoService.save(xxlJobInfo); save(xxlJobInfo);
} }
...@@ -209,7 +207,7 @@ public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobI ...@@ -209,7 +207,7 @@ public class XxlJobInfoServiceImpl extends ServiceImpl<XxlJobInfoMapper, XxlJobI
if (!ObjectUtils.isEmpty(infosourceCode)) { if (!ObjectUtils.isEmpty(infosourceCode)) {
QueryWrapper<XxlJobInfo> xxlJobInfoQueryWrapper = new QueryWrapper<>(); QueryWrapper<XxlJobInfo> xxlJobInfoQueryWrapper = new QueryWrapper<>();
xxlJobInfoQueryWrapper.eq("info_source_code", infosourceCode); xxlJobInfoQueryWrapper.eq("info_source_code", infosourceCode);
xxlJobInfoService.remove(xxlJobInfoQueryWrapper); remove(xxlJobInfoQueryWrapper);
} }
} }
......
...@@ -7,8 +7,13 @@ spring: ...@@ -7,8 +7,13 @@ spring:
multipart: multipart:
max-request-size: 1024MB max-request-size: 1024MB
max-file-size: 100MB max-file-size: 100MB
web:
resources: resources:
static-locations: file:${user.dir}/event/ static-locations:
- file:${user.dir}/icon/
freemarker:
template-loader-path:
- classpath:/template
datasource: datasource:
druid: druid:
stat-view-servlet: stat-view-servlet:
...@@ -42,7 +47,6 @@ spring: ...@@ -42,7 +47,6 @@ spring:
filters: stat,wall,slf4j filters: stat,wall,slf4j
# 通过connectProperties属性来打开mergeSql功能;慢SQL记录 # 通过connectProperties属性来打开mergeSql功能;慢SQL记录
connectionProperties: druid.stat.mergeSql\=true;druid.stat.slowSqlMillis\=5000 connectionProperties: druid.stat.mergeSql\=true;druid.stat.slowSqlMillis\=5000
datasource: datasource:
master: master:
url: jdbc:mysql://114.116.44.11:3306/clb_project?useUnicode=true&characterEncoding=utf-8&AllowPublicKeyRetrieval=True&serverTimezone=Asia/Shanghai&autoReconnect=true&rewriteBatchedStatements=true url: jdbc:mysql://114.116.44.11:3306/clb_project?useUnicode=true&characterEncoding=utf-8&AllowPublicKeyRetrieval=True&serverTimezone=Asia/Shanghai&autoReconnect=true&rewriteBatchedStatements=true
...@@ -55,8 +59,7 @@ spring: ...@@ -55,8 +59,7 @@ spring:
password: qwer@9988&zzsn password: qwer@9988&zzsn
driver-class-name: com.mysql.cj.jdbc.Driver driver-class-name: com.mysql.cj.jdbc.Driver
elasticsearch: elasticsearch:
rest: uris: [ "114.115.215.250:9700","114.116.19.92:9700","114.115.215.96:9200","114.116.36.231:9200" ]
uris: [ "114.115.215.250:9700","114.116.19.92:9700","114.116.54.108:9200" ]
username: elastic username: elastic
password: zzsn9988 password: zzsn9988
connection-timeout: 300000 connection-timeout: 300000
...@@ -85,16 +88,17 @@ spring: ...@@ -85,16 +88,17 @@ spring:
producer: # 生产者 producer: # 生产者
retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送 retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送
batch-size: 16384 batch-size: 16384
# producer buffer size in bytes (the Kafka default is 32MB, i.e. 33554432)
buffer-memory: 335544324 buffer-memory: 335544324
acks: 1 acks: 1
# 指定消息key和消息体的编解码方式 # 指定消息key和消息体的编解码方式
key-serializer: org.apache.kafka.common.serialization.StringSerializer key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer value-serializer: org.apache.kafka.common.serialization.StringSerializer
# listener: properties:
# ack-mode: manual_immediate max.request.size: 5242880 #5M
consumer: consumer:
#用于标识此使用者所属的使用者组的唯一字符串 #用于标识此使用者所属的使用者组的唯一字符串
group-id: subject-analysis-group group-id: event-analysis-group
#当Kafka中没有初始偏移量或者服务器上不再存在当前偏移量时该怎么办,默认值为latest,表示自动将偏移重置为最新的偏移量 #当Kafka中没有初始偏移量或者服务器上不再存在当前偏移量时该怎么办,默认值为latest,表示自动将偏移重置为最新的偏移量
#可选的值为latest, earliest, none #可选的值为latest, earliest, none
auto-offset-reset: latest auto-offset-reset: latest
...@@ -106,20 +110,25 @@ spring: ...@@ -106,20 +110,25 @@ spring:
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer #值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
mybatis-plus: mybatis-plus:
mapper-locations: classpath*:com/zzsn/event/mapper/xml/*.xml,classpath*:com/zzsn/event/xxljob/mapper/xml/*.xml mapper-locations: classpath*:com/zzsn/event/**/xml/*.xml
configuration: configuration:
log-impl: org.apache.ibatis.logging.stdout.StdOutImpl log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
map-underscore-to-camel-case: true map-underscore-to-camel-case: true
obs:
ak: VEHN7D0TJ9316H8AHCAV
sk: heR353lvSWVPNU8pe2QxDtd8GDsO5L6PGH5eUoQY
endPoint: https://obs.cn-north-1.myhuaweicloud.com
scheduling: scheduling:
enable: false enable: false
serviceProject: serviceProject:
url: https://clb.ciglobal.cn/clb-api/datapull/ url: https://clb.ciglobal.cn/clb-api/datapull/
files: files:
storage: ${user.dir}/event/ storage: ${user.dir}/icon/
scoreRule: scoreRule:
weekScore: 10 weekScore: 10
monthScore: 5 monthScore: 5
yearScore: 3 yearScore: 3
beforeYearScore: 1 beforeYearScore: 1
img:
preview: http://192.168.1.28:1688/event/
...@@ -7,8 +7,10 @@ spring: ...@@ -7,8 +7,10 @@ spring:
multipart: multipart:
max-request-size: 1024MB max-request-size: 1024MB
max-file-size: 100MB max-file-size: 100MB
web:
resources: resources:
static-locations: file:${user.dir}/event/ static-locations:
- file:${user.dir}/icon/
datasource: datasource:
druid: druid:
stat-view-servlet: stat-view-servlet:
...@@ -42,7 +44,6 @@ spring: ...@@ -42,7 +44,6 @@ spring:
filters: stat,wall,slf4j filters: stat,wall,slf4j
# 通过connectProperties属性来打开mergeSql功能;慢SQL记录 # 通过connectProperties属性来打开mergeSql功能;慢SQL记录
connectionProperties: druid.stat.mergeSql\=true;druid.stat.slowSqlMillis\=5000 connectionProperties: druid.stat.mergeSql\=true;druid.stat.slowSqlMillis\=5000
datasource: datasource:
master: master:
url: jdbc:mysql://114.116.44.11:3306/clb_project?useUnicode=true&characterEncoding=utf-8&AllowPublicKeyRetrieval=True&serverTimezone=Asia/Shanghai&autoReconnect=true&rewriteBatchedStatements=true url: jdbc:mysql://114.116.44.11:3306/clb_project?useUnicode=true&characterEncoding=utf-8&AllowPublicKeyRetrieval=True&serverTimezone=Asia/Shanghai&autoReconnect=true&rewriteBatchedStatements=true
...@@ -55,7 +56,6 @@ spring: ...@@ -55,7 +56,6 @@ spring:
password: qwer@9988&zzsn password: qwer@9988&zzsn
driver-class-name: com.mysql.cj.jdbc.Driver driver-class-name: com.mysql.cj.jdbc.Driver
elasticsearch: elasticsearch:
rest:
uris: [ "192.168.0.149:9700","192.168.0.35:9700","192.168.200.108:9200","192.168.200.222:9200" ] uris: [ "192.168.0.149:9700","192.168.0.35:9700","192.168.200.108:9200","192.168.200.222:9200" ]
username: elastic username: elastic
password: zzsn9988 password: zzsn9988
...@@ -69,7 +69,7 @@ spring: ...@@ -69,7 +69,7 @@ spring:
zkNodes: 114.115.159.144:2181 zkNodes: 114.115.159.144:2181
requiredAcks: 1 requiredAcks: 1
redis: redis:
database: 7 database: 0
host: 114.116.90.53 host: 114.116.90.53
lettuce: lettuce:
pool: pool:
...@@ -85,16 +85,17 @@ spring: ...@@ -85,16 +85,17 @@ spring:
producer: # 生产者 producer: # 生产者
retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送 retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送
batch-size: 16384 batch-size: 16384
# producer buffer size in bytes (the Kafka default is 32MB, i.e. 33554432)
buffer-memory: 335544324 buffer-memory: 335544324
acks: 1 acks: 1
# 指定消息key和消息体的编解码方式 # 指定消息key和消息体的编解码方式
key-serializer: org.apache.kafka.common.serialization.StringSerializer key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer value-serializer: org.apache.kafka.common.serialization.StringSerializer
# listener: properties:
# ack-mode: manual_immediate max.request.size: 5242880 #5M
consumer: consumer:
#用于标识此使用者所属的使用者组的唯一字符串 #用于标识此使用者所属的使用者组的唯一字符串
group-id: subject-analysis-group group-id: event-analysis-group
#当Kafka中没有初始偏移量或者服务器上不再存在当前偏移量时该怎么办,默认值为latest,表示自动将偏移重置为最新的偏移量 #当Kafka中没有初始偏移量或者服务器上不再存在当前偏移量时该怎么办,默认值为latest,表示自动将偏移重置为最新的偏移量
#可选的值为latest, earliest, none #可选的值为latest, earliest, none
auto-offset-reset: latest auto-offset-reset: latest
...@@ -106,19 +107,25 @@ spring: ...@@ -106,19 +107,25 @@ spring:
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer #值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
mybatis-plus: mybatis-plus:
mapper-locations: classpath*:com/zzsn/event/mapper/xml/*.xml,classpath*:com/zzsn/event/xxljob/mapper/xml/*.xml mapper-locations: classpath*:com/zzsn/event/**/xml/*.xml
configuration: configuration:
log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
map-underscore-to-camel-case: true map-underscore-to-camel-case: true
obs:
ak: VEHN7D0TJ9316H8AHCAV
sk: heR353lvSWVPNU8pe2QxDtd8GDsO5L6PGH5eUoQY
endPoint: https://obs.cn-north-1.myhuaweicloud.com
scheduling: scheduling:
enable: true enable: true
serviceProject: serviceProject:
url: https://clb.ciglobal.cn/clb-api/datapull/ url: https://clb.ciglobal.cn/clb-api/datapull/
files: files:
storage: ${user.dir}/event/ storage: ${user.dir}/icon/
scoreRule: scoreRule:
weekScore: 10 weekScore: 10
monthScore: 5 monthScore: 5
yearScore: 3 yearScore: 3
beforeYearScore: 1 beforeYearScore: 1
img:
preview: http://114.115.175.45:1688/event/
\ No newline at end of file
<License>
<Data>
<Products>
<Product>Aspose.Total for Java</Product>
<Product>Aspose.Words for Java</Product>
</Products>
<EditionType>Enterprise</EditionType>
<SubscriptionExpiry>20991231</SubscriptionExpiry>
<LicenseExpiry>20991231</LicenseExpiry>
<SerialNumber>8bfe198c-7f0c-4ef8-8ff0-acc3237bf0d7</SerialNumber>
</Data>
<Signature>sNLLKGMUdF0r8O1kKilWAGdgfs2BvJb/2Xp8p5iuDVfZXmhppo+d0Ran1P9TKdjV4ABwAgKXxJ3jcQTqE/2IRfqwnPf8itN8aFZlV3TJPYeD3yWE7IT55Gz6EijUpC7aKeoohTb4w2fpox58wWoF3SNp6sK6jDfiAUGEHYJ9pjU=</Signature>
</License>
\ No newline at end of file