Commit df3a629b  Author: 925993793@qq.com

Integrate custom topics, event analysis, and topic news reports

Parent c3a41750
......@@ -9,8 +9,13 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.zzsn.clb.common.model.task.dto.titr.KeyWordsDTO;
import com.zzsn.event.constant.Constants;
import com.zzsn.event.constant.Result;
import com.zzsn.event.entity.*;
import com.zzsn.event.service.*;
import com.zzsn.event.entity.Event;
import com.zzsn.event.entity.KeyWords;
import com.zzsn.event.entity.SubjectInfoSourceMap;
import com.zzsn.event.service.IEventService;
import com.zzsn.event.service.IKeyWordsService;
import com.zzsn.event.service.ISubjectInfoSourceMapService;
import com.zzsn.event.service.LabelEntityService;
import com.zzsn.event.util.HttpUtil;
import com.zzsn.event.util.ObjectUtil;
import com.zzsn.event.util.RedisUtil;
......@@ -18,7 +23,6 @@ import com.zzsn.event.util.tree.Node;
import com.zzsn.event.util.user.UserUtil;
import com.zzsn.event.util.user.UserVo;
import com.zzsn.event.vo.*;
import com.zzsn.event.xxljob.entity.XxlJobInfo;
import com.zzsn.event.xxljob.service.IXxlJobInfoService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
......@@ -56,11 +60,6 @@ public class EventManageController {
private ISubjectInfoSourceMapService subjectInfoSourceMapService;
@Autowired
private ICustomerDataPermissionMapService customerDataPermissionMapService;
@Autowired
private ISysUserDataPermissionService sysUserDataPermissionService;
@Autowired
private IKeyWordsService keyWordsService;
@Autowired
......@@ -70,6 +69,8 @@ public class EventManageController {
@Value("${serviceProject.url:}")
private String SERVICE_PROJECT_URL;
@Value("${kafka.topic.event.run:}")
private String EVENT_MODEL_KAFKA_CHANNEL;
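For context, the @Value placeholders above (and SERVICE_PROJECT_URL a few lines up) resolve against the Spring configuration; a sketch of the assumed application.yml keys follows, with placeholder values that are not part of the commit (the empty default after the colon in each placeholder keeps startup working when a key is missing):
serviceProject:
  url: http://service-project.example.internal    # placeholder value, not in the commit
kafka:
  topic:
    event:
      run: event-model-run                        # placeholder topic name, not in the commit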
/**
* 分页列表-后台管理
......@@ -130,22 +131,166 @@ public class EventManageController {
}
private void updateStatus(String eventId, Integer status) {
eventService.update(Wrappers.<Event>lambdaUpdate().eq(Event::getId, eventId).set(Event::getStatus, status));
CompletableFuture.runAsync(() -> {
Event event = eventService.getById(eventId);
iXxlJobInfoService.update(Wrappers.<XxlJobInfo>lambdaUpdate().eq(XxlJobInfo::getInfoSourceCode, event.getEventCode())
.set(XxlJobInfo::getTriggerStatus, status));
//关键词
KeywordsVO keywordsVO = keyWordsService.keywordInfoByEventId(eventId);
iXxlJobInfoService.update(Wrappers.<XxlJobInfo>lambdaUpdate().eq(XxlJobInfo::getInfoSourceCode, keywordsVO.getWordsCode())
.set(XxlJobInfo::getTriggerStatus, status));
if (1 == status) {
kafkaTemplate.send(Constants.EVENT_SUBJECT_MODEL, event.getEventCode());
}
});
}
/**
* 1.2 添加
*
* @param eventParam
* @return
*/
@PostMapping(value = "/add")
public Result<?> add(@RequestBody AddEventParam eventParam) {
KeywordsVO keywordsVO = eventParam.getKeywordsVO();
if (keywordsVO != null) {
UserVo currentUser = UserUtil.getLoginUser();
eventParam.setCreateBy(currentUser.getUsername());
Event event = eventService.saveMain(eventParam);
//新增关键词
KeyWords keyWords = keyWordsService.saveKeyword(event, keywordsVO.getKeyword(), keywordsVO.getExclusionWord());
CompletableFuture.runAsync(() -> {
//插入xxlJob
iXxlJobInfoService.eventInsert(event);
//关键词入缓存
KeyWordsDTO redisKeywordDTO = new KeyWordsDTO();
BeanUtils.copyProperties(keyWords, redisKeywordDTO);
redisKeywordDTO.setStartTime(event.getStartTime());
redisKeywordDTO.setEndTime(event.getEndTime());
redisKeywordDTO.setSearchEngines(new ArrayList<>(Constants.DEFAULT_SEARCH_ENGINE.values()));
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + keyWords.getWordsCode(), redisKeywordDTO);
//插入xxljob
iXxlJobInfoService.keyWordsInsert(redisKeywordDTO);
//为了立即响应,关键词新增时放入消息队列
kafkaTemplate.send(Constants.KEY_WORDS_COLLECT_TOPIC, JSON.toJSONString(redisKeywordDTO));
kafkaTemplate.send(EVENT_MODEL_KAFKA_CHANNEL, event.getEventCode());
});
return Result.OK();
} else {
return Result.FAIL(500, "关键词不能为空");
}
}
/**
* 1.3 编辑
*
* @param addEventParam
* @return
*/
@PostMapping(value = "/edit")
public Result<?> edit(@RequestBody AddEventParam addEventParam) {
//关键词
KeywordsVO keywordsVO = addEventParam.getKeywordsVO();
if (keywordsVO != null) {
UserVo currentUser = UserUtil.getLoginUser();
addEventParam.setUpdateTime(new Date());
addEventParam.setUpdateBy(currentUser.getUsername());
eventService.updateMain(addEventParam);
keyWordsService.update(Wrappers.<KeyWords>lambdaUpdate().eq(KeyWords::getId, keywordsVO.getId())
.set(KeyWords::getKeyWord, keywordsVO.getKeyword())
.set(KeyWords::getExclusionWord, keywordsVO.getExclusionWord()));
CompletableFuture.runAsync(() -> {
KeyWordsDTO redisKeywordDTO = new KeyWordsDTO();
BeanUtils.copyProperties(keywordsVO, redisKeywordDTO);
redisKeywordDTO.setKeyWord(keywordsVO.getKeyword());
redisKeywordDTO.setStartTime(addEventParam.getStartTime());
redisKeywordDTO.setEndTime(addEventParam.getEndTime());
redisKeywordDTO.setSearchEngines(new ArrayList<>(Constants.DEFAULT_SEARCH_ENGINE.values()));
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + keywordsVO.getWordsCode(), redisKeywordDTO);
//为了立即响应,关键词编辑时放入消息队列
kafkaTemplate.send(Constants.KEY_WORDS_COLLECT_TOPIC, JSON.toJSONString(redisKeywordDTO));
});
return Result.OK();
} else {
return Result.FAIL(500, "关键词不能为空");
}
}
/**
* 启用
*
* @param eventId 事件id
* @author lkg
* @date 2024/9/12
*/
@GetMapping("/enable")
public Result<?> enable(@RequestParam String eventId) {
eventService.updateStatus(eventId, 1);
return Result.OK();
}
/**
* 禁用
*
* @param eventId 事件id
* @author lkg
* @date 2024/9/12
*/
@GetMapping("/disable")
public Result<?> disable(@RequestParam String eventId) {
eventService.updateStatus(eventId, 0);
return Result.OK();
}
/**
* 通过id删除
*
* @param id 事件id
* @return
*/
@GetMapping(value = "/delete")
public Result<?> delete(@RequestParam(name = "id") String id) {
eventService.deleteMain(id);
CompletableFuture.runAsync(() -> {
EventVO eventVO = eventService.queryInfo(id);
iXxlJobInfoService.deleteByInfosourceCode(eventVO.getEventCode());
//删除关键词
KeywordsVO keywordsVO = eventVO.getKeywordsVO();
if (keywordsVO != null) {
String wordsCode = keywordsVO.getWordsCode();
keyWordsService.remove(Wrappers.<KeyWords>lambdaQuery().eq(KeyWords::getWordsCode, wordsCode));
redisUtil.del(Constants.KEY_WORDS_TO_REDIS_PREFIX + wordsCode);
iXxlJobInfoService.deleteByInfosourceCode(wordsCode);
//todo 是否删除对应的资讯数据
}
});
return Result.OK();
}
/**
* 批量删除
*
* @param ids 事件id,多个用逗号隔开
* @author lkg
* @date 2024/12/17
*/
@GetMapping(value = "/deleteBatch")
public Result<?> deleteBatch(@RequestParam(name = "ids") String ids) {
String[] idArr = ids.split(",");
//删除
for (String id : idArr) {
eventService.deleteMain(id);
}
//异步删除关联关系
CompletableFuture.runAsync(() -> {
for (String id : idArr) {
EventVO eventVO = eventService.queryInfo(id);
iXxlJobInfoService.deleteByInfosourceCode(eventVO.getEventCode());
//删除关键词
KeywordsVO keywordsVO = eventVO.getKeywordsVO();
if (keywordsVO != null) {
String wordsCode = keywordsVO.getWordsCode();
keyWordsService.remove(Wrappers.<KeyWords>lambdaQuery().eq(KeyWords::getWordsCode, wordsCode));
redisUtil.del(Constants.KEY_WORDS_TO_REDIS_PREFIX + wordsCode);
iXxlJobInfoService.deleteByInfosourceCode(wordsCode);
}
}
});
//todo 是否删除对应的资讯数据
return Result.OK();
}
/**
* 1.7 发布
*
......@@ -162,26 +307,55 @@ public class EventManageController {
}
/**
* 项目列表
*
* @author lkg
* @date 2024/4/29
*/
@GetMapping("/projectList")
public Result<?> projectList() {
//获取当前登录用户
UserVo currentUser = UserUtil.getLoginUser();
Integer category = currentUser.getCategory();
String userId = null;
String customerId = null;
/*if (category.equals(Constants.COMMON_USER)) {
userId = currentUser.getUserId();
} else if (category.equals(Constants.ADMIN_USER)) {
customerId = currentUser.getCustomerId();
}*/
List<Node> projectList = eventService.projectList(userId, customerId);
return Result.OK(projectList);
* @param event 条件封装
* @描述 查询事件列表
* @作者 hejinyu
* @创建时间 2024/12/26
**/
@PostMapping("/listAll")
public Result<?> listAll(@RequestBody Event event) {
LambdaQueryWrapper<Event> queryWrapper = Wrappers.lambdaQuery();
setListAllQueryWrapper(event, queryWrapper);
List<Event> list = eventService.list(queryWrapper);
return Result.OK(list);
}
/**
* @param event 条件封装
* @param pageNo
* @param pageSize
* @描述 查询事件列表(分页,产业链关联事件时查询用)
* @作者 hejinyu
* @创建时间 2024/12/26
**/
@PostMapping("/pageListAll")
public Result<?> pageListAll(@RequestBody Event event, @RequestParam(name = "pageNo", defaultValue = "1") Integer pageNo,
@RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize) {
LambdaQueryWrapper<Event> queryWrapper = Wrappers.lambdaQuery();
setListAllQueryWrapper(event, queryWrapper);
Page<Event> page = new Page<>(pageNo, pageSize);
IPage<Event> pageList = eventService.page(page, queryWrapper);
return Result.OK(pageList);
}
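As a usage illustration only: a plain JDK 11 HttpClient call against the pageListAll endpoint above. The controller's base path and host are not visible in this diff, so the URL below is a placeholder; the JSON body mirrors the Event filter fields consumed by setListAllQueryWrapper.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PageListAllClientSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder host and base path; adjust to the real deployment.
        String url = "http://localhost:8080/event/manage/pageListAll?pageNo=1&pageSize=10";
        // Filter fields used by setListAllQueryWrapper: eventType, startDate, endDate, keyword.
        String body = "{\"eventType\":\"1\",\"startDate\":\"2024-01-01\",\"endDate\":\"2024-12-31\",\"keyword\":\"测试\"}";
        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // Result wrapper containing the IPage<Event> payload
    }
}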
private void setListAllQueryWrapper(Event event, LambdaQueryWrapper<Event> queryWrapper) {
String eventType = event.getEventType();
String startDate = event.getStartDate();
String endDate = event.getEndDate();
String keyword = event.getKeyword();
queryWrapper.eq(StringUtils.isNotEmpty(eventType), Event::getEventType, eventType);
if (StringUtils.isNotEmpty(startDate)) {
queryWrapper.ge(Event::getCreateTime, startDate);
}
if (StringUtils.isNotEmpty(endDate)) {
queryWrapper.le(Event::getCreateTime, endDate);
}
if (StringUtils.isNotEmpty(keyword)) {
queryWrapper.and(q -> {
q.eq(Event::getEventType, keyword).or().like(Event::getEventName, keyword).or().eq(Event::getEventDescribe, keyword);
});
}
}
/**
......@@ -500,213 +674,4 @@ public class EventManageController {
Page<EventNewPlatVO> pageList = eventService.newPlatPageList(subjectCondition, pageNo, pageSize);
return Result.OK(pageList);
}
/**
* 1.2 添加
*
* @param eventParam
* @return
*/
@PostMapping(value = "/add")
public Result<?> add(@RequestBody AddEventParam eventParam) {
KeywordsVO keywordsVO = eventParam.getKeywordsVO();
if (keywordsVO != null) {
UserVo currentUser = UserUtil.getLoginUser();
eventParam.setCreateBy(currentUser.getUsername());
Event event = eventService.saveMain(eventParam);
//新增关键词
KeyWords keyWords = keyWordsService.saveKeyword(event, keywordsVO.getKeyword(), keywordsVO.getExclusionWord());
CompletableFuture.runAsync(() -> {
//插入xxlJob
iXxlJobInfoService.eventInsert(event);
//关键词入缓存
KeyWordsDTO redisKeywordDTO = new KeyWordsDTO();
BeanUtils.copyProperties(keyWords, redisKeywordDTO);
redisKeywordDTO.setStartTime(event.getStartTime());
redisKeywordDTO.setEndTime(event.getEndTime());
redisKeywordDTO.setSearchEngines(new ArrayList<>(Constants.DEFAULT_SEARCH_ENGINE.values()));
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + keyWords.getWordsCode(), redisKeywordDTO);
//插入xxljob
iXxlJobInfoService.keyWordsInsert(redisKeywordDTO);
//为了立即响应,关键词新增时放入消息队列
kafkaTemplate.send(Constants.KEY_WORDS_COLLECT_TOPIC, JSON.toJSONString(redisKeywordDTO));
kafkaTemplate.send(Constants.EVENT_SUBJECT_MODEL, event.getEventCode());
});
return Result.OK();
} else {
return Result.FAIL(500, "关键词不能为空");
}
}
/**
* 1.3 编辑
*
* @param addEventParam
* @return
*/
@PostMapping(value = "/edit")
public Result<?> edit(@RequestBody AddEventParam addEventParam) {
//关键词
KeywordsVO keywordsVO = addEventParam.getKeywordsVO();
if (keywordsVO != null) {
UserVo currentUser = UserUtil.getLoginUser();
addEventParam.setUpdateTime(new Date());
addEventParam.setUpdateBy(currentUser.getUsername());
eventService.updateMain(addEventParam);
keyWordsService.update(Wrappers.<KeyWords>lambdaUpdate().eq(KeyWords::getId, keywordsVO.getId())
.set(KeyWords::getKeyWord, keywordsVO.getKeyword())
.set(KeyWords::getExclusionWord, keywordsVO.getExclusionWord()));
CompletableFuture.runAsync(() -> {
KeyWordsDTO redisKeywordDTO = new KeyWordsDTO();
BeanUtils.copyProperties(keywordsVO, redisKeywordDTO);
redisKeywordDTO.setKeyWord(keywordsVO.getKeyword());
redisKeywordDTO.setStartTime(addEventParam.getStartTime());
redisKeywordDTO.setEndTime(addEventParam.getEndTime());
redisKeywordDTO.setSearchEngines(new ArrayList<>(Constants.DEFAULT_SEARCH_ENGINE.values()));
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + keywordsVO.getWordsCode(), redisKeywordDTO);
//为了立即响应,关键词编辑时放入消息队列
kafkaTemplate.send(Constants.KEY_WORDS_COLLECT_TOPIC, JSON.toJSONString(redisKeywordDTO));
});
return Result.OK();
} else {
return Result.FAIL(500, "关键词不能为空");
}
}
/**
* 启用
*
* @param eventId 事件id
* @author lkg
* @date 2024/9/12
*/
@GetMapping("/enable")
public Result<?> enable(@RequestParam String eventId) {
updateStatus(eventId, 1);
return Result.OK();
}
/**
* 启用
*
* @param eventId 事件id
* @author lkg
* @date 2024/9/12
*/
@GetMapping("/disable")
public Result<?> disable(@RequestParam String eventId) {
updateStatus(eventId, 0);
return Result.OK();
}
/**
* 通过id删除
*
* @param id 事件id
* @return
*/
@GetMapping(value = "/delete")
public Result<?> delete(@RequestParam(name = "id") String id) {
eventService.deleteMain(id);
CompletableFuture.runAsync(() -> {
EventVO eventVO = eventService.queryInfo(id);
iXxlJobInfoService.deleteByInfosourceCode(eventVO.getEventCode());
//删除关键词
KeywordsVO keywordsVO = eventVO.getKeywordsVO();
if (keywordsVO != null) {
String wordsCode = keywordsVO.getWordsCode();
keyWordsService.remove(Wrappers.<KeyWords>lambdaQuery().eq(KeyWords::getWordsCode, wordsCode));
redisUtil.del(Constants.KEY_WORDS_TO_REDIS_PREFIX + wordsCode);
iXxlJobInfoService.deleteByInfosourceCode(wordsCode);
//todo 是否删除对应的资讯数据
}
});
return Result.OK();
}
/**
* 批量删除
*
* @param ids 事件id,多个用逗号隔开
* @author lkg
* @date 2024/12/17
*/
@GetMapping(value = "/deleteBatch")
public Result<?> deleteBatch(@RequestParam(name = "ids") String ids) {
String[] idArr = ids.split(",");
//删除
for (String id : idArr) {
eventService.deleteMain(id);
}
//异步删除关联关系
CompletableFuture.runAsync(() -> {
for (String id : idArr) {
EventVO eventVO = eventService.queryInfo(id);
iXxlJobInfoService.deleteByInfosourceCode(eventVO.getEventCode());
//删除关键词
KeywordsVO keywordsVO = eventVO.getKeywordsVO();
if (keywordsVO != null) {
String wordsCode = keywordsVO.getWordsCode();
keyWordsService.remove(Wrappers.<KeyWords>lambdaQuery().eq(KeyWords::getWordsCode, wordsCode));
redisUtil.del(Constants.KEY_WORDS_TO_REDIS_PREFIX + wordsCode);
iXxlJobInfoService.deleteByInfosourceCode(wordsCode);
}
}
});
//todo 是否删除对应的资讯数据
return Result.OK();
}
/**
* @param event 条件封装
* @描述 查询事件列表
* @作者 hejinyu
* @创建时间 2024/12/26
**/
@PostMapping("/listAll")
public Result<?> listAll(@RequestBody Event event) {
LambdaQueryWrapper<Event> queryWrapper = Wrappers.lambdaQuery();
setListAllQueryWrapper(event, queryWrapper);
List<Event> list = eventService.list(queryWrapper);
return Result.OK(list);
}
/**
* @param event 条件封装
* @param pageNo
* @param pageSize
* @描述 查询事件列表(分页,产业链关联事件时查询用)
* @作者 hejinyu
* @创建时间 2024/12/26
**/
@PostMapping("/pageListAll")
public Result<?> pageListAll(@RequestBody Event event, @RequestParam(name = "pageNo", defaultValue = "1") Integer pageNo,
@RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize) {
LambdaQueryWrapper<Event> queryWrapper = Wrappers.lambdaQuery();
setListAllQueryWrapper(event, queryWrapper);
Page<Event> page = new Page<>(pageNo, pageSize);
IPage<Event> pageList = eventService.page(page, queryWrapper);
return Result.OK(pageList);
}
private void setListAllQueryWrapper(@RequestBody Event event, LambdaQueryWrapper<Event> queryWrapper) {
String eventType = event.getEventType();
String startDate = event.getStartDate();
String endDate = event.getEndDate();
String keyword = event.getKeyword();
queryWrapper.eq(StringUtils.isNotEmpty(eventType), Event::getEventType, eventType);
if (StringUtils.isNotEmpty(startDate)) {
queryWrapper.ge(Event::getCreateTime, startDate);
}
if (StringUtils.isNotEmpty(endDate)) {
queryWrapper.le(Event::getCreateTime, endDate);
}
if (StringUtils.isNotEmpty(keyword)) {
queryWrapper.and(q -> {
q.eq(Event::getEventType, keyword).or().like(Event::getEventName, keyword).or().eq(Event::getEventDescribe, keyword);
});
}
}
}
......@@ -399,6 +399,13 @@ public class InformationController {
return Result.OK(recommendList);
}
/**
* 根据条件移除专题下数据-研究中心
*
* @param params 参数
* @author lkg
* @date 2025/2/20
*/
@PostMapping("/removeByCondition")
public Result<?> removeByCondition(@RequestBody JSONObject params){
String subjectId = params.getString("subjectId");
......@@ -407,9 +414,32 @@ public class InformationController {
}
JSONArray themeIds = params.getJSONArray("themeIds");
if (CollectionUtils.isEmpty(themeIds)) {
return Result.FAIL("主题id不能为空");
return Result.FAIL("移除的主题id集合不能为空");
}
informationService.removeByCondition(subjectId, JSONArray.parseArray(themeIds.toJSONString(), String.class));
return null;
return Result.OK();
}
/**
* 根据条件补充专题下数据-研究中心
*
* @param params 参数
* @author lkg
* @date 2025/2/20
*/
@PostMapping("/supplyByCondition")
public Result<?> supplyByCondition(@RequestBody JSONObject params){
String subjectId = params.getString("subjectId");
if (StringUtils.isEmpty(subjectId)) {
return Result.FAIL("专题id不能为空");
}
JSONArray themeIds = params.getJSONArray("themeIds");
if (CollectionUtils.isEmpty(themeIds)) {
return Result.FAIL("新增的主题id集合不能为空");
}
//todo 新增 redis塞缓存,对接数据处理
//2025-01-21:123,123,123
return Result.OK();
}
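Purely a hypothetical sketch of the pending todo above, not part of the commit: one way to hand the supplement request to the data-processing side over Redis, assuming a RedisUtil bean like the one injected in the other controllers, and guessing the "date -> comma-joined theme ids" layout from the inline note.
//hypothetical: cache the supplement request for the downstream data-processing job
String supplyKey = "subject:supply:" + subjectId + ":" + new java.text.SimpleDateFormat("yyyy-MM-dd").format(new java.util.Date()); //key layout is assumed, not from the source
String supplyValue = String.join(",", JSONArray.parseArray(themeIds.toJSONString(), String.class));
redisUtil.set(supplyKey, supplyValue); //redisUtil is assumed to be injected in this controller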
}
......@@ -8,8 +8,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.zzsn.event.constant.Constants;
import com.zzsn.event.constant.Result;
import com.zzsn.event.entity.Event;
import com.zzsn.event.es.EsService;
import com.zzsn.event.service.CommonService;
import com.zzsn.event.service.IEventService;
import com.zzsn.event.service.IInfoSourceService;
import com.zzsn.event.service.ISubjectTypeService;
......@@ -28,7 +26,6 @@ import org.springframework.web.bind.annotation.*;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
......@@ -53,8 +50,6 @@ public class PlatEventManageController {
@Autowired
private IInfoSourceService infoSourceService;
@Autowired
private EsService esService;
@Autowired
private EsOpUtil esOpUtil;
/**
......@@ -65,7 +60,17 @@ public class PlatEventManageController {
*/
@GetMapping("/leftTypeTree")
public Result<?> leftTree() {
List<SubjectTreeVO> tree = subjectTypeService.eventAndTypeTree();
//获取当前登录用户
UserVo currentUser = UserUtil.getLoginUser();
Integer category = currentUser.getCategory();
String userId = null;
String customerId = null;
if (category.equals(Constants.COMMON_USER)) {
userId = currentUser.getUserId();
} else if (category.equals(Constants.ADMIN_USER)) {
customerId = currentUser.getCustomerId();
}
List<SubjectTreeVO> tree = subjectTypeService.eventAndTypeTree(userId,customerId);
return Result.OK(tree);
}
......@@ -233,134 +238,6 @@ public class PlatEventManageController {
return Result.OK();
}
/**
* 专题关联信息源采集量统计-导出excel
*
* @param subjectId 专题id/专题分类id
* @param startDate 开始时间
* @param endDate 结束时间
* @author lkg
*/
@GetMapping("/exportStatisticsExcel")
public void exportStatisticsExcel(@RequestParam(name = "subjectId") String subjectId,
@RequestParam(name = "startDate") String startDate,
@RequestParam(name = "endDate") String endDate,
HttpServletResponse response) {
List<String> subjectIdList = new ArrayList<>();
try {
UserVo currentUser = UserUtil.getLoginUser();
String userId = null;
if (currentUser.getCategory().equals(Constants.COMMON_USER)) {
userId = currentUser.getUserId();
}
//查询类别id的所有明细id
if (StringUtils.isNotEmpty(subjectId) && !"0".equals(subjectId)) {
List<String> typeIds = subjectTypeService.belowIdList(subjectId, null);
subjectIdList = eventService.selectSubjectByTypeIds(userId, typeIds);
}
subjectIdList.add(subjectId);
List<List<String>> statistics = eventService.subjectStatistics(subjectIdList, startDate, endDate);
List<String> dateList = DateUtil.betweenDate(startDate, endDate);
List<String> headers = Stream.of("信息源id", "网站名称", "栏目名称", "栏目地址", "时间段内采集总量").collect(Collectors.toList());
headers.addAll(dateList);
XSSFWorkbook workbook = new XSSFWorkbook();
ExcelExportUtil.exportExcelData(workbook, 0, headers, statistics, "信息源采集信息汇总");
String name = "statisticsExcel.xls";
setResponseHeader(response, name);
ServletOutputStream outputStream = response.getOutputStream();
workbook.write(outputStream);
outputStream.flush();
outputStream.close();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* 事件对应采集库的资讯分页列表
*
* @param eventDataCondition 筛选条件
* @param sourceId 信息源id
* @param pageNo 当前页
* @param pageSize 返回条数
* @param column 排序字段
* @param order 排序方式
* @param isAll 是否全部
* @param video 是否视频
* @param crawler 采集器
* @param isSubject 是否专题
* @param source 来源(类别/客户)
* @author lkg
* @date 2024/5/6
*/
/* @GetMapping("/collectPageList")
public Result<?> collectPageList(InfoDataSearchCondition eventDataCondition,
@RequestParam(name = "sourceId", defaultValue = "") String sourceId,
@RequestParam(name = "pageNo", defaultValue = "1") Integer pageNo,
@RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize,
@RequestParam(name = "column", defaultValue = "common") String column,
@RequestParam(name = "order", defaultValue = "desc") String order,
@RequestParam(name = "isAll", defaultValue = "0") String isAll,
@RequestParam(name = "video", defaultValue = "") String video,
@RequestParam(name = "crawler", defaultValue = "") String crawler,
@RequestParam(name = "isSubject", defaultValue = "0") String isSubject,
@RequestParam(name = "source", defaultValue = "type") String source) {
UserVo currentUser = UserUtil.getLoginUser();
String userId = null;
String tenantId = currentUser.getCustomerId();
if (currentUser.getCategory().equals(Constants.COMMON_USER)) {
userId = currentUser.getUserId();
}
IPage<EventDataVO> pageList = esService.collectPageList(eventDataCondition, sourceId, pageNo, pageSize, column, order, isAll, video, crawler, userId, tenantId, isSubject, source);
return Result.OK(pageList);
}*/
/**
* 事件对应专题库的资讯分页列表
*
* @param subjectInfoVo 筛选条件
* @param userId 用户id
* @param video 是否视频
* @param pageNo 当前页
* @param isCustomer 是否客户
* @param pageSize 返回条数
* @param column 排序字段
* @param order 排序方式
* @param crawler 采集器
* @param isSubject 是否换题
* @param labelIds 关联的标签id
* @param labelTypeIds 企业标签id
* @param relationIds 企业信用代码
* @param sourceId 信息源id
* @author lkg
* @date 2024/5/6
*/
/*@GetMapping("/subjectPageList")
public Result<?> subjectPageList(InfoDataSearchCondition subjectInfoVo,
@RequestParam(name = "userId", defaultValue = "") String userId,
@RequestParam(name = "video", defaultValue = "") String video,
@RequestParam(name = "pageNo", defaultValue = "1") Integer pageNo,
@RequestParam(name = "isCustomer", defaultValue = "0") Integer isCustomer,
@RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize,
@RequestParam(name = "column", defaultValue = "common") String column,
@RequestParam(name = "order", defaultValue = "desc") String order,
@RequestParam(name = "crawler", defaultValue = "") String crawler,
@RequestParam(name = "isSubject", defaultValue = "1") String isSubject,
@RequestParam(name = "labelIds", required = false) String labelIds,
@RequestParam(name = "labelTypeIds", required = false) String labelTypeIds,
@RequestParam(name = "relationIds", required = false) String relationIds,
@RequestParam(name = "sourceId", required = false) String sourceId) throws Exception {
List<String> socialCreditCodeList = new ArrayList<>();
if (StringUtils.isNotEmpty(relationIds)) {
socialCreditCodeList = Arrays.asList(relationIds.split(","));
} else if (StringUtils.isNotEmpty(labelTypeIds)) {
socialCreditCodeList = eventService.codesByLabels(Arrays.asList(labelTypeIds.split(",")));
}
IPage<DisplayInfo> pageList = esService.subjectPageList(userId, subjectInfoVo, video, pageNo, pageSize, column, order, crawler, isSubject, labelIds, socialCreditCodeList, sourceId, isCustomer);
return Result.OK(pageList);
}*/
/**
* 专题绑定的关键词组-分页列表
......@@ -443,22 +320,6 @@ public class PlatEventManageController {
}
}
private void setResponseHeader(HttpServletResponse response, String name) {
try {
try {
name = new String(name.getBytes(), "ISO8859-1");
} catch (Exception e) {
e.printStackTrace();
}
response.setContentType("application/octet-stream;charset=ISO8859-1");
response.setHeader("Content-Disposition", "attachment;filename=" + name);
response.setHeader("Pargam", "no-cache");
response.setHeader("Cache-Control", "no-cache");
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* @param event 条件封装
......
......@@ -229,6 +229,32 @@ public class EventDataController {
}
/**
* 事件配置校验
*
* @param addEventParam 参数
* @author lkg
* @date 2025/2/5
*/
@PostMapping("/configVerification")
public Result<?> configVerification(@RequestBody AddEventParam addEventParam){
Boolean verification = eventService.configVerification(addEventParam);
return Result.OK(verification);
}
/**
* 清空专题数据
*
* @param eventId 专题id
* @author lkg
* @date 2025/2/6
*/
@GetMapping("/clearData")
public Result<?> clearData(@RequestParam String eventId){
eventService.clearSubjectData(eventId);
return Result.OK("正在清空数据");
}
/**
* 通过id删除
*
* @param id 事件id
......
......@@ -137,6 +137,9 @@ public class EventHomeController {
try{
String res = HttpUtil.get(checkToken + token);
JSONObject data = JSONObject.parseObject(res).getJSONObject("data");
if(data == null){
return Result.FAIL("token失效");
}
JSONArray powerCodeSet = data.getJSONArray("powerCodeSet");
return Result.OK(powerCodeSet.contains(key));
}catch (Exception e){
......
package com.zzsn.event.controller.yjzx;
import com.alibaba.fastjson2.JSON;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.zzsn.clb.common.model.task.dto.titr.KeyWordsDTO;
import com.zzsn.event.constant.Constants;
import com.zzsn.event.constant.Result;
import com.zzsn.event.entity.Event;
import com.zzsn.event.entity.KeyWords;
import com.zzsn.event.service.IEventService;
import com.zzsn.event.service.IKeyWordsService;
import com.zzsn.event.service.ISubjectInfoSourceMapService;
import com.zzsn.event.service.LabelEntityService;
import com.zzsn.event.util.RedisUtil;
import com.zzsn.event.util.tree.Node;
import com.zzsn.event.util.user.UserUtil;
import com.zzsn.event.util.user.UserVo;
import com.zzsn.event.vo.*;
import com.zzsn.event.xxljob.entity.XxlJobInfo;
import com.zzsn.event.xxljob.service.IXxlJobInfoService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
* 事件后台管理
*
* @author lkg
* @date 2024/4/8
*/
@Slf4j
@Api(tags = "事件后台管理")
@RestController
@RequestMapping("/plat/manage")
public class NewEventManageController {
@Autowired
private IEventService eventService;
@Autowired
private LabelEntityService labelEntityService;
@Autowired
private IXxlJobInfoService iXxlJobInfoService;
@Autowired
private IKeyWordsService keyWordsService;
@Autowired
private RedisUtil redisUtil;
@Resource
private KafkaTemplate<String, String> kafkaTemplate;
@Value("${kafka.topic.event.run:}")
private String EVENT_MODEL_KAFKA_CHANNEL;
/**
* 分页列表-后台管理
*
* @param eventName 事件名称
* @param eventType 事件分类id
* @param facePublic 是否公开(0-未公开;1-已公开)
* @param publishStatus 发布状态(0-未发布;1-已发布)
* @param startTime 开始时间
* @param endTime 结束时间
* @param order 排序字段
* @param orderType 排序方式
* @param pageNo 当前页
* @param pageSize 返回条数
* @author lkg
* @date 2024/4/8
*/
@ApiOperation(value = "事件-分页列表查询", notes = "事件-分页列表查询")
@GetMapping(value = "/pageList")
public Result<?> pageList(@RequestParam(name = "eventName", required = false) String eventName,
@RequestParam(name = "eventType", required = false) Integer eventType,
@RequestParam(name = "facePublic", required = false) Integer facePublic,
@RequestParam(name = "publishStatus", required = false) Integer publishStatus,
@RequestParam(name = "startTime", required = false) String startTime,
@RequestParam(name = "endTime", required = false) String endTime,
@RequestParam(name = "order", required = false) String order,
@RequestParam(name = "orderType", defaultValue = "asc") String orderType,
@RequestParam(name = "pageNo", defaultValue = "1") Integer pageNo,
@RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize) {
IPage<EventManageVO> pageList = eventService.pageList(eventName, eventType, facePublic, publishStatus, startTime, endTime, order, orderType, pageNo, pageSize);
return Result.OK(pageList);
}
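As a usage illustration only: a JDK 11 HttpClient call against the paging endpoint above. The /plat/manage/pageList path comes from this controller's mappings; host, port, and any authentication are placeholders.
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class PlatPageListClientSketch {
    public static void main(String[] args) throws Exception {
        // Host/port are placeholders; the path is taken from @RequestMapping("/plat/manage") + @GetMapping("/pageList").
        String eventName = URLEncoder.encode("测试事件", StandardCharsets.UTF_8);
        String url = "http://localhost:8080/plat/manage/pageList?pageNo=1&pageSize=10&eventName=" + eventName;
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body()); // Result wrapper containing IPage<EventManageVO>
    }
}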
/**
* 地域信息-树型结构
*
* @param type 类别(1-国际;2-国内)
* @author lkg
* @date 2024/4/10
*/
@GetMapping("/regionTree")
public Result<?> regionTree(@RequestParam Integer type) {
List<LabelTypeVO> nodes = labelEntityService.regionTree(type);
return Result.OK(nodes);
}
/**
* 通过id查询
*
* @param id 事件id
* @return
*/
/*@GetMapping(value = "/queryById")
public Result<?> queryById(@RequestParam(name = "id") String id) {
EventVO eventVO = eventService.queryInfo(id);
return Result.OK(eventVO);
}*/
/**
* 1.2 添加
*
* @param eventParam
* @return
*/
@ApiOperation(value = "事件-添加", notes = "事件-添加")
@PostMapping(value = "/add")
public Result<?> add(@RequestBody AddEventParam eventParam) {
KeywordsVO keywordsVO = eventParam.getKeywordsVO();
if (keywordsVO != null) {
UserVo currentUser = UserUtil.getLoginUser();
eventParam.setCreateBy(currentUser.getUsername());
Event event = eventService.saveMain(eventParam);
//新增关键词
KeyWords keyWords = keyWordsService.saveKeyword(event, keywordsVO.getKeyword(), keywordsVO.getExclusionWord());
CompletableFuture.runAsync(() -> {
//插入xxlJob
iXxlJobInfoService.eventInsert(event);
//-------------关键词采集相关,需优化,采集那边说目前搜索引擎效果也不好,先这样。
//关键词入缓存
KeyWordsDTO redisKeywordDTO = new KeyWordsDTO();
BeanUtils.copyProperties(keyWords, redisKeywordDTO);
redisKeywordDTO.setStartTime(event.getStartTime());
redisKeywordDTO.setEndTime(event.getEndTime());
redisKeywordDTO.setSearchEngines(new ArrayList<>(Constants.DEFAULT_SEARCH_ENGINE.values()));
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + keyWords.getWordsCode(), redisKeywordDTO);
//插入xxljob
iXxlJobInfoService.keyWordsInsert(redisKeywordDTO);
//为了立即响应,关键词新增时放入消息队列
kafkaTemplate.send(Constants.KEY_WORDS_COLLECT_TOPIC, JSON.toJSONString(redisKeywordDTO));
//----------------------------------------------------------------------------------------------
kafkaTemplate.send(EVENT_MODEL_KAFKA_CHANNEL, event.getEventCode());
});
return Result.OK();
} else {
return Result.FAIL(500, "关键词不能为空");
}
}
/**
* 事件配置校验
*
* @param addEventParam 参数
* @author lkg
* @date 2025/2/5
*/
@PostMapping("/configVerification")
public Result<?> configVerification(@RequestBody AddEventParam addEventParam){
Boolean verification = eventService.configVerification(addEventParam);
return Result.OK(verification);
}
/**
* 1.3 编辑
*
* @param addEventParam
* @return
*/
@ApiOperation(value = "事件-编辑", notes = "事件-编辑")
@PostMapping(value = "/edit")
public Result<?> edit(@RequestBody AddEventParam addEventParam) {
//关键词
KeywordsVO keywordsVO = addEventParam.getKeywordsVO();
if (keywordsVO != null) {
UserVo currentUser = UserUtil.getLoginUser();
addEventParam.setUpdateTime(new Date());
addEventParam.setUpdateBy(currentUser.getUsername());
eventService.updateMain(addEventParam);
keyWordsService.update(Wrappers.<KeyWords>lambdaUpdate().eq(KeyWords::getId, keywordsVO.getId())
.set(KeyWords::getKeyWord, keywordsVO.getKeyword())
.set(KeyWords::getExclusionWord, keywordsVO.getExclusionWord()));
CompletableFuture.runAsync(() -> {
KeyWordsDTO redisKeywordDTO = new KeyWordsDTO();
BeanUtils.copyProperties(keywordsVO, redisKeywordDTO);
redisKeywordDTO.setKeyWord(keywordsVO.getKeyword());
redisKeywordDTO.setStartTime(addEventParam.getStartTime());
redisKeywordDTO.setEndTime(addEventParam.getEndTime());
redisKeywordDTO.setSearchEngines(new ArrayList<>(Constants.DEFAULT_SEARCH_ENGINE.values()));
redisUtil.set(Constants.KEY_WORDS_TO_REDIS_PREFIX + keywordsVO.getWordsCode(), redisKeywordDTO);
//为了立即响应,关键词编辑时放入消息队列
kafkaTemplate.send(Constants.KEY_WORDS_COLLECT_TOPIC, JSON.toJSONString(redisKeywordDTO));
});
return Result.OK();
} else {
return Result.FAIL(500, "关键词不能为空");
}
}
/**
* 启用
*
* @param eventId 事件id
* @author lkg
* @date 2024/9/12
*/
@GetMapping("/enable")
public Result<?> enable(@RequestParam String eventId) {
eventService.updateStatus(eventId, 1);
return Result.OK();
}
/**
* 禁用
*
* @param eventId 事件id
* @author lkg
* @date 2024/9/12
*/
@GetMapping("/disable")
public Result<?> disable(@RequestParam String eventId) {
eventService.updateStatus(eventId, 0);
return Result.OK();
}
/**
* 清空专题数据
*
* @param subjectId 专题id
* @author lkg
* @date 2025/2/6
*/
@GetMapping("/clearData")
public Result<?> clearData(String subjectId){
eventService.clearSubjectData(subjectId);
return Result.OK("正在清空数据");
}
/**
* 通过id删除
*
* @param id 事件id
* @return
*/
@ApiOperation(value = "事件-通过id删除", notes = "事件-通过id删除")
@GetMapping(value = "/delete")
public Result<?> delete(@RequestParam(name = "id") String id) {
EventVO eventVO = eventService.queryInfo(id);
eventService.deleteMain(id);
CompletableFuture.runAsync(() -> {
iXxlJobInfoService.deleteByInfosourceCode(eventVO.getEventCode());
//删除关键词
KeywordsVO keywordsVO = eventVO.getKeywordsVO();
if (keywordsVO != null) {
String wordsCode = keywordsVO.getWordsCode();
keyWordsService.remove(Wrappers.<KeyWords>lambdaQuery().eq(KeyWords::getWordsCode, wordsCode));
redisUtil.del(Constants.KEY_WORDS_TO_REDIS_PREFIX + wordsCode);
iXxlJobInfoService.deleteByInfosourceCode(wordsCode);
}
});
return Result.OK();
}
/**
* 1.5 批量删除
*
* @param ids 事件id集合
* @return
*/
@ApiOperation(value = "事件-批量删除", notes = "事件-批量删除")
@GetMapping(value = "/deleteBatch")
public Result<?> deleteBatch(@RequestParam(name = "ids") String ids) {
List<CompletableFuture<Void>> asyncList = new ArrayList<>();
for (String id : ids.split(",")) {
asyncList.add(CompletableFuture.runAsync(() -> {
EventVO eventVO = eventService.queryInfo(id);
eventService.deleteMain(id);
iXxlJobInfoService.deleteByInfosourceCode(eventVO.getEventCode());
//删除关键词
KeywordsVO keywordsVO = eventVO.getKeywordsVO();
if (keywordsVO != null) {
String wordsCode = keywordsVO.getWordsCode();
keyWordsService.remove(Wrappers.<KeyWords>lambdaQuery().eq(KeyWords::getWordsCode, wordsCode));
redisUtil.del(Constants.KEY_WORDS_TO_REDIS_PREFIX + wordsCode);
iXxlJobInfoService.deleteByInfosourceCode(wordsCode);
}
}));
}
//等待全部执行完
CompletableFuture.allOf(asyncList.toArray(new CompletableFuture[0])).join();
return Result.OK();
}
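deleteBatch above fans one asynchronous cleanup task out per id and then blocks until every task has finished; a minimal JDK-only sketch of that fan-out/join pattern follows (the task body is a stand-in, not the real cleanup):
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

public class FanOutJoinSketch {
    public static void main(String[] args) {
        List<CompletableFuture<Void>> asyncList = new ArrayList<>();
        for (String id : "e1,e2,e3".split(",")) {
            // Each id gets its own task, mirroring the per-event cleanup in deleteBatch.
            asyncList.add(CompletableFuture.runAsync(() -> System.out.println("cleaning " + id)));
        }
        // join() blocks the caller until all tasks complete, so the HTTP response
        // is only returned after every cleanup has run.
        CompletableFuture.allOf(asyncList.toArray(new CompletableFuture[0])).join();
        System.out.println("all cleanups finished");
    }
}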
/**
* 1.7 发布
*
* @return
*/
@PostMapping(value = "/publish")
public Result<?> publish(@RequestBody Event event) {
Event byId = eventService.getById(event.getId());
byId.setUpdateTime(new Date());
byId.setPublishDate(event.getPublishDate());
byId.setPublishStatus(event.getPublishStatus());
eventService.updateById(byId);
return Result.OK();
}
/**
* 模型信息列表
*
* @author lkg
* @date 2024/4/11
*/
@GetMapping("/modelList")
public Result<?> modelList() {
List<ModelVO> modelVOS = eventService.modelList();
return Result.OK(modelVOS);
}
}
......@@ -124,7 +124,7 @@ public class Event {
/**
* 状态(0-禁用;1-启用)
*/
private Integer status = 1;
private Integer status;
/**
* 专题最近一次分析时间
*/
......
......@@ -226,7 +226,7 @@ public class EsService {
*/
public IPage<SubjectDataVo> frontListByPage(List<String> subjectIdList, String searchWord, String position, Integer category,
String labelId, String column, String order, int pageNo, int pageSize) {
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
//设置分页参数
searchSourceBuilder.size(pageSize);
......@@ -340,7 +340,7 @@ public class EsService {
* @date 2024/4/10
*/
public List<SubjectDataVo> queryRecommendList(String subjectId, String id, String title, Integer pageNo, Integer pageSize) {
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
//设置分页参数
int offset = (pageNo - 1) * pageSize;
......@@ -354,7 +354,7 @@ public class EsService {
searchSourceBuilder.fetchSource(fetchFields, null);
//创建查询对象
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.termQuery("checkStatus", 1));
//boolQuery.must(QueryBuilders.termQuery("checkStatus", 1));
boolQuery.must(QueryBuilders.termQuery("subjectId.keyword", subjectId));
boolQuery.must(QueryBuilders.matchQuery("title", title));
boolQuery.mustNot(QueryBuilders.termQuery("id", id));
......@@ -526,7 +526,7 @@ public class EsService {
*/
public List<SubjectDataVo> exportDataList(List<String> subjectIdList, String searchWord, String position, Integer category,
List<String> articleIdList, String column, String order, Integer type, Integer pageSize) {
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
if (CollectionUtils.isNotEmpty(articleIdList)) {
pageSize = articleIdList.size();
......@@ -729,11 +729,10 @@ public class EsService {
//排序规则
String column = searchCondition.getColumn();
String order = searchCondition.getOrder();
if (searchCondition.getTopSortValid() == 1) {
searchSourceBuilder.sort("topNum", SortOrder.DESC);
}
switch (column) {
case "topNum":
searchSourceBuilder.sort("topNum", SortOrder.DESC);
searchSourceBuilder.sort("publishDate", SortOrder.DESC);
break;
case "score":
if (order.equals("asc")) {
searchSourceBuilder.sort("score", SortOrder.ASC);
......@@ -793,7 +792,7 @@ public class EsService {
List<String> list = new ArrayList<>();
List<String> idList = Arrays.asList(ids.split(","));
List<String> beInStorageList = idList.stream().map(uniqueCode -> subjectId + uniqueCode).collect(Collectors.toList());
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
//创建查询对象
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.termsQuery("id", beInStorageList));
......@@ -950,7 +949,7 @@ public class EsService {
*/
public List<EventViewVO> viewpointList(String eventId, String relationId) {
List<EventViewVO> dataList = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
//排序方式 按发布时间升序
searchSourceBuilder.sort("publishDate", SortOrder.DESC);
......@@ -1004,11 +1003,9 @@ public class EsService {
int topNum = 0;
SearchRequest searchRequest;
if (StringUtils.isEmpty(index)) {
String[] indexArr = EsIndexUtil.getIndexLatelyTwoYear(Constants.SUBJECT_INDEX);
searchRequest = new SearchRequest(indexArr);
} else {
searchRequest = new SearchRequest(index);
index = Constants.SUBJECT_INDEX;
}
searchRequest = new SearchRequest(index);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
//设置分页参数
searchSourceBuilder.size(1);
......@@ -1042,7 +1039,7 @@ public class EsService {
//调用判重接口
public boolean duplicationByTitleOrSourceAddress(DisplayInfo displayInfo) {
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
BoolQueryBuilder shouldBoolQuery = QueryBuilders.boolQuery();
......@@ -1209,7 +1206,7 @@ public class EsService {
regionIds.add(node.getId());
}
//根据地区的id从ES查询相关的记录
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......@@ -1298,7 +1295,7 @@ public class EsService {
*/
public List<CountVO> groupByOrigin(String subjectId, String rangField, String startDate, String endDate, int size) {
List<CountVO> data = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......@@ -1445,7 +1442,7 @@ public class EsService {
*/
public List<CountVO> overView(InfoDataSearchCondition searchCondition) {
List<CountVO> list = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......@@ -1500,7 +1497,7 @@ public class EsService {
*/
public List<CountVO> groupByDate(InfoDataSearchCondition searchCondition) {
List<CountVO> list = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......@@ -1588,7 +1585,7 @@ public class EsService {
*/
public List<CountVO> groupByInfoSourceTag(InfoDataSearchCondition searchCondition) {
List<CountVO> list = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......@@ -1634,7 +1631,7 @@ public class EsService {
*/
public List<CountVO> keywordsCount(String subjectId, List<String> excludeKeywords, Integer size) {
List<CountVO> list = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......@@ -1681,7 +1678,7 @@ public class EsService {
*/
private List<CountVO> groupByTerm(InfoDataSearchCondition searchCondition, String groupName, String field, boolean sort) {
List<CountVO> list = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexLatelyTwoYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.trackTotalHits(true);
searchSourceBuilder.size(0);
......
......@@ -122,5 +122,13 @@ public interface SubjectTypeMapper extends BaseMapper<SubjectType> {
*/
Integer typeBindEventCount(@Param("typeIds") List<String> typeIds);
List<SubjectTreeVO> eventAndTypeTree();
/**
* 可用的事件和事件分类列表
*
* @param userId 用户id
* @param customerId 客户id
* @author lkg
* @date 2024/4/29
*/
List<SubjectTreeVO> eventAndTypeTree(@Param("userId") String userId, @Param("customerId") String customerId);
}
......@@ -34,6 +34,8 @@
t.relation_events,
t.event_type,
t.sort_order,
t.yn_collect,
t.first_open_time,
stm.type_id as subjectTypeId,
psm.project_id,
c.type_name,
......
......@@ -158,17 +158,35 @@
select x.* from (
select s.id,s.type_name as name,s.pid,'false' as ynSubject,null as startTime,null as endTime,s.create_time
from subject_type s
<if test="userId !=null and userId != ''">
inner join sys_user_data_permission dp on s.id=dp.permission_id and dp.user_id = #{userId}
</if>
<if test="customerId !=null and customerId != ''">
inner join customer_data_permission_map mp on s.id=mp.permission_id and mp.customer_id = #{customerId}
</if>
where s.category = 2 and s.status = 1
union
select n.id,n.name,m.id as pid,'true' as ynSubject,n.start_time,n.end_time,n.create_time from
(
select s.id,s.type_name as name,s.pid from subject_type s
<if test="userId !=null and userId != ''">
inner join sys_user_data_permission dp on s.id=dp.permission_id and dp.user_id = #{userId}
</if>
<if test="customerId !=null and customerId != ''">
inner join customer_data_permission_map mp on s.id=mp.permission_id and mp.customer_id = #{customerId}
</if>
where s.category = 2 and s.status = 1
) m
inner join subject_type_map stm on m.id = stm.type_id
inner join
(
select s.id,s.event_name as name,s.start_time,s.end_time,s.create_time from event s
<if test="userId !=null and userId != ''">
inner join sys_user_data_permission dp on s.id=dp.permission_id and dp.user_id = #{userId}
</if>
<if test="customerId !=null and customerId != ''">
inner join customer_data_permission_map mp on s.id=mp.permission_id and mp.customer_id = #{customerId}
</if>
where s.status = 1
) n on stm.subject_id = n.id
) x
......
package com.zzsn.event.service;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.zzsn.event.entity.*;
import com.zzsn.event.enums.CodePrefixEnum;
import com.zzsn.event.util.CodeGenerateUtil;
......@@ -56,6 +57,7 @@ public class EventSimpleService {
Event event = new Event();
BeanUtils.copyProperties(addEventVO, event);
event.setStatus(1);
event.setFirstOpenTime(new Date());
Date endTime = event.getEndTime();
if (endTime == null) {
endTime = DateUtil.addDate(new Date(), 15);
......@@ -96,7 +98,8 @@ public class EventSimpleService {
BeanUtils.copyProperties(addEventVO, event);
eventService.updateById(event);
CompletableFuture.runAsync(()->{
//地域关系绑定
//地域绑定关系修改
eventRegionMapService.remove(Wrappers.<EventRegionMap>lambdaQuery().eq(EventRegionMap::getEventId, eventId));
List<RegionVO> regionList = addEventVO.getRegionList();
addRegionMap(eventId, regionList);
});
......
......@@ -120,5 +120,13 @@ public interface ISubjectTypeService extends IService<SubjectType> {
*/
List<SubjectTreeVO> subjectAndCustomerTree(String userId,String customerId);
List<SubjectTreeVO> eventAndTypeTree();
/**
* 可用的事件和事件分类列表
*
* @param userId 用户id
* @param customerId 客户id
* @author lkg
* @date 2024/4/29
*/
List<SubjectTreeVO> eventAndTypeTree(String userId, String customerId);
}
......@@ -194,6 +194,13 @@ public interface InformationService {
*/
void importDataInfo(List<List<String>> lists, String subjectId);
/**
* 移除专题下的数据-研究中心
*
* @param subjectId 专题id
* @param themeIds 主题(关联标签)id集合
* @author lkg
* @date 2025/2/20
*/
void removeByCondition(String subjectId, List<String> themeIds);
}
......@@ -55,7 +55,7 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
long totalCount = 0L;
long max = 0L;
String maxTime = null;
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, null, startTime, endTime, false);
searchSourceBuilder.size(0);
searchSourceBuilder.trackTotalHits(true);
......@@ -103,7 +103,7 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
CountVO countVO = new CountVO();
countVO.setName(name);
long count = 0L;
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, labelId, startTime, endTime, false);
searchSourceBuilder.size(0);
searchSourceBuilder.trackTotalHits(true);
......@@ -133,7 +133,7 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
LabelEntity labelEntity = labelEntityService.getById(labelId);
countVO.setLabelId(labelEntity.getId());
countVO.setName(labelEntity.getName());
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, labelId, startTime, endTime, false);
searchSourceBuilder.size(0);
searchSourceBuilder.trackTotalHits(true);
......@@ -172,7 +172,7 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
@Override
public List<CountVO> origin(String subjectId, String startTime, String endTime) {
List<CountVO> list = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, null, startTime, endTime, true);
searchSourceBuilder.size(0);
searchSourceBuilder.trackTotalHits(true);
......@@ -323,7 +323,7 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
@Override
public List<CountVO> flowData(String subjectId, String startTime, String endTime, Integer type) {
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(subjectId, null, startTime, endTime, false);
searchSourceBuilder.size(0);
searchSourceBuilder.trackTotalHits(true);
......@@ -359,7 +359,7 @@ public class EsStatisticsServiceImpl implements EsStatisticsService {
@Override
public long totalCount(List<String> eventIdList, String startTime, String endTime) {
long count = 0;
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
String eventIds = String.join(",", eventIdList);
SearchSourceBuilder searchSourceBuilder = formatSourceBuilder(eventIds, null, startTime, endTime, false);
searchSourceBuilder.size(0);
......
......@@ -76,7 +76,9 @@ public class EventExtractTaskServiceImpl extends ServiceImpl<EventExtractTaskMap
StringBuilder stringBuilder = new StringBuilder();
for (String s : columnId.split(",")) {
Node column = columnMap.get(s);
stringBuilder.append(",").append(column.getName());
if (column != null) {
stringBuilder.append(",").append(column.getName());
}
}
taskVO.setColumnName(stringBuilder.substring(1));
}
......
......@@ -105,9 +105,8 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
@Resource
private KafkaTemplate<String, String> kafkaTemplate;
@Autowired
private CommonService commonService;
@Autowired
private EsService esService;
@Autowired
private RedisUtil redisUtil;
@Autowired
private PythonUtil pythonUtil;
......@@ -241,7 +240,9 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
long t1 = System.currentTimeMillis();
count(pageList);
log.info("数量统计总共耗时====" + (System.currentTimeMillis() - t1));
Page<EventNewPlatVO> pageData = new Page<>(pageNo, pageSize);
//获取总条数
Integer count = baseMapper.newPlatCount(subjectCondition);
Page<EventNewPlatVO> pageData = new Page<>(pageNo, pageSize,count);
pageData.setRecords(pageList);
return pageData;
}
......@@ -271,7 +272,7 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
Map<String, String> map = new HashMap<>();
List<String> eventIdList = new ArrayList<>();
pageList.forEach(e -> eventIdList.add(e.getId()));
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
//根据设备查询设备的相关信息
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.from(0);
......@@ -462,7 +463,7 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
this.update(updateWrapper);
CompletableFuture.runAsync(() -> {
//调用python接口
pythonUtil.clearDuplicateHistory(eventId);
pythonUtil.clearDuplicateHistory(Collections.singletonList(eventId));
//清空专题数据
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.termQuery("subjectId.keyword", eventId));
......@@ -751,7 +752,7 @@ public class EventServiceImpl extends ServiceImpl<EventMapper, Event> implements
//查询每个专题的数量
private Map<String, Integer> subjectInfoCountMap(List<String> subjectIdList, List<Integer> checkStatusList) {
Map<String, Integer> map = new HashMap<>(subjectIdList.size());
SearchRequest searchRequest = new SearchRequest(EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX));
SearchRequest searchRequest = new SearchRequest(Constants.SUBJECT_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
//默认最大数量是10000,设置为true后,显示准确数量
searchSourceBuilder.trackTotalHits(true);
......
......@@ -88,7 +88,7 @@ public class InformationServiceImpl implements InformationService {
@Autowired
private SysDictItemService sysDictItemService;
@Autowired
private IClbSysAttachmentService csAttachmentService;
private PythonUtil pythonUtil;
@Override
......@@ -117,8 +117,11 @@ public class InformationServiceImpl implements InformationService {
subjectIdList = subjectTypeMapService.selectEventByTypeIds(typeIds);
}
}
if (CollectionUtils.isEmpty(subjectIdList)) {
return page;
}
try {
IPage<SpecialInformation> specialInformationIPage = esService.pageListByCondition(searchCondition,subjectIdList);
IPage<SpecialInformation> specialInformationIPage = esService.pageListByCondition(searchCondition, subjectIdList);
long total = specialInformationIPage.getTotal();
if (total > 0) {
List<DisplayInfo> dataList = new ArrayList<>();
......@@ -127,7 +130,7 @@ public class InformationServiceImpl implements InformationService {
List<SpecialInformation> records = specialInformationIPage.getRecords();
for (SpecialInformation specialInformation : records) {
DisplayInfo info = new DisplayInfo();
BeanUtils.copyProperties(specialInformation,info);
BeanUtils.copyProperties(specialInformation, info);
info.setPublishDate(EsDateUtil.esFieldDateMapping(info.getPublishDate()));
/*LambdaQueryWrapper<CollectionMap> query = Wrappers.lambdaQuery();
query.eq(CollectionMap::getUserId, userVo.getId());
......@@ -151,7 +154,7 @@ public class InformationServiceImpl implements InformationService {
}
@Override
public DisplayInfo queryInfo(Integer type,String index, String id) {
public DisplayInfo queryInfo(Integer type, String index, String id) {
DisplayInfo info = (DisplayInfo) esOpUtil.getInfoById(index, id, DisplayInfo.class);
info.setDbIndex(index);
if (StringUtils.isNotEmpty(info.getContentWithTag())) {
......@@ -185,7 +188,7 @@ public class InformationServiceImpl implements InformationService {
info.setYnArticle(false);
}
CompletableFuture.runAsync(()-> addReadNum(info, index));
CompletableFuture.runAsync(() -> addReadNum(info, index));
}
return info;
}
......@@ -198,7 +201,7 @@ public class InformationServiceImpl implements InformationService {
Label dataSet = new Label();
String dataSetId = searchCondition.getDataSetId();
dataSet.setRelationId(dataSetId);
Map<String,List<SpecialInformation>> map = new HashMap<>();
Map<String, List<SpecialInformation>> map = new HashMap<>();
for (int i = 1; ; i++) {
searchCondition.setPageNo(i);
List<SpecialInformation> informationList = esService.informationList(searchCondition);
......@@ -316,7 +319,7 @@ public class InformationServiceImpl implements InformationService {
}
@Override
public void add(DisplayInfo displayInfo,Integer category, UserVo userVo) {
public void add(DisplayInfo displayInfo, Integer category, UserVo userVo) {
SpecialInformation specialInformation = new SpecialInformation();
BeanUtils.copyProperties(displayInfo, specialInformation);
String id = specialInformation.getSubjectId() + codeGenerateUtil.geneIdNo(Constants.DATA_ADD_ID, 8);
......@@ -357,7 +360,7 @@ public class InformationServiceImpl implements InformationService {
e.setAttachmentId(attachmentInfo.getId().toString());
});
}
String index = EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX);
String index = Constants.SUBJECT_INDEX;
esOpUtil.docSavaByEntity(index, specialInformation.getId(), specialInformation);
// 发送数据生命周期日志 入专题库 新增消息到kafka
final Subjectdatabase subjectdatabase = new Subjectdatabase();
......@@ -384,7 +387,7 @@ public class InformationServiceImpl implements InformationService {
}
@Override
public void updateInfo(JSONObject jsonObject,UserVo userVo) {
public void updateInfo(JSONObject jsonObject, UserVo userVo) {
SpecialInformation specialInformation = JSON.parseObject(JSON.toJSONString(jsonObject.get("data")), SpecialInformation.class);
Integer category = (Integer) jsonObject.get("category");
String index = specialInformation.getDbIndex();
......@@ -407,11 +410,11 @@ public class InformationServiceImpl implements InformationService {
esOpUtil.docUpdateById(index, specialInformation.getId(), JSON.toJSONString(esData));
//修改信息源的原创性
if (StringUtils.isNotBlank(specialInformation.getOriginalSource())) {
infoSourceService.update(Wrappers.<InfoSource>lambdaUpdate().eq(InfoSource::getId,specialInformation.getSid())
.set(InfoSource::getOriginalSource,specialInformation.getOriginalSource()));
infoSourceService.update(Wrappers.<InfoSource>lambdaUpdate().eq(InfoSource::getId, specialInformation.getSid())
.set(InfoSource::getOriginalSource, specialInformation.getOriginalSource()));
}
//发送 人工操作 日志
CompletableFuture.runAsync(() -> sendManualKafkaMsg(category,specialInformation, EnumOperateWay.EDITED, null,userVo));
CompletableFuture.runAsync(() -> sendManualKafkaMsg(category, specialInformation, EnumOperateWay.EDITED, null, userVo));
}
@Override
......@@ -422,9 +425,9 @@ public class InformationServiceImpl implements InformationService {
for (Map<String, String> m : ids) {
String index = m.get("index");
String id = m.get("id");
Map<String,Object> updateFields = new HashMap<>();
updateFields.put("checkStatus",checkStatus);
updateFields.put("deleteFlag",0);
Map<String, Object> updateFields = new HashMap<>();
updateFields.put("checkStatus", checkStatus);
updateFields.put("deleteFlag", 0);
String operateDate = EsDateUtil.esFieldDateFormat(DateUtil.dateToString(new Date()));
updateFields.put("updateDate", operateDate);
updateFields.put("checkDate", operateDate);
......@@ -484,9 +487,9 @@ public class InformationServiceImpl implements InformationService {
for (Map<String, String> m : ids) {
String index = m.get("index");
String id = m.get("id");
Map<String,Object> updateFields = new HashMap<>();
updateFields.put("checkStatus",0);
updateFields.put("deleteFlag",deleteFlag);
Map<String, Object> updateFields = new HashMap<>();
updateFields.put("checkStatus", 0);
updateFields.put("deleteFlag", deleteFlag);
String operateDate = EsDateUtil.esFieldDateFormat(DateUtil.dateToString(new Date()));
updateFields.put("updateDate", operateDate);
updateFields.put("checkDate", operateDate);
......@@ -542,27 +545,27 @@ public class InformationServiceImpl implements InformationService {
}
@Override
public void topInfo(SubjectInfoVo subjectInfoVo,UserVo userVo) {
public void topInfo(SubjectInfoVo subjectInfoVo, UserVo userVo) {
Integer category = subjectInfoVo.getCategory();
String index = subjectInfoVo.getIndex();
Integer type = subjectInfoVo.getType();
String id = subjectInfoVo.getId();
Map<String,Object> updateFields = new HashMap<>();
Map<String, Object> updateFields = new HashMap<>();
if (type == 0) {
updateFields.put("topNum",0);
updateFields.put("topNum", 0);
} else {
//先查询出库里面最大的置顶id
int topNum = esService.getTopNum(index,subjectInfoVo.getSubjectId());
updateFields.put("topNum",topNum + 1);
int topNum = esService.getTopNum(index, subjectInfoVo.getSubjectId());
updateFields.put("topNum", topNum + 1);
}
esOpUtil.updateById(index, id, updateFields);
//发送 人工操作 日志
CompletableFuture.runAsync(() -> {
SpecialInformation specialInformation = (SpecialInformation) esOpUtil.getInfoById(index, subjectInfoVo.getId(), SpecialInformation.class);
if (type == 0) {
sendManualKafkaMsg(category,specialInformation, EnumOperateWay.TOP_CANCELED, null,userVo);
sendManualKafkaMsg(category, specialInformation, EnumOperateWay.TOP_CANCELED, null, userVo);
} else {
sendManualKafkaMsg(category,specialInformation, EnumOperateWay.TOP, null,userVo);
sendManualKafkaMsg(category, specialInformation, EnumOperateWay.TOP, null, userVo);
}
});
}
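The pinning logic above looks up the current maximum topNum for the subject and writes topNum + 1, so the most recently pinned article sorts first. Below is a minimal sketch of how such a lookup could be done with a max aggregation, assuming the Elasticsearch 7.x high-level REST client; the class name TopNumQuerySketch, the aggregation name and the subjectId.keyword field are illustrative assumptions, not necessarily how esService.getTopNum is implemented.

import java.io.IOException;

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.Max;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class TopNumQuerySketch {

    private final RestHighLevelClient client;

    public TopNumQuerySketch(RestHighLevelClient client) {
        this.client = client;
    }

    /**
     * Returns the current maximum topNum for one subject, or 0 if nothing is pinned yet.
     * Hypothetical helper; the real esService.getTopNum may be implemented differently.
     */
    public int maxTopNum(String index, String subjectId) throws IOException {
        SearchSourceBuilder source = new SearchSourceBuilder()
                .query(QueryBuilders.termQuery("subjectId.keyword", subjectId))
                .aggregation(AggregationBuilders.max("maxTopNum").field("topNum"))
                .size(0); // only the aggregation is needed, no hits
        SearchResponse response = client.search(new SearchRequest(index).source(source), RequestOptions.DEFAULT);
        Max max = response.getAggregations().get("maxTopNum");
        double value = max.getValue();
        // the max aggregation returns -Infinity when no document matches
        return Double.isInfinite(value) ? 0 : (int) value;
    }
}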
......@@ -589,9 +592,9 @@ public class InformationServiceImpl implements InformationService {
Integer category = collectionInfo.getCategory();
SpecialInformation specialInformation = (SpecialInformation) esOpUtil.getInfoById(collectionInfo.getIndex(), collectionInfo.getArticleId(), SpecialInformation.class);
if ("1".equals(type)) {
sendManualKafkaMsg(category,specialInformation, EnumOperateWay.COLLECTED, null,userVo);
sendManualKafkaMsg(category, specialInformation, EnumOperateWay.COLLECTED, null, userVo);
} else {
sendManualKafkaMsg(category,specialInformation, EnumOperateWay.COLLECT_CANCELED, null,userVo);
sendManualKafkaMsg(category, specialInformation, EnumOperateWay.COLLECT_CANCELED, null, userVo);
}
});
}
......@@ -615,7 +618,7 @@ public class InformationServiceImpl implements InformationService {
@Override
public List<SubjectDataVo> recommendList(String subjectId, String id, String title, Integer pageNo, Integer pageSize) {
return esService.queryRecommendList(subjectId,id,title,pageNo,pageSize);
return esService.queryRecommendList(subjectId, id, title, pageNo, pageSize);
}
//导入发布库数据
......@@ -693,7 +696,7 @@ public class InformationServiceImpl implements InformationService {
}
specialInformation.setCreateDate(cn.hutool.core.date.DateUtil.format(new Date(), "yyyy-MM-dd'T'HH:mm:ss"));
specialInformation.setProcessDate(specialInformation.getCreateDate());
String index = EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX);
String index = Constants.SUBJECT_INDEX;
esOpUtil.docSavaByEntity(index, specialInformation.getId(), specialInformation);
clbFileOperationLog.getClbFileOperationLogDetails().add(clbFileOperationLogDetailsService.buildFailDetails(info.get(1), info.get(5), info.get(7)));
} catch (NumberFormatException e) {
......@@ -708,7 +711,7 @@ public class InformationServiceImpl implements InformationService {
@Override
public void importDataInfo(List<List<String>> lists, String subjectId) {
if (CollectionUtil.isNotEmpty(lists)) {
String index = EsIndexUtil.getIndexYear(Constants.SUBJECT_INDEX);
String index = Constants.SUBJECT_INDEX;
List<SpecialInformation> dataList = new ArrayList<>();
for (List<String> info : lists) {
SpecialInformation specialInformation = new SpecialInformation();
......@@ -764,6 +767,48 @@ public class InformationServiceImpl implements InformationService {
@Override
public void removeByCondition(String subjectId, List<String> themeIds) {
InfoDataSearchCondition searchCondition = new InfoDataSearchCondition();
searchCondition.setSubjectId(subjectId);
searchCondition.setLabelIds(themeIds);
searchCondition.setPageSize(300);
String[] fetchFields = new String[]{"id", "labels"};
searchCondition.setFetchFields(fetchFields);
List<SpecialInformation> dataList = new ArrayList<>();
List<SpecialInformation> removeList = new ArrayList<>();
for (int i = 1; ; i++) {
searchCondition.setPageNo(i);
List<SpecialInformation> informationList = esService.informationList(searchCondition);
log.info("本次循环-{},数据量为-{}", i, informationList.size());
if (CollectionUtils.isEmpty(informationList)) {
break;
}
for (SpecialInformation specialInformation : informationList) {
List<Label> labels = specialInformation.getLabels();
if (labels.size() == 1) {
removeList.add(specialInformation);
} else {
List<Label> newLabels = new ArrayList<>();
for (Label label : labels) {
String labelRelationId = label.getRelationId();
if (!themeIds.contains(labelRelationId)) {
newLabels.add(label);
}
}
specialInformation.setLabels(newLabels);
dataList.add(specialInformation);
}
}
}
if (CollectionUtils.isNotEmpty(removeList)) {
Map<String, List<SpecialInformation>> removeMap = removeList.stream().collect(Collectors.groupingBy(SpecialInformation::getDbIndex));
esOpUtil.docRemoveBulk(removeMap);
}
if (CollectionUtils.isNotEmpty(dataList)) {
Map<String, List<SpecialInformation>> updateMap = dataList.stream().collect(Collectors.groupingBy(SpecialInformation::getDbIndex));
updateMap.forEach((k, v) -> esOpUtil.docUpdateBulk(k, v));
}
// call the python deduplication API
pythonUtil.clearDuplicateHistory(themeIds);
}
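The loop above applies a simple per-document rule: if removing the given theme labels would leave the document with no label at all, the document is deleted; otherwise only its label list is trimmed and the document is updated. The following is a self-contained illustration of that branching rule with plain collections; DocSketch is a hypothetical stand-in for SpecialInformation, and the Elasticsearch paging and per-index grouping done by the real method are omitted.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class LabelRemovalSketch {

    /** Hypothetical stand-in for SpecialInformation: an id plus its label relation ids. */
    static class DocSketch {
        final String id;
        final List<String> labelRelationIds;
        DocSketch(String id, List<String> labelRelationIds) {
            this.id = id;
            this.labelRelationIds = labelRelationIds;
        }
    }

    public static void main(String[] args) {
        List<String> themeIds = Arrays.asList("theme-1", "theme-2");
        List<DocSketch> docs = Arrays.asList(
                new DocSketch("a", Arrays.asList("theme-1")),             // only a theme label -> remove
                new DocSketch("b", Arrays.asList("theme-1", "other-9"))); // still keeps "other-9" -> update

        List<DocSketch> removeList = new ArrayList<>();
        List<DocSketch> updateList = new ArrayList<>();
        for (DocSketch doc : docs) {
            if (doc.labelRelationIds.size() == 1) {
                // the query only returns documents carrying one of the theme labels,
                // so a single label means the document would end up label-less
                removeList.add(doc);
            } else {
                List<String> remaining = new ArrayList<>();
                for (String relationId : doc.labelRelationIds) {
                    if (!themeIds.contains(relationId)) {
                        remaining.add(relationId);
                    }
                }
                updateList.add(new DocSketch(doc.id, remaining));
            }
        }
        System.out.println("remove=" + removeList.size() + ", update=" + updateList.size()); // remove=1, update=1
    }
}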
......@@ -803,14 +848,14 @@ public class InformationServiceImpl implements InformationService {
} else {
readNum = readNum + 1;
}
Map<String,Object> updateFieldMap = new HashMap<>();
updateFieldMap.put("readNum",readNum);
Map<String, Object> updateFieldMap = new HashMap<>();
updateFieldMap.put("readNum", readNum);
esOpUtil.updateById(index, displayInfo.getId(), updateFieldMap);
}
}
//发送 人工操作 日志
private void sendManualKafkaMsg(Integer category,SpecialInformation specialInformation, EnumOperateWay enumOperateWay, List<String> labelNameList,UserVo userVo) {
private void sendManualKafkaMsg(Integer category, SpecialInformation specialInformation, EnumOperateWay enumOperateWay, List<String> labelNameList, UserVo userVo) {
// 发送数据生命周期日志 数据审核 编辑标签消息到kafka
final DataLifecycleLog manualOperateLog;
final Subjectdatabase subjectdatabase = new Subjectdatabase();
......@@ -867,7 +912,7 @@ public class InformationServiceImpl implements InformationService {
data.setInfoSourceType(typeNum);
}
private List<Label> getInfoSourceLabel(String origin,String sourceAddress){
private List<Label> getInfoSourceLabel(String origin, String sourceAddress) {
List<String> sidList = new ArrayList<>();
if (StringUtils.isNotEmpty(origin)) {
LambdaQueryWrapper<InfoSource> queryWrapper = Wrappers.lambdaQuery();
......@@ -897,7 +942,7 @@ public class InformationServiceImpl implements InformationService {
LabelItemMapVO clbLabelItem = clbLabelItems.get(0);
label.setSourceId(clbLabelItem.getEntityCode());
label.setLabelMark(clbLabelItem.getLabelCode());
label.setRelationId(clbLabelItem.getLabelCode() + "-" +clbLabelItem.getLabelItemCode());
label.setRelationId(clbLabelItem.getLabelCode() + "-" + clbLabelItem.getLabelItemCode());
} else {//默认其他
label.setLabelMark("LABEL-20250102-0006");
label.setRelationId("LABEL-20250102-0006-LV-20250102-0026");
......
......@@ -421,7 +421,7 @@ public class SubjectSimpleServiceImpl implements SubjectSimpleService {
subjectService.update(updateWrapper);
CompletableFuture.runAsync(() -> {
//调用python接口
pythonUtil.clearDuplicateHistory(subjectId);
pythonUtil.clearDuplicateHistory(Collections.singletonList(subjectId));
//清空专题数据
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.termQuery("subjectId.keyword", subjectId));
......
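The async block above builds a bool query on subjectId.keyword before clearing the subject's documents, but the hunk is truncated before the query is executed. The sketch below shows one way such a query could drive a delete-by-query, assuming the Elasticsearch 7.x high-level REST client; the helper name and the refresh flag are assumptions.

import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;

public class ClearSubjectDataSketch {

    /** Deletes every document of one subject from the given index (hypothetical helper). */
    public static long clearSubject(RestHighLevelClient client, String index, String subjectId) throws IOException {
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("subjectId.keyword", subjectId));
        DeleteByQueryRequest request = new DeleteByQueryRequest(index);
        request.setQuery(boolQuery);
        request.setRefresh(true); // make the deletion visible to the next search
        BulkByScrollResponse response = client.deleteByQuery(request, RequestOptions.DEFAULT);
        return response.getDeleted();
    }
}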
......@@ -121,9 +121,9 @@ public class SubjectTypeServiceImpl extends ServiceImpl<SubjectTypeMapper, Subje
}
@Override
public List<SubjectTreeVO> eventAndTypeTree() {
public List<SubjectTreeVO> eventAndTypeTree(String userId, String customerId) {
List<SubjectTreeVO> tree = new ArrayList<>();
List<SubjectTreeVO> subjectTreeVOS = baseMapper.eventAndTypeTree();
List<SubjectTreeVO> subjectTreeVOS = baseMapper.eventAndTypeTree(userId, customerId);
if (CollectionUtils.isNotEmpty(subjectTreeVOS)) {
tree = TreeUtil.tree(subjectTreeVOS, "0");
}
......
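TreeUtil.tree(subjectTreeVOS, "0") above assembles the flat mapper result into a tree whose roots carry parent id "0". A minimal sketch of that kind of assembly follows, assuming each node exposes an id, a parent id and a children list; NodeSketch and its fields are hypothetical stand-ins, not the project's SubjectTreeVO or TreeUtil.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class TreeBuildSketch {

    /** Hypothetical flat node; SubjectTreeVO presumably carries similar fields. */
    static class NodeSketch {
        String id;
        String parentId;
        List<NodeSketch> children = new ArrayList<>();
        NodeSketch(String id, String parentId) {
            this.id = id;
            this.parentId = parentId;
        }
    }

    /** Groups nodes by parentId and attaches each group to its parent; nodes under rootId become the result. */
    public static List<NodeSketch> tree(List<NodeSketch> flat, String rootId) {
        Map<String, List<NodeSketch>> byParent = flat.stream()
                .collect(Collectors.groupingBy(n -> n.parentId));
        for (NodeSketch node : flat) {
            node.children = byParent.getOrDefault(node.id, new ArrayList<>());
        }
        return byParent.getOrDefault(rootId, new ArrayList<>());
    }
}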
......@@ -550,6 +550,30 @@ public class EsOpUtil<T> {
}
}
/**
* Bulk delete
*
* @param removeMap documents to delete, grouped by index
* @author lkg
* @date 2024/12/19
*/
public void docRemoveBulk(Map<String, List<SpecialInformation>> removeMap) {
BulkRequest bulkRequest = new BulkRequest();
for (Map.Entry<String, List<SpecialInformation>> entry : removeMap.entrySet()) {
String index = entry.getKey();
for (SpecialInformation information : entry.getValue()) {
DeleteRequest deleteRequest = new DeleteRequest(index, information.getId());
bulkRequest.add(deleteRequest);
}
}
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
try {
client.bulk(bulkRequest, RequestOptions.DEFAULT);
} catch (Exception e) {
log.error("数据删除失败!", e);
}
}
/**
* @Description 判断该专题下的内容是否重复导入
......
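The bulk-delete helper added above has a bulk-update counterpart, docUpdateBulk, which removeByCondition calls but which is not part of this diff. The sketch below shows what such a counterpart could look like, assuming the Elasticsearch 7.x high-level REST client and fastjson; the class name, the map-based signature and the IMMEDIATE refresh policy are assumptions mirroring docRemoveBulk, not the project's actual implementation.

import java.util.Map;

import com.alibaba.fastjson.JSON;

import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DocUpdateBulkSketch {

    private static final Logger log = LoggerFactory.getLogger(DocUpdateBulkSketch.class);

    private final RestHighLevelClient client;

    public DocUpdateBulkSketch(RestHighLevelClient client) {
        this.client = client;
    }

    /**
     * Partially updates every listed document of one index in a single bulk call.
     * Each entry maps a document id to the object whose JSON form is merged into the document.
     */
    public void docUpdateBulk(String index, Map<String, Object> sourceById) {
        BulkRequest bulkRequest = new BulkRequest();
        for (Map.Entry<String, Object> entry : sourceById.entrySet()) {
            UpdateRequest updateRequest = new UpdateRequest(index, entry.getKey())
                    .doc(JSON.toJSONString(entry.getValue()), XContentType.JSON);
            bulkRequest.add(updateRequest);
        }
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        try {
            client.bulk(bulkRequest, RequestOptions.DEFAULT);
        } catch (Exception e) {
            log.error("bulk update failed", e);
        }
    }
}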
......@@ -437,7 +437,8 @@ public class HttpUtil {
post(url,new ByteArrayInputStream(bytes),filename);
}
public static void post(String url,InputStream inputStream ,String filename) throws IOException {
public static byte[] post(String url, InputStream inputStream, String filename) throws IOException {
byte[] result = null;
try {
//创建HttpClient
CloseableHttpClient httpClient = HttpClients.createDefault();
......@@ -455,14 +456,12 @@ public class HttpUtil {
HttpEntity responseEntity = response.getEntity();
if(responseEntity != null){
//将响应内容读取为字节数组
String result = EntityUtils.toString(responseEntity, StandardCharsets.UTF_8);
//此处根据服务器返回的参数转换,这里返回的是JSON格式
JSONObject output = JSON.parseObject(result);
// System.out.println(output.toJSONString());
result = EntityUtils.toByteArray(responseEntity);
}
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
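With the return type changed to byte[], callers can now capture the upload response instead of discarding it. A short usage sketch follows; only the post overload shown above is taken from the diff, while the URL, the file names and the HttpUtil package are placeholders or assumptions.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import com.zzsn.event.util.HttpUtil; // package assumed; the project's HttpUtil with the byte[] post overload

public class UploadResponseSketch {

    public static void main(String[] args) throws IOException {
        Path source = Paths.get("report.docx");          // placeholder input file
        Path converted = Paths.get("report.pdf");        // placeholder output file
        String url = "http://example.internal/convert";  // placeholder service URL

        try (InputStream in = Files.newInputStream(source)) {
            // post now returns the raw response body, or null when the call failed
            byte[] body = HttpUtil.post(url, in, source.getFileName().toString());
            if (body != null) {
                Files.write(converted, body);
            }
        }
    }
}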
public static String sendPost(String url, Map<String, Object> params,String charset,int ExTime) {
String content = "";
......
......@@ -102,23 +102,23 @@ public class PythonUtil {
/**
* Clear the deduplication service's historical data
*
* @param subjectId subject id
* @param subjectIds subject id list
* @author lkg
* @date 2025/2/10
*/
public void clearDuplicateHistory(String subjectId) {
Map<String, String> params = new HashMap<>();
params.put("subjectId", subjectId);
public void clearDuplicateHistory(List<String> subjectIds) {
JSONObject params = new JSONObject();
params.put("subjectId", subjectIds);
Map<String,String> headers = new HashMap<>();
headers.put("Content-Type", "application/json;charset=UTF-8");
headers.put("Accept", "application/json");
headers.put("Authorization", "!0gwY$5S@5V&A_+XEu)");
try {
String response = HttpUtil.doGet(clearDuplicateHistoryUrl, params, headers, "UTF-8");
String response = HttpUtil.doPostWithHeader(clearDuplicateHistoryUrl, params, 30000, headers);
JSONObject jsonObject = JSONObject.parseObject(response);
String isHandleSuccess = jsonObject.getString("isHandleSuccess");
if (isHandleSuccess.equals("true")) {
log.info("专题-{},清空数据,调用python接口清空去重服务历史数据请求发送成功",subjectId);
log.info("专题-{},清空数据,调用python接口清空去重服务历史数据请求发送成功",JSON.toJSONString(subjectIds));
} else {
log.info("python清空去重服务历史数据接口异常:{}",jsonObject.getString("logs"));
}
......
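The call above switched from a GET with query parameters to a JSON POST that carries the whole subject id list plus authorization headers, with a 30-second timeout. HttpUtil.doPostWithHeader itself is not part of this diff, so the sketch below only illustrates what such a helper could look like with Apache HttpClient 4.x; the method name and parameter order are taken from the call site, everything else is an assumption.

import java.nio.charset.StandardCharsets;
import java.util.Map;

import com.alibaba.fastjson.JSONObject;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class DoPostWithHeaderSketch {

    /** Posts a JSON body with custom headers and a timeout, returning the response body as a string. */
    public static String doPostWithHeader(String url, JSONObject params, int timeoutMillis,
                                          Map<String, String> headers) throws Exception {
        RequestConfig config = RequestConfig.custom()
                .setConnectTimeout(timeoutMillis)
                .setSocketTimeout(timeoutMillis)
                .build();
        HttpPost post = new HttpPost(url);
        post.setConfig(config);
        headers.forEach(post::setHeader);
        post.setEntity(new StringEntity(params.toJSONString(),
                ContentType.APPLICATION_JSON.withCharset(StandardCharsets.UTF_8)));
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(post)) {
            return EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
        }
    }
}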
......@@ -41,6 +41,8 @@ public class AddEventVO {
private Integer facePublic;
/**事件创建方式(1-用户直接创建;2-基于挖掘的事件创建)*/
private Integer category;
/** whether collection is enabled (1-yes; 0-no) */
private Integer ynCollect;
/**事件地域信息*/
List<RegionVO> regionList;
......
package com.zzsn.event.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.zzsn.event.entity.EventTag;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
import java.util.List;
......@@ -55,6 +57,11 @@ public class EventVO {
private String projectId;
/**排序号*/
private Integer sortOrder;
/** whether collection is enabled (1-yes; 0-no) */
private Integer ynCollect;
/** time of first activation */
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date firstOpenTime;
......
......@@ -102,9 +102,11 @@ public class InfoDataSearchCondition {
//排序参数
//排序字段
private String column = "common";
private String column;
//排序方式 asc/desc
private String order = "desc";
private String order;
// whether top-pinned sorting takes effect (1-yes; 0-no)
private Integer topSortValid = 0;
//分页参数
//当前页
......
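With the column/order defaults removed, callers now pass the sort explicitly, and topSortValid indicates whether pinned documents should be ranked ahead of the regular sort. The sketch below shows one way a query builder might consume these three fields, assuming the Elasticsearch 7.x high-level REST client and the topNum field used by topInfo above; the publishDate fallback is illustrative, not the project's default.

import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;

public class SortBuildSketch {

    /** Applies the optional pinned-first sort, then the user-selected column, then a publish-date fallback. */
    public static void applySort(SearchSourceBuilder source, Integer topSortValid, String column, String order) {
        if (topSortValid != null && topSortValid == 1) {
            // pinned documents carry topNum > 0 and should always come first
            source.sort("topNum", SortOrder.DESC);
        }
        if (column != null && !column.isEmpty()) {
            SortOrder sortOrder = "asc".equalsIgnoreCase(order) ? SortOrder.ASC : SortOrder.DESC;
            source.sort(column, sortOrder);
        } else {
            source.sort("publishDate", SortOrder.DESC); // illustrative fallback field
        }
    }
}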
......@@ -69,8 +69,8 @@ spring:
stream:
kafka:
binder:
brokers: 1.95.78.131:9092
zkNodes: 1.95.78.131:2181
brokers: 1.95.78.131:17092
zkNodes: 1.95.78.131:12181
requiredAcks: 1
redis:
database: 7
......@@ -85,7 +85,7 @@ spring:
port: 6380
password: RPHZgkDQ4zGJ
kafka:
bootstrap-servers: 1.95.78.131:9092
bootstrap-servers: 1.95.78.131:17092
producer: # 生产者
retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送
batch-size: 16384
......@@ -97,6 +97,12 @@ spring:
value-serializer: org.apache.kafka.common.serialization.StringSerializer
properties:
max.request.size: 335544324 #32M
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: PLAIN
jaas:
config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-zzsn9988";
consumer:
#用于标识此使用者所属的使用者组的唯一字符串
group-id: event-analysis-group-dev
......@@ -111,6 +117,13 @@ spring:
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
properties:
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: PLAIN
jaas:
config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-zzsn9988";
main:
allow-circular-references: true
mybatis-plus:
......
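The broker endpoints above moved to SASL-protected ports, and the same three settings (security.protocol, sasl.mechanism, sasl.jaas.config) are added under both producer and consumer. The snippet below is the equivalent plain Kafka producer configuration in Java, handy for verifying the credentials outside Spring; the topic name is a placeholder.

import java.util.Properties;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringSerializer;

public class SaslProducerSketch {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "1.95.78.131:17092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // the same three SASL settings as in the yml above
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        props.put(SaslConfigs.SASL_JAAS_CONFIG,
                "org.apache.kafka.common.security.plain.PlainLoginModule required "
                        + "username=\"admin\" password=\"admin-zzsn9988\";");

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("connectivity-check", "ping")); // placeholder topic
            producer.flush();
        }
    }
}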
......@@ -66,8 +66,8 @@ spring:
stream:
kafka:
binder:
brokers: 1.95.78.131:9092
zkNodes: 1.95.78.131:2181
brokers: 192.168.0.180:7092
zkNodes: 192.168.0.180:12181
requiredAcks: 1
redis:
database: 0
......@@ -82,7 +82,7 @@ spring:
port: 6380
password: RPHZgkDQ4zGJ
kafka:
bootstrap-servers: 1.95.78.131:9092
bootstrap-servers: 192.168.0.180:7092
producer: # 生产者
retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送
batch-size: 16384
......@@ -94,6 +94,12 @@ spring:
value-serializer: org.apache.kafka.common.serialization.StringSerializer
properties:
max.request.size: 335544324 #32M
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: PLAIN
jaas:
config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-zzsn9988";
consumer:
#用于标识此使用者所属的使用者组的唯一字符串
group-id: event-analysis-group
......@@ -108,6 +114,13 @@ spring:
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
properties:
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: PLAIN
jaas:
config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-zzsn9988";
main:
allow-circular-references: true
mybatis-plus:
......
......@@ -71,8 +71,8 @@ spring:
stream:
kafka:
binder:
brokers: 1.95.78.131:9092
zkNodes: 1.95.78.131:2181
brokers: 1.95.78.131:17092
zkNodes: 1.95.78.131:12181
requiredAcks: 1
redis:
database: 7
......@@ -87,7 +87,7 @@ spring:
port: 6379
password: zzsn9988
kafka:
bootstrap-servers: 1.95.78.131:9092
bootstrap-servers: 1.95.78.131:17092
producer: # 生产者
retries: 3 # 设置大于0的值,则客户端会将发送失败的记录重新发送
batch-size: 16384
......@@ -99,6 +99,12 @@ spring:
value-serializer: org.apache.kafka.common.serialization.StringSerializer
properties:
max.request.size: 335544324 #32M
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: PLAIN
jaas:
config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-zzsn9988";
consumer:
#用于标识此使用者所属的使用者组的唯一字符串
group-id: event-analysis-group-dev
......@@ -113,6 +119,13 @@ spring:
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#值的反序列化器类,实现类实现了接口org.apache.kafka.common.serialization.Deserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
properties:
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: PLAIN
jaas:
config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-zzsn9988";
main:
allow-circular-references: true
mybatis-plus:
......