feat: refactor the upload strategy to support one-matter-one-process (一事一流程) data parsing
parent 60a1e1df14
commit 80b1745ddc
@@ -22,8 +22,11 @@ public enum ChatTypeVo {
     emerg(4,"emerg","应急助手问答"),
 
     /**诊断代码查询**/
-    codeQuery(5,"codeQuery","诊断代码查询");
+    codeQuery(5,"codeQuery","诊断代码查询"),
+
+    /**
+     * 一事一流程数据处理
+     */
+    ysylcDataProc(6,"ysylcDataProc","一事一流程数据处理");
 
     private final int code;
     private final String name;
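The new ysylcDataProc constant is keyed by the same "ysylcDataProc" name string that the service layer later uses to fetch a workflow API key. A minimal lookup sketch, assuming ChatTypeVo exposes its name field through a getName() getter; the fromName helper itself is hypothetical and not part of this commit:

    // Hypothetical helper inside ChatTypeVo (not in this commit): resolve a constant
    // by its name key, e.g. "ysylcDataProc". Assumes a getName() accessor exists.
    public static ChatTypeVo fromName(String name) {
        for (ChatTypeVo type : values()) {
            if (type.getName().equals(name)) {
                return type;
            }
        }
        throw new IllegalArgumentException("Unknown chat type: " + name);
    }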
@@ -0,0 +1,9 @@
+package com.bjtds.brichat.service;
+
+import io.github.guoshiqiufeng.dify.workflow.dto.request.WorkflowRunRequest;
+import org.springframework.web.multipart.MultipartFile;
+
+public interface WorkFlowService {
+
+    String runWorkflowByFile(String userId, String apiKey, MultipartFile file);
+}
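A hedged caller sketch for the new interface, for example from a test; MockMultipartFile, the bean wiring, and the placeholder ids are assumptions rather than anything in this commit:

    import org.springframework.mock.web.MockMultipartFile;
    import org.springframework.web.multipart.MultipartFile;

    import com.bjtds.brichat.service.WorkFlowService;

    class WorkFlowServiceUsageSketch {
        // Hypothetical usage: push a JSON payload through the workflow as a multipart file.
        void run(WorkFlowService workFlowService, String apiKey) {
            MultipartFile file = new MockMultipartFile(
                    "file", "ysylc.json", "application/json",
                    "{\"demo\":true}".getBytes());
            workFlowService.runWorkflowByFile("document-id-placeholder", apiKey, file);
        }
    }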
@@ -12,6 +12,8 @@ import com.bjtds.brichat.mapper.postgresql.DifyDatasetsMapper;
 import com.bjtds.brichat.mapper.postgresql.DifyUploadFileMapper;
 import com.bjtds.brichat.service.DatasetsDocService;
 import com.bjtds.brichat.service.DocAnalysisStrategyService;
+import com.bjtds.brichat.service.TApiKeyService;
+import com.bjtds.brichat.service.WorkFlowService;
 import com.bjtds.brichat.service.dify.DifyDatasetApiService;
 import com.bjtds.brichat.util.Constants;
 import com.fasterxml.jackson.core.JsonProcessingException;
@@ -22,6 +24,8 @@ import io.github.guoshiqiufeng.dify.dataset.dto.request.document.SubChunkSegment
 import io.github.guoshiqiufeng.dify.dataset.dto.response.DatasetInfoResponse;
 import io.github.guoshiqiufeng.dify.dataset.dto.response.UploadFileInfoResponse;
 import io.github.guoshiqiufeng.dify.dataset.enums.document.ParentModeEnum;
+import io.github.guoshiqiufeng.dify.workflow.client.DifyWorkflowClient;
+import io.github.guoshiqiufeng.dify.workflow.dto.request.WorkflowRunRequest;
 import org.apache.commons.io.IOUtils;
 import org.apache.poi.hwpf.HWPFDocument;
 import org.apache.poi.hwpf.extractor.WordExtractor;
@@ -52,6 +56,7 @@ import org.springframework.web.multipart.MultipartFile;
 import javax.annotation.Resource;
 import java.io.*;
 import java.util.*;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
@@ -68,6 +73,13 @@ public class DifyDatasetApiServiceImpl implements DifyDatasetApiService {
     @Qualifier("redisTemplate")
     private RedisTemplate<String, Object> redisTemplate;
 
+    @Resource
+    private WorkFlowService workFlowService;
+
+
+    @Resource
+    private TApiKeyService tApiKeyService;
+
     //开源组件
     @Resource
     private DifyDataset difyDatasetService;
@@ -548,6 +560,12 @@ public class DifyDatasetApiServiceImpl implements DifyDatasetApiService {
         List<DifyMetadata> docMetadatas = res.getDocMetadatas();
         for (DifyMetadata metadata : docMetadatas) {
             if (metadata.getName().equals("ysylc_json_url")) {
+                //异步执行
+                CompletableFuture.runAsync(() -> {
+                    String apiKey = tApiKeyService.getApiKeyFromCache("ysylcDataProc");
+                    workFlowService.runWorkflowByFile(documentId,apiKey,file);
+                });
+
                 DifyMetadata newMetadata = new DifyMetadata();
                 newMetadata.setId(metadata.getId());
                 newMetadata.setType(metadata.getType());
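CompletableFuture.runAsync without an executor runs the task on the common ForkJoinPool and drops any exception unless the returned future is inspected. A hardening sketch under those assumptions; the dedicated executor and the Slf4j logger are assumptions, not part of this commit:

    // Sketch: run the ysylcDataProc workflow on a dedicated pool and surface failures.
    // ysylcExecutor is an assumed field/bean; log assumes an Slf4j logger on this class.
    java.util.concurrent.ExecutorService ysylcExecutor =
            java.util.concurrent.Executors.newFixedThreadPool(2);

    CompletableFuture
            .runAsync(() -> {
                String apiKey = tApiKeyService.getApiKeyFromCache("ysylcDataProc");
                workFlowService.runWorkflowByFile(documentId, apiKey, file);
            }, ysylcExecutor)
            .exceptionally(ex -> {
                log.error("ysylcDataProc workflow failed for document {}", documentId, ex);
                return null;
            });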
@@ -0,0 +1,57 @@
+package com.bjtds.brichat.service.impl;
+
+import com.bjtds.brichat.service.WorkFlowService;
+import io.github.guoshiqiufeng.dify.chat.DifyChat;
+import io.github.guoshiqiufeng.dify.chat.dto.request.FileUploadRequest;
+import io.github.guoshiqiufeng.dify.chat.dto.response.FileUploadResponse;
+import io.github.guoshiqiufeng.dify.workflow.DifyWorkflow;
+import io.github.guoshiqiufeng.dify.workflow.dto.request.WorkflowRunRequest;
+import io.github.guoshiqiufeng.dify.workflow.dto.response.WorkflowRunResponse;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import javax.annotation.Resource;
+import java.util.HashMap;
+import java.util.Map;
+
+@Slf4j
+@Service
+public class WorkFlowServiceImpl implements WorkFlowService {
+
+
+    @Resource
+    private DifyChat difyChatService;
+
+    @Resource
+    private DifyWorkflow workflow;
+
+    @Override
+    public String runWorkflowByFile(String userId, String apiKey, MultipartFile file) {
+        //1.上传文件
+        FileUploadRequest fileUploadRequest = new FileUploadRequest();
+        fileUploadRequest.setFile(file);
+        fileUploadRequest.setUserId(userId);
+        fileUploadRequest.setApiKey(apiKey);
+
+        FileUploadResponse fileUploadResponse = difyChatService.fileUpload(fileUploadRequest);
+        log.info("文件上传成功,文件id:{}",fileUploadResponse.getName());
+
+        //2.运行工作流
+        WorkflowRunRequest workflowRunRequest = new WorkflowRunRequest();
+        workflowRunRequest.setUserId(userId);
+        workflowRunRequest.setApiKey(apiKey);
+        Map<String, Object> filesMap = new HashMap<>();
+        filesMap.put("type","document");
+        filesMap.put("upload_file_id",fileUploadResponse.getId());
+        filesMap.put("transfer_method","local_file");
+        filesMap.put("url","");
+        Map<String, Object> inputMap = new HashMap<>();
+        inputMap.put("files",filesMap);
+        workflowRunRequest.setInputs(inputMap);
+
+        WorkflowRunResponse workflowRunResponse = workflow.runWorkflow(workflowRunRequest);
+        log.info("一事一流程工作流运行成功,工作流id:{}",workflowRunResponse.getData());
+
+        return "";
+    }
+}
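The committed method returns an empty string and logs fileUploadResponse.getName() under a "file id" label. A sketch of a variant that fails fast on a missing upload id and returns the workflow payload instead; it reuses only calls already shown above (fileUpload, runWorkflow, getId, getData) and is an assumption, not the project's implementation:

    // Sketch only: same beans and DTOs as the committed class, with basic guarding
    // and a non-empty return value derived from the workflow response.
    @Override
    public String runWorkflowByFile(String userId, String apiKey, MultipartFile file) {
        FileUploadRequest upload = new FileUploadRequest();
        upload.setFile(file);
        upload.setUserId(userId);
        upload.setApiKey(apiKey);

        FileUploadResponse uploaded = difyChatService.fileUpload(upload);
        if (uploaded == null || uploaded.getId() == null) {
            throw new IllegalStateException("Dify file upload returned no file id");
        }

        WorkflowRunRequest run = new WorkflowRunRequest();
        run.setUserId(userId);
        run.setApiKey(apiKey);

        Map<String, Object> fileRef = new HashMap<>();
        fileRef.put("type", "document");
        fileRef.put("transfer_method", "local_file");
        fileRef.put("upload_file_id", uploaded.getId());

        Map<String, Object> inputs = new HashMap<>();
        inputs.put("files", fileRef);
        run.setInputs(inputs);

        WorkflowRunResponse result = workflow.runWorkflow(run);
        return result == null ? "" : String.valueOf(result.getData());
    }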
@@ -1,33 +0,0 @@
-package com.bjtds.brichat;
-
-import com.bjtds.brichat.entity.dto.KnowledgeBaseDto;
-import com.bjtds.brichat.entity.dto.RecordDto;
-import com.bjtds.brichat.util.Constants;
-import com.bjtds.brichat.util.RetrievalUtil;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.test.context.SpringBootTest;
-
-import java.util.List;
-
-@SpringBootTest
-public class RetrievalTest {
-
-    RetrievalUtil retrievalUtil;
-
-
-    private String difyUrl = "http://192.168.8.253:16780";
-
-
-    private static String apiKey = "dataset-0Hij9IwoWYbJe1vvwVh8y7DS";
-
-    @Test
-    public void testRetrieval() throws Exception {
-        String datasetPath = difyUrl + Constants.DATABASE_API;
-        KnowledgeBaseDto knowledgeBaseDto = new KnowledgeBaseDto();
-        knowledgeBaseDto.setQuery("控制器手柄故障");
-        knowledgeBaseDto.setSearchMethod("hybrid_search");
-        List<RecordDto> recordDtos = retrievalUtil.getRetrieval(datasetPath,apiKey,"bbcce315-d7cb-4b40-85eb-4805070bea4d",knowledgeBaseDto);
-        System.out.println(recordDtos);
-    }
-}