Merge pull request 'xiaohucoding' (#17) from xiaohucoding into main
Reviewed-on: #17
@@ -3,23 +3,31 @@ package com.bipt.intelligentapplicationorchestrationservice.controller;
import com.bipt.intelligentapplicationorchestrationservice.pojo.AlgorithmInfo;
import com.bipt.intelligentapplicationorchestrationservice.pojo.OptResult;
import com.bipt.intelligentapplicationorchestrationservice.service.AlgorithmInfoService;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.util.List;

@Tag(name = "算法创建相关接口")
@RestController
@RequestMapping("/api/algorithm")
@Slf4j
@CrossOrigin(origins = "http://localhost:3000")
public class AlgorithmInfoController {

    @Autowired
    private AlgorithmInfoService algorithmInfoService;

    @Autowired
    private ObjectMapper objectMapper;

    @GetMapping("/{id}")
    public ResponseEntity<AlgorithmInfo> getById(@PathVariable Long id) {
        AlgorithmInfo algorithmInfo = algorithmInfoService.getById(id);
@@ -63,15 +71,51 @@ public class AlgorithmInfoController {
        ResponseEntity.badRequest().body("Delete failed");
    }

    /**
     * Create an algorithm
     */
    @PostMapping
    @PostMapping(consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    @Operation(summary = "算法创建")
    public OptResult save(@RequestBody AlgorithmInfo algorithmInfo){
        log.info("新增算法", algorithmInfo);
        algorithmInfoService.save(algorithmInfo);
        return OptResult.success("算法创建成功");
    public OptResult save(@RequestParam("algorithm") String algorithmJson,
                          @RequestPart(value = "algorithmFile") MultipartFile file) {
        try {
            AlgorithmInfo algorithmInfo = objectMapper.readValue(algorithmJson, AlgorithmInfo.class);
            log.info("新增算法: {}, 文件: {}", algorithmInfo, (file != null ? file.getOriginalFilename() : "无文件"));
            algorithmInfoService.save(algorithmInfo, file);
            return OptResult.success("算法创建成功");
        } catch (IOException e) {
            log.error("JSON转换失败", e);
            return OptResult.error("新增算法失败: " + e.getMessage());
        } catch (Exception e) {
            log.error("新增算法失败", e);
            return OptResult.error("新增算法失败: " + e.getMessage());
        }
    }

    /**
     * Fuzzy search for algorithm info by algorithm name
     * @param keyword search keyword
     * @return list of matching algorithm info
     */
    @GetMapping("/search")
    public ResponseEntity<List<AlgorithmInfo>> searchByName(@RequestParam String keyword) {
        List<AlgorithmInfo> algorithmInfos = algorithmInfoService.getByNameLike(keyword);
        return ResponseEntity.ok(algorithmInfos);
    }
    /**
     * Run an algorithm
     */
    @PostMapping("/run/{id}")
    @Operation(summary = "运行")
    public OptResult run(@PathVariable Long id, @RequestBody String param){
        log.info("运行", id);
        String result = algorithmInfoService.run(id, param);
        return OptResult.success("运行成功" + result);
    }
    /**
     * Return algorithm names for the front-end list
     */
    @GetMapping("/names")
    @Operation(summary = "列表返回算法名称")
    public List<String> getNames(){
        return algorithmInfoService.getAllNames();
    }

}
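The reworked save endpoint no longer accepts a plain JSON body: it expects the AlgorithmInfo serialized as JSON in the "algorithm" form field plus an "algorithmFile" multipart part. A minimal test sketch of how a client could exercise it, assuming spring-boot-starter-test is on the classpath; the test class name and the JSON payload are illustrative and not part of this PR:

    package com.bipt.intelligentapplicationorchestrationservice.controller;

    import org.junit.jupiter.api.Test;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
    import org.springframework.boot.test.context.SpringBootTest;
    import org.springframework.mock.web.MockMultipartFile;
    import org.springframework.test.web.servlet.MockMvc;

    import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

    @SpringBootTest
    @AutoConfigureMockMvc
    class AlgorithmInfoControllerMultipartTest {

        @Autowired
        private MockMvc mockMvc;

        @Test
        void saveAcceptsJsonFieldPlusFilePart() throws Exception {
            // Part name must match @RequestPart("algorithmFile") in the controller.
            MockMultipartFile script = new MockMultipartFile(
                    "algorithmFile", "demo.py", "text/x-python",
                    "print('hello')".getBytes());

            mockMvc.perform(multipart("/api/algorithm")
                            .file(script)
                            // Form field must match @RequestParam("algorithm").
                            .param("algorithm", "{\"algorithmName\":\"demo\"}"))
                    .andExpect(status().isOk());
        }
    }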
@@ -6,28 +6,44 @@ import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

@Tag(name = "数据集相关接口")
@RestController
@RequestMapping("/dataset")
@Slf4j
@CrossOrigin(origins = "http://localhost:3000")
public class DatasetController {
    @Autowired
    private DatasetService datasetService;

    /**
     * Create a dataset
     * Create a dataset (with integrated file upload)
     * @param datasetDTO
     * @param file
     * @return
     */
    @Operation(summary = "新增数据集")
    @PostMapping
    public OptResult save(@RequestBody DatasetDTO datasetDTO) {
        log.info("新增数据集:{}", datasetDTO);
        datasetService.save(datasetDTO);
        return OptResult.success();
    @Operation(summary = "新增数据集")
    @PostMapping(consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    public OptResult save(
            @ModelAttribute("dataset") DatasetDTO datasetDTO,
            @RequestPart(value = "file", required = false) MultipartFile file) {
        log.info("新增数据集: {}, 文件: {}", datasetDTO, (file != null ? file.getOriginalFilename() : "无文件"));
        try {
            datasetService.save(datasetDTO, file);
            return OptResult.success();
        } catch (Exception e) {
            log.error("新增数据集失败", e);
            return OptResult.error("新增数据集失败: " + e.getMessage());
        }
    }

    /**
@@ -43,17 +59,17 @@ public class DatasetController {
        return OptResult.success(pageResult);
    }


    /**
     * Update a dataset
     * @param datasetDTO
     * @return
     */
    @Operation(summary = "修改数据集")
    @PutMapping
    public OptResult update(@RequestBody DatasetDTO datasetDTO){
        log.info("修改数据集", datasetDTO);
        datasetService.update(datasetDTO);
    @PutMapping(consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    public OptResult update(@ModelAttribute("dataset") DatasetDTO datasetDTO,
                            @RequestPart(value = "file", required = false) MultipartFile file){
        log.info("修改数据集{}, 文件: {}", datasetDTO, (file != null ? file.getOriginalFilename() : "无文件"));
        datasetService.update(datasetDTO, file);
        return OptResult.success();
    }

@@ -72,4 +88,34 @@ public class DatasetController {
        return OptResult.success("批量删除成功");
    }

    /**
     * Download a dataset
     * @param datasetId dataset ID
     * @return the dataset file
     */
    @Operation(summary = "下载数据集")
    @GetMapping("/download/{datasetId}")
    public ResponseEntity<byte[]> downloadDataset(@PathVariable Long datasetId) {
        log.info("下载数据集,ID:{}", datasetId);
        try {
            // TODO: fetch the dataset file's input stream from the distributed storage system
            InputStream inputStream = datasetService.downloadDataset(datasetId);
            if (inputStream == null) {
                return new ResponseEntity<>(HttpStatus.NOT_FOUND);
            }
            byte[] fileBytes = inputStream.readAllBytes();
            inputStream.close();

            // Set response headers
            HttpHeaders headers = new HttpHeaders();
            headers.setContentType(MediaType.APPLICATION_OCTET_STREAM);
            headers.setContentDispositionFormData("attachment", "dataset_" + datasetId + ".zip");

            return new ResponseEntity<>(fileBytes, headers, HttpStatus.OK);
        } catch (IOException e) {
            log.error("下载数据集失败", e);
            return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

}
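downloadDataset buffers the entire file in memory with readAllBytes() before responding, which is fine for small datasets but costly for large ones. A hedged sketch of a streaming alternative built on the same DatasetService API, using Spring's InputStreamResource; the controller class name and "/download-stream" mapping are illustrative only, not part of this PR:

    package com.bipt.intelligentapplicationorchestrationservice.controller;

    import com.bipt.intelligentapplicationorchestrationservice.service.DatasetService;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.core.io.InputStreamResource;
    import org.springframework.http.HttpHeaders;
    import org.springframework.http.HttpStatus;
    import org.springframework.http.MediaType;
    import org.springframework.http.ResponseEntity;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.PathVariable;
    import org.springframework.web.bind.annotation.RequestMapping;
    import org.springframework.web.bind.annotation.RestController;

    import java.io.InputStream;

    @RestController
    @RequestMapping("/dataset")
    public class DatasetStreamingDownloadController {

        @Autowired
        private DatasetService datasetService;

        // Streams the file instead of calling readAllBytes(), so large datasets
        // are never fully buffered on the heap.
        @GetMapping("/download-stream/{datasetId}")
        public ResponseEntity<InputStreamResource> download(@PathVariable Long datasetId) {
            InputStream in = datasetService.downloadDataset(datasetId);
            if (in == null) {
                return new ResponseEntity<>(HttpStatus.NOT_FOUND);
            }
            HttpHeaders headers = new HttpHeaders();
            headers.setContentType(MediaType.APPLICATION_OCTET_STREAM);
            headers.setContentDispositionFormData("attachment", "dataset_" + datasetId + ".zip");
            // Spring's resource converter copies and closes the stream while writing the body.
            return new ResponseEntity<>(new InputStreamResource(in), headers, HttpStatus.OK);
        }
    }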
@@ -29,4 +29,12 @@ public interface AlgorithmInfoMapper {

    @Delete("DELETE FROM algorithm_info WHERE id = #{id}")
    int deleteById(Long id);
}
    @Select("select description from algorithm_info where id = #{id}")
    String getDescriptionById(Long id);
    @Select("select algorithm_file from algorithm_info where id = #{id}")
    String getFileById(Long id);
    @Select("select algorithm_name from algorithm_info")
    List<String> getAllNames();

    List<AlgorithmInfo> selectByNameLike(String keyword);
}
@@ -19,6 +19,7 @@ public class DatasetDTO implements Serializable {
    private Long datasetId;
    private String datasetName;
    private int datasetType;
    private int datasetStatus;
    private String dsPath;
    // private Map<String,String> args;
    private String args;
@@ -20,11 +20,11 @@ public class DatasetPageQueryDTO implements Serializable{
    private int page;
    private int pageSize;
    private String datasetName;
    private int datasetType;
    /* private int datasetType;
    private int datasetStatus;
    private String dsPath;
    private String args;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;
    private LocalDateTime updateTime;*/

}
@@ -17,6 +17,7 @@ import java.util.Map;
@NoArgsConstructor
@AllArgsConstructor
public class DatasetVO implements Serializable {
    private Long datasetId;
    private String datasetName;
    private Integer datasetType;
    private Integer datasetStatus;
@@ -1,6 +1,7 @@
package com.bipt.intelligentapplicationorchestrationservice.service;

import com.bipt.intelligentapplicationorchestrationservice.pojo.AlgorithmInfo;
import org.springframework.web.multipart.MultipartFile;

import java.util.List;

@@ -12,6 +13,11 @@ public interface AlgorithmInfoService {
    boolean delete(Long id);
    boolean validateAlgorithmInfo(AlgorithmInfo algorithmInfo);

    void save(AlgorithmInfo algorithmInfo);
    void save(AlgorithmInfo algorithmInfo, MultipartFile file);

    String run(Long id, String param);

    List<String> getAllNames();

    List<AlgorithmInfo> getByNameLike(String keyword);
}
@@ -3,18 +3,22 @@ package com.bipt.intelligentapplicationorchestrationservice.service;
import com.bipt.intelligentapplicationorchestrationservice.pojo.DatasetDTO;
import com.bipt.intelligentapplicationorchestrationservice.pojo.DatasetPageQueryDTO;
import com.bipt.intelligentapplicationorchestrationservice.pojo.PageResult;
import org.springframework.web.multipart.MultipartFile;

import java.io.InputStream;
import java.util.List;

/**
 * @author hky
 */
public interface DatasetService {
    void save(DatasetDTO datasetDTO);
    void save(DatasetDTO datasetDTO, MultipartFile file);

    void update(DatasetDTO datasetDTO);
    void update(DatasetDTO datasetDTO, MultipartFile file);

    PageResult pageQuery(DatasetPageQueryDTO dataSetPageQueryDTO);

    void deleteBatch(List<Long> datasetIds);

    InputStream downloadDataset(Long datasetId);
}
@@ -3,19 +3,28 @@ package com.bipt.intelligentapplicationorchestrationservice.service.Impl;
import com.bipt.intelligentapplicationorchestrationservice.mapper.AlgorithmInfoMapper;
import com.bipt.intelligentapplicationorchestrationservice.pojo.AlgorithmInfo;
import com.bipt.intelligentapplicationorchestrationservice.service.AlgorithmInfoService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.time.LocalDateTime;
import java.util.List;

@Service
@Slf4j
public class AlgorithmInfoServiceImpl implements AlgorithmInfoService {

    @Autowired
    private AlgorithmInfoMapper algorithmInfoMapper;

    @Value("${algorithm.upload.dir:/tmp/algorithm-files/}") // default upload directory
    private String uploadDir;
    @Override
    public AlgorithmInfo getById(Long id) {
        return algorithmInfoMapper.selectById(id);
@@ -61,22 +70,80 @@ public class AlgorithmInfoServiceImpl implements AlgorithmInfoService {
        return true;
    }

    /**
     * Create an algorithm
     * @param algorithmInfo
     */
    @Override
    @Transactional
    public void save(AlgorithmInfo algorithmInfo) {
    public void save(AlgorithmInfo algorithmInfo, MultipartFile file) {
        String algorithmName = algorithmInfo.getAlgorithmName();
        // Look for a duplicate algorithm in the table and fail if one exists
        // Check for an algorithm with the same name
        AlgorithmInfo duplicateName = algorithmInfoMapper.selectByName(algorithmName);
        if (duplicateName != null){
            throw new RuntimeException("算法已存在");
        if (duplicateName != null) {
            throw new RuntimeException("算法已存在,请去修改算法");
        }
        // TODO: store the algorithm file in distributed storage

        // Receive the file only; no persistence yet
        if (file != null && !file.isEmpty()) {
            log.info("已接收文件: {}", file.getOriginalFilename());
            log.info("文件大小: {} 字节", file.getSize());
            log.info("文件类型: {}", file.getContentType());
            // Temporarily set an empty path (avoid persisting a null value)
            // TODO: save to distributed storage
            algorithmInfo.setAlgorithmFile("");
        }

        algorithmInfo.setCreateTime(LocalDateTime.now());
        // Save the algorithm info to the database (note: algorithmFile is still empty here)
        algorithmInfoMapper.insert(algorithmInfo);
    }

    @Override
    public String run(Long id, String param) {
        String file = algorithmInfoMapper.getFileById(id);
        StringBuilder result = new StringBuilder(); // collects the output

        try {
            // Build the command, passing param as an argument to the Python script
            ProcessBuilder pb = new ProcessBuilder("python", file, param);
            Process process = pb.start();

            // Read standard output (the script's result)
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getInputStream()));
            String line;
            while ((line = reader.readLine()) != null) {
                result.append(line).append("\n");
            }

            // Read the error output
            BufferedReader errorReader = new BufferedReader(
                    new InputStreamReader(process.getErrorStream()));
            String errorLine;
            while ((errorLine = errorReader.readLine()) != null) {
                result.append("Error: ").append(errorLine).append("\n");
            }

            int exitCode = process.waitFor();
            result.append("Exit Code: ").append(exitCode);

        } catch (Exception e) {
            result.append("执行异常: ").append(e.getMessage());
            e.printStackTrace();
        }

        return result.toString(); // return the full result
    }

    @Override
    public List<String> getAllNames() {
        return algorithmInfoMapper.getAllNames();
    }

    /**
     * Fuzzy search
     * @param keyword
     * @return
     */
    @Override
    public List<AlgorithmInfo> getByNameLike(String keyword) {
        return algorithmInfoMapper.selectByNameLike(keyword);
    }
}
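run() drains stdout and stderr sequentially on the request thread and then calls waitFor() with no timeout, so a script that never exits or never closes its streams can hang the request indefinitely. A hedged sketch of a bounded variant of the same "python <script> <param>" invocation; the class name and the 60-second limit are illustrative assumptions, not part of this PR:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;
    import java.util.stream.Collectors;

    public final class BoundedScriptRunner {

        public static String run(String scriptPath, String param) throws Exception {
            ProcessBuilder pb = new ProcessBuilder("python", scriptPath, param);
            pb.redirectErrorStream(true);   // merge stderr into stdout, one reader is enough
            Process process = pb.start();

            // Drain output on a background thread so the timeout below still fires
            // even if the script keeps its stdout open.
            CompletableFuture<String> output = CompletableFuture.supplyAsync(() -> {
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                    return reader.lines().collect(Collectors.joining("\n"));
                } catch (Exception e) {
                    return "read error: " + e.getMessage();
                }
            });

            if (!process.waitFor(60, TimeUnit.SECONDS)) {
                process.destroyForcibly();  // also unblocks the reader thread
                throw new IllegalStateException("script timed out: " + scriptPath);
            }
            return output.get(5, TimeUnit.SECONDS) + "\nExit Code: " + process.exitValue();
        }
    }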
@@ -12,12 +12,17 @@ import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.util.Collections;
import java.util.List;
import java.util.UUID;

import static com.bipt.intelligentapplicationorchestrationservice.enumeration.DatasetType.FROM_DATABASE;
import static com.bipt.intelligentapplicationorchestrationservice.enumeration.DatasetType.UPLOAD;

@Service
@Slf4j
@@ -25,63 +30,65 @@ public class DatasetServiceImpl implements DatasetService {
    @Autowired
    private DatasetMapper datasetMapper;

    /**
     * Create a dataset
     * @param datasetDTO
     */
    @Override
    @Transactional
    public void save(DatasetDTO datasetDTO) {
        // Determine the dataset type: save it if it is a local upload; if it comes from the data warehouse, go to the next step
        // Get the dataset type
    public void save(DatasetDTO datasetDTO, MultipartFile file) {
        DatasetType datasetType = DatasetType.fromCode(datasetDTO.getDatasetType());
        // Handle the data according to its type
        switch (datasetType) {
            case UPLOAD:
                // TODO: save to the distributed file system
                break;
            case FROM_DATABASE:
                String args = datasetDTO.getArgs();
                // TODO: pull data from the data warehouse according to the filter conditions
        if (datasetType == UPLOAD && file != null && !file.isEmpty()) {
            try {
                // TODO: store in the distributed file system

                // TODO: pull from the data warehouse and save to the distributed file system
                break;
            default:
                throw new IllegalArgumentException("不支持的数据集类型: " + datasetType);
                // Generate a file name (a UUID is recommended to avoid collisions)
                String fileName = UUID.randomUUID() + "-" + file.getOriginalFilename();
                // Assumes local storage (a real deployment would use a distributed file system such as MinIO or OSS)
                String filePath = "/your/storage/path/" + fileName;
                // Save the file to disk (sample code; IO exceptions must be handled)
                file.transferTo(new File(filePath));
                // Update the dataset path
                datasetDTO.setDsPath(filePath); // uncommented; requires DatasetDTO to have a setDsPath method
            } catch (IOException e) {
                throw new RuntimeException("文件上传失败: " + e.getMessage());
            }
        } else if (datasetType == FROM_DATABASE){
            // TODO: query the data warehouse with the selected filter conditions
        }

        // Save the dataset entity
        DatasetEntity datasetEntity = new DatasetEntity();
        BeanUtils.copyProperties(datasetDTO,datasetEntity);
        datasetEntity.setDatasetStatus(StatusConstant.ENABLE);
        BeanUtils.copyProperties(datasetDTO, datasetEntity);
        datasetEntity.setCreateTime(LocalDateTime.now());
        datasetEntity.setUpdateTime(LocalDateTime.now());
        datasetMapper.insert(datasetEntity);

    }


    /**
     * Update a dataset
     *
     * @param datasetDTO
     * @param file
     */
    @Override
    @Transactional
    public void update(DatasetDTO datasetDTO) {
        /*DatasetEntity datasetEntity = new DatasetEntity();
        BeanUtils.copyProperties(datasetDTO,datasetEntity);*/
    public void update(DatasetDTO datasetDTO, MultipartFile file) {
        DatasetType datasetType = DatasetType.fromCode(datasetDTO.getDatasetType());
        // Handle the data according to its type
        switch (datasetType) {
            case UPLOAD:
                // TODO: overwrite the copy stored in the distributed file system
                break;
            case FROM_DATABASE:
                // TODO: overwrite the data file

                break;
            default:
                throw new IllegalArgumentException("不支持的数据集类型: " + datasetType);
        if (datasetType == UPLOAD && file != null && !file.isEmpty()) {
            try {
                // Generate a file name (a UUID is recommended to avoid collisions)
                String fileName = UUID.randomUUID() + "-" + file.getOriginalFilename();
                // Assumes local storage (a real deployment would use a distributed file system such as MinIO or OSS)
                String filePath = "/your/storage/path/" + fileName;
                // Save the file to disk (sample code; IO exceptions must be handled)
                file.transferTo(new File(filePath));
                // Update the dataset path
                datasetDTO.setDsPath(filePath); // uncommented; requires DatasetDTO to have a setDsPath method
            } catch (IOException e) {
                throw new RuntimeException("文件上传失败: " + e.getMessage());
            }
        } else if (datasetType == FROM_DATABASE){
            // TODO: query the data warehouse with the selected filter conditions
        }

        DatasetEntity datasetEntity = new DatasetEntity();
        BeanUtils.copyProperties(datasetDTO, datasetEntity);
        datasetEntity.setUpdateTime(LocalDateTime.now());
@@ -112,4 +119,14 @@ public class DatasetServiceImpl implements DatasetService {
        datasetMapper.deleteBatch(datasetIds);
    }

    /**
     * Download
     * @param datasetId
     * @return
     */
    @Override
    public InputStream downloadDataset(Long datasetId) {
        // TODO: fetch the dataset file's input stream from the distributed storage system
        return null;
    }
}
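Both save and update write the upload to the hard-coded "/your/storage/path/" prefix with transferTo, which fails if that directory does not exist on disk. A hedged helper sketch that creates the target directory first; the class name and the idea of passing the base directory in from configuration are assumptions, and MultipartFile#transferTo(Path) requires Spring 5.1+:

    import org.springframework.web.multipart.MultipartFile;

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.UUID;

    public final class LocalUploadStore {

        public static String store(MultipartFile file, String baseDir) throws IOException {
            Path dir = Paths.get(baseDir);
            Files.createDirectories(dir);            // no-op if the directory already exists
            String fileName = UUID.randomUUID() + "-" + file.getOriginalFilename();
            Path target = dir.resolve(fileName);
            file.transferTo(target);                 // MultipartFile#transferTo(Path), Spring 5.1+
            return target.toString();                // value to store in dsPath
        }
    }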
@@ -40,4 +40,10 @@ logging.level.org.springframework.web=DEBUG
logging.level.com.bipt.intelligentapplicationorchestrationservice.mapper=DEBUG
mybatis.configuration.log-impl=org.apache.ibatis.logging.slf4j.Slf4jImpl

management.health.rabbit.enabled=false
management.health.rabbit.enabled=false

# Maximum size of a single uploaded file
spring.servlet.multipart.max-file-size=10MB
# Maximum size of a single request (may contain multiple files)
spring.servlet.multipart.max-request-size=15MB
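For reference, the same two multipart limits can be set programmatically when they need to be computed at startup. A hedged sketch of that equivalent, assuming Spring Boot 2.x (javax.servlet; Boot 3 uses jakarta.servlet instead); the configuration class name is illustrative:

    import org.springframework.boot.web.servlet.MultipartConfigFactory;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.util.unit.DataSize;

    import javax.servlet.MultipartConfigElement;

    @Configuration
    public class MultipartLimitsConfig {

        @Bean
        public MultipartConfigElement multipartConfigElement() {
            MultipartConfigFactory factory = new MultipartConfigFactory();
            factory.setMaxFileSize(DataSize.ofMegabytes(10));     // spring.servlet.multipart.max-file-size
            factory.setMaxRequestSize(DataSize.ofMegabytes(15));  // spring.servlet.multipart.max-request-size
            return factory.createMultipartConfig();
        }
    }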
src/main/resources/mapper/AlgorithmInfoMapper.xml (new file, 8 lines)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.bipt.intelligentapplicationorchestrationservice.mapper.AlgorithmInfoMapper">

    <select id="selectByNameLike" resultType="com.bipt.intelligentapplicationorchestrationservice.pojo.AlgorithmInfo">
        SELECT * FROM algorithm_info WHERE algorithm_name LIKE CONCAT('%', #{keyword}, '%')
    </select>
</mapper>
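This new XML file backs selectByNameLike, the one AlgorithmInfoMapper method without an annotation. For comparison, a hedged sketch of the same query written with MyBatis's @Select annotation instead of XML; the interface name here is hypothetical and this is not part of the PR:

    package com.bipt.intelligentapplicationorchestrationservice.mapper;

    import com.bipt.intelligentapplicationorchestrationservice.pojo.AlgorithmInfo;
    import org.apache.ibatis.annotations.Mapper;
    import org.apache.ibatis.annotations.Select;

    import java.util.List;

    @Mapper
    public interface AlgorithmInfoFuzzySearchMapper {
        // Annotation-based equivalent of the XML <select id="selectByNameLike">.
        @Select("SELECT * FROM algorithm_info WHERE algorithm_name LIKE CONCAT('%', #{keyword}, '%')")
        List<AlgorithmInfo> selectByNameLike(String keyword);
    }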
@@ -44,24 +44,6 @@
            <if test="datasetName != null and datasetName!=''">
                dataset_name LIKE CONCAT('%', #{datasetName}, '%')
            </if>
            <if test="datasetType != null">
                and dataset_type=#{datasetType}
            </if>
            <if test="datasetStatus != null">
                and dataset_status=#{datasetStatus}
            </if>
            <if test="dsPath != null">
                and ds_path=#{dsPath}
            </if>
            <if test="args != null">
                and args=#{args}
            </if>
            <if test="createTime != null">
                and create_time=#{createTime}
            </if>
            <if test="updateTime != null">
                and update_time=#{updateTime}
            </if>
        </where>
    </select>
</mapper>