// FileChunkController — chunked file upload REST endpoints.
package com.unisinsight.project.controller;

import com.unisinsight.project.entity.dao.Image;
import com.unisinsight.project.mapper.ImageMapper;
import com.unisinsight.project.util.DigestUtil;
import io.swagger.annotations.*;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
*
*/
@RestController
@RequestMapping("/api/files")
@Api(tags = "文件分片上传接口")
2025-08-08 08:11:01 +00:00
@Slf4j
2025-08-05 09:22:16 +00:00
public class FileChunkController {
// 临时目录,用于存储上传的分片
2025-08-08 08:11:01 +00:00
@Value("${file.upload.temp-dir:${java.io.tmpdir}/chunked-uploads}")
2025-08-05 09:22:16 +00:00
private String tempDir;
// 最终文件存储目录
2025-08-08 08:11:01 +00:00
@Value("${file.upload.dir:${user.home}/uploads}")
2025-08-05 09:22:16 +00:00
private String uploadDir;
// 存储每个文件的分片信息
private final Map<String, FileUploadInfo> fileUploadMap = new ConcurrentHashMap<>();
2025-08-08 08:11:01 +00:00
@Resource
private ImageMapper imageMapper;
2025-08-05 09:22:16 +00:00
/**
*
2025-08-08 08:11:01 +00:00
*
* @param chunk
* @param fileId
2025-08-05 09:22:16 +00:00
* @param chunkNumber (1)
* @param totalChunks
2025-08-08 08:11:01 +00:00
* @param fileName
* @param totalSize
2025-08-05 09:22:16 +00:00
* @return
*/
@PostMapping("/upload-chunk")
@ApiOperation(value = "上传文件分片", notes = "上传单个文件分片,当所有分片上传完成后自动合并文件")
@ApiImplicitParams({
@ApiImplicitParam(name = "chunk", value = "文件分片", required = true, dataType = "__File", paramType = "form"),
2025-08-08 08:11:01 +00:00
@ApiImplicitParam(name = "chunk_size", value = "文件分片大小", required = true, dataType = "int", paramType = "query"),
@ApiImplicitParam(name = "chunk_md5", value = "文件分片md5", required = true, dataType = "String", paramType = "query"),
@ApiImplicitParam(name = "file_id", value = "文件唯一标识符", required = true, dataType = "String", paramType = "query"),
@ApiImplicitParam(name = "shard_index", value = "当前分片编号(从1开始)", required = true, dataType = "int", paramType = "query"),
@ApiImplicitParam(name = "shard_total", value = "总分片数", required = true, dataType = "int", paramType = "query"),
@ApiImplicitParam(name = "file_name", value = "原始文件名", required = true, dataType = "String", paramType = "query"),
@ApiImplicitParam(name = "file_size", value = "文件总大小", required = true, dataType = "long", paramType = "query")
2025-08-05 09:22:16 +00:00
})
@ApiResponses({
@ApiResponse(code = 200, message = "上传成功"),
@ApiResponse(code = 500, message = "服务器内部错误")
})
public ResponseEntity<Map<String, Object>> uploadChunk(
@RequestParam("chunk") MultipartFile chunk,
2025-08-08 08:11:01 +00:00
@RequestParam("chunk_size") int chunkSize,
@RequestParam("chunk_md5") String chunkMd5,
@RequestParam("file_id") String fileId,
@RequestParam("shard_index") int chunkNumber,
@RequestParam("shard_total") int totalChunks,
@RequestParam("file_name") String fileName,
@RequestParam("file_size") long totalSize) {
2025-08-05 09:22:16 +00:00
Map<String, Object> response = new HashMap<>();
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
try {
2025-08-08 08:11:01 +00:00
String md5 = DigestUtil.encryptMd5(chunk.getBytes());
if (!chunkMd5.equals(md5)) {
log.info("分片文件md5校验失败,chunkMd5:{},md5:{}", chunkMd5, md5);
throw new RuntimeException("分片文件md5校验失败");
}
2025-08-05 09:22:16 +00:00
// 创建临时目录
Path fileTempDir = Paths.get(tempDir, fileId);
if (!Files.exists(fileTempDir)) {
Files.createDirectories(fileTempDir);
}
2025-08-08 08:11:01 +00:00
log.info("创建临时目录: {}", fileTempDir);
log.info("上传分片文件: {}", fileName);
2025-08-05 09:22:16 +00:00
// 保存分片文件
String chunkFileName = String.format("%05d.part", chunkNumber);
2025-08-08 08:11:01 +00:00
log.info("保存分片文件: {}", chunkFileName);
2025-08-05 09:22:16 +00:00
Path chunkFilePath = fileTempDir.resolve(chunkFileName);
chunk.transferTo(chunkFilePath);
// 更新文件上传信息
2025-08-08 08:11:01 +00:00
FileUploadInfo uploadInfo = fileUploadMap.computeIfAbsent(fileId,
id -> new FileUploadInfo(id, fileName, totalChunks, totalSize));
2025-08-05 09:22:16 +00:00
uploadInfo.addUploadedChunk(chunkNumber);
2025-08-08 08:11:01 +00:00
// log.info("更新文件上传信息: {}", JSONUtil.toJsonStr(uploadInfo));
2025-08-05 09:22:16 +00:00
// 检查是否所有分片都已上传
if (uploadInfo.isUploadComplete()) {
// 合并文件
Path finalDir = Paths.get(uploadDir);
if (!Files.exists(finalDir)) {
Files.createDirectories(finalDir);
}
2025-08-08 08:11:01 +00:00
log.info("合并文件: {}", finalDir);
2025-08-05 09:22:16 +00:00
Path finalFilePath = finalDir.resolve(fileName);
2025-08-08 08:11:01 +00:00
log.info("合并所有分片文件: {}", finalFilePath.getFileName());
2025-08-05 09:22:16 +00:00
mergeChunks(fileId, finalFilePath, totalChunks);
// 清理临时文件
2025-08-08 08:11:01 +00:00
log.info("清理临时文件: {}", fileId);
2025-08-05 09:22:16 +00:00
cleanupTempFiles(fileId);
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
// 从上传映射中移除
fileUploadMap.remove(fileId);
2025-08-08 08:11:01 +00:00
Image image = new Image();
image.setImageName(fileName);
image.setStoragePath(uploadDir);
image.setImageStatus(1);
int insert = imageMapper.insert(image);
log.info("镜像新增insert:{}", insert);
if (insert == 1) {
response.put("status", "completed");
response.put("message", "文件上传并合并完成");
response.put("filePath", finalFilePath.toString());
} else {
throw new RuntimeException("文件上传失败");
}
2025-08-05 09:22:16 +00:00
} else {
response.put("status", "uploading");
response.put("message", "分片上传成功");
response.put("uploadedChunks", uploadInfo.getUploadedChunks().size());
response.put("totalChunks", totalChunks);
}
response.put("success", true);
return ResponseEntity.ok(response);
} catch (Exception e) {
response.put("success", false);
2025-08-08 08:11:01 +00:00
response.put("status", "error");
2025-08-05 09:22:16 +00:00
response.put("message", "上传失败: " + e.getMessage());
2025-08-08 08:11:01 +00:00
log.info("上次失败清理临时文件: {}", fileId);
try {
cleanupTempFiles(fileId);
cleanUploadFile(fileName);
} catch (IOException ex) {
log.error("清理临时文件失败,fileId:{}, {}", fileId, ex.getMessage(), ex);
}
2025-08-05 09:22:16 +00:00
return ResponseEntity.status(500).body(response);
}
}
/**
*
2025-08-08 08:11:01 +00:00
*
2025-08-05 09:22:16 +00:00
* @param fileId
* @return
*/
@GetMapping("/upload-status/{fileId}")
@ApiOperation("查询文件上传状态")
public ResponseEntity<Map<String, Object>> getUploadStatus(@PathVariable String fileId) {
Map<String, Object> response = new HashMap<>();
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
FileUploadInfo uploadInfo = fileUploadMap.get(fileId);
if (uploadInfo == null) {
// 检查文件是否已经完成上传并合并
try {
Path finalFilePath = Paths.get(uploadDir, fileId);
if (Files.exists(finalFilePath)) {
response.put("status", "completed");
response.put("message", "文件上传已完成");
response.put("filePath", finalFilePath.toString());
} else {
response.put("status", "not_found");
response.put("message", "文件上传信息不存在");
}
} catch (Exception e) {
response.put("status", "error");
response.put("message", "查询状态失败: " + e.getMessage());
}
} else {
response.put("status", "uploading");
response.put("uploadedChunks", uploadInfo.getUploadedChunks().size());
response.put("totalChunks", uploadInfo.getTotalChunks());
response.put("progress", (double) uploadInfo.getUploadedChunks().size() / uploadInfo.getTotalChunks());
}
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
response.put("success", true);
return ResponseEntity.ok(response);
}
/**
*
2025-08-08 08:11:01 +00:00
*
* @param fileId
* @param outputPath
2025-08-05 09:22:16 +00:00
* @param totalChunks
* @throws IOException IO
*/
private void mergeChunks(String fileId, Path outputPath, int totalChunks) throws IOException {
try (OutputStream outputStream = Files.newOutputStream(outputPath)) {
Path fileTempDir = Paths.get(tempDir, fileId);
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
// 按顺序合并分片
for (int i = 1; i <= totalChunks; i++) {
String chunkFileName = String.format("%05d.part", i);
Path chunkPath = fileTempDir.resolve(chunkFileName);
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
if (!Files.exists(chunkPath)) {
throw new IOException("缺少分片文件: " + chunkFileName);
}
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
// 将分片内容追加到输出文件
Files.copy(chunkPath, outputStream);
}
}
}
/**
*
2025-08-08 08:11:01 +00:00
*
2025-08-05 09:22:16 +00:00
* @param fileId
* @throws IOException IO
*/
private void cleanupTempFiles(String fileId) throws IOException {
Path fileTempDir = Paths.get(tempDir, fileId);
if (Files.exists(fileTempDir)) {
// 递归删除临时目录及其内容
Files.walk(fileTempDir)
2025-08-08 08:11:01 +00:00
.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(File::delete);
}
}
/**
*
*
* @param fileName
* @throws IOException IO
*/
private void cleanUploadFile(String fileName) throws IOException {
Path filePath = Paths.get(uploadDir, fileName);
if (Files.exists(filePath)) {
// 删除文件
Files.delete(filePath);
log.info("已删除文件: {}", filePath);
} else {
log.warn("文件不存在,无需删除: {}", filePath);
2025-08-05 09:22:16 +00:00
}
}
2025-08-08 08:11:01 +00:00
2025-08-05 09:22:16 +00:00
/**
*
*/
2025-08-08 08:11:01 +00:00
@Data
2025-08-05 09:22:16 +00:00
private static class FileUploadInfo {
private final String fileId;
private final String fileName;
private final int totalChunks;
private final long totalSize;
private final Set<Integer> uploadedChunks;
public FileUploadInfo(String fileId, String fileName, int totalChunks, long totalSize) {
this.fileId = fileId;
this.fileName = fileName;
this.totalChunks = totalChunks;
this.totalSize = totalSize;
this.uploadedChunks = ConcurrentHashMap.newKeySet();
}
public void addUploadedChunk(int chunkNumber) {
uploadedChunks.add(chunkNumber);
}
public boolean isUploadComplete() {
return uploadedChunks.size() == totalChunks;
}
}
}