Compare commits
6 Commits
80b2f1b39a
...
bb16fb2369
| Author | SHA1 | Date | |
|---|---|---|---|
| bb16fb2369 | |||
| 9aa7dd7a2e | |||
| d6a791f59f | |||
| 109b5220b2 | |||
| bda89202ba | |||
| ee31f74aef | | | |
@@ -28,12 +28,12 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 亲属资产信息Controller
|
||||
* 亲属资产信息导入Controller
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-03-12
|
||||
*/
|
||||
@Tag(name = "亲属资产信息管理")
|
||||
@Tag(name = "亲属资产信息导入管理")
|
||||
@RestController
|
||||
@RequestMapping("/ccdi/assetInfo")
|
||||
public class CcdiAssetInfoController extends BaseController {
|
||||
|
||||
@@ -0,0 +1,101 @@
|
||||
package com.ruoyi.info.collection.controller;
|
||||
|
||||
import com.ruoyi.common.annotation.Log;
|
||||
import com.ruoyi.common.core.controller.BaseController;
|
||||
import com.ruoyi.common.core.domain.AjaxResult;
|
||||
import com.ruoyi.common.core.page.TableDataInfo;
|
||||
import com.ruoyi.common.enums.BusinessType;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiBaseStaffAssetInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.BaseStaffAssetImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportResultVO;
|
||||
import com.ruoyi.info.collection.service.ICcdiBaseStaffAssetImportService;
|
||||
import com.ruoyi.info.collection.utils.EasyExcelUtil;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import jakarta.annotation.Resource;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 员工资产信息导入Controller
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-03-13
|
||||
*/
|
||||
@Tag(name = "员工资产信息导入管理")
|
||||
@RestController
|
||||
@RequestMapping("/ccdi/baseStaff/asset")
|
||||
public class CcdiBaseStaffAssetImportController extends BaseController {
|
||||
|
||||
@Resource
|
||||
private ICcdiBaseStaffAssetImportService baseStaffAssetImportService;
|
||||
|
||||
/**
|
||||
* 下载导入模板
|
||||
*/
|
||||
@Operation(summary = "下载员工资产导入模板")
|
||||
@PostMapping("/importTemplate")
|
||||
public void importTemplate(HttpServletResponse response) {
|
||||
EasyExcelUtil.importTemplateWithDictDropdown(response, CcdiBaseStaffAssetInfoExcel.class, "员工资产信息");
|
||||
}
|
||||
|
||||
/**
|
||||
* 导入员工资产信息
|
||||
*/
|
||||
@Operation(summary = "导入员工资产信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:employee:import')")
|
||||
@Log(title = "员工资产信息", businessType = BusinessType.IMPORT)
|
||||
@PostMapping("/importData")
|
||||
public AjaxResult importData(MultipartFile file) throws Exception {
|
||||
List<CcdiBaseStaffAssetInfoExcel> list = EasyExcelUtil.importExcel(file.getInputStream(), CcdiBaseStaffAssetInfoExcel.class);
|
||||
if (list == null || list.isEmpty()) {
|
||||
return warn("至少需要一条数据");
|
||||
}
|
||||
|
||||
String taskId = baseStaffAssetImportService.importAssetInfo(list);
|
||||
ImportResultVO result = new ImportResultVO();
|
||||
result.setTaskId(taskId);
|
||||
result.setStatus("PROCESSING");
|
||||
result.setMessage("导入任务已提交,正在后台处理");
|
||||
return AjaxResult.success("导入任务已提交,正在后台处理", result);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询导入状态
|
||||
*/
|
||||
@Operation(summary = "查询员工资产导入状态")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:employee:import')")
|
||||
@GetMapping("/importStatus/{taskId}")
|
||||
public AjaxResult getImportStatus(@PathVariable String taskId) {
|
||||
return success(baseStaffAssetImportService.getImportStatus(taskId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询导入失败记录
|
||||
*/
|
||||
@Operation(summary = "查询员工资产导入失败记录")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:employee:import')")
|
||||
@GetMapping("/importFailures/{taskId}")
|
||||
public TableDataInfo getImportFailures(
|
||||
@PathVariable String taskId,
|
||||
@RequestParam(defaultValue = "1") Integer pageNum,
|
||||
@RequestParam(defaultValue = "10") Integer pageSize) {
|
||||
List<BaseStaffAssetImportFailureVO> failures = baseStaffAssetImportService.getImportFailures(taskId);
|
||||
int fromIndex = (pageNum - 1) * pageSize;
|
||||
int toIndex = Math.min(fromIndex + pageSize, failures.size());
|
||||
if (fromIndex >= failures.size()) {
|
||||
return getDataTable(new ArrayList<>(), failures.size());
|
||||
}
|
||||
return getDataTable(failures.subList(fromIndex, toIndex), failures.size());
|
||||
}
|
||||
}
|
||||
@@ -24,8 +24,8 @@ public class CcdiAssetInfoExcel implements Serializable {
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/** 关系人证件号 */
|
||||
@ExcelProperty(value = "关系人证件号*", index = 0)
|
||||
/** 亲属证件号 */
|
||||
@ExcelProperty(value = "亲属证件号*", index = 0)
|
||||
@ColumnWidth(22)
|
||||
@Required
|
||||
@TextFormat
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
package com.ruoyi.info.collection.domain.excel;
|
||||
|
||||
import com.alibaba.excel.annotation.ExcelProperty;
|
||||
import com.alibaba.excel.annotation.write.style.ColumnWidth;
|
||||
import com.ruoyi.common.annotation.DictDropdown;
|
||||
import com.ruoyi.common.annotation.Required;
|
||||
import com.ruoyi.common.annotation.TextFormat;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Date;
|
||||
|
||||
/**
 * Excel import/export row for staff (employee) asset information.
 *
 * Column positions are fixed by the {@code index} attribute on each field;
 * {@code @Required}, {@code @TextFormat} and {@code @DictDropdown} drive
 * template generation and validation (project annotations — see EasyExcelUtil).
 *
 * @author ruoyi
 * @date 2026-03-13
 */
@Data
public class CcdiBaseStaffAssetInfoExcel implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    // Staff ID-card number (column 0, required, text-formatted cell).
    @ExcelProperty(value = "员工身份证号*", index = 0)
    @ColumnWidth(22)
    @Required
    @TextFormat
    private String personId;

    // Asset main category (column 1, required).
    @ExcelProperty(value = "资产大类*", index = 1)
    @ColumnWidth(16)
    @Required
    private String assetMainType;

    // Asset sub-category (column 2, required).
    @ExcelProperty(value = "资产小类*", index = 2)
    @ColumnWidth(18)
    @Required
    private String assetSubType;

    // Asset name (column 3, required).
    @ExcelProperty(value = "资产名称*", index = 3)
    @ColumnWidth(24)
    @Required
    private String assetName;

    // Ownership ratio (column 4, optional).
    @ExcelProperty(value = "产权占比", index = 4)
    @ColumnWidth(12)
    private BigDecimal ownershipRatio;

    // Purchase/appraisal date (column 5, optional).
    @ExcelProperty(value = "购买/评估日期", index = 5)
    @ColumnWidth(16)
    private Date purchaseEvalDate;

    // Original asset value (column 6, optional).
    @ExcelProperty(value = "资产原值", index = 6)
    @ColumnWidth(16)
    private BigDecimal originalValue;

    // Current valuation (column 7, required).
    @ExcelProperty(value = "当前估值*", index = 7)
    @ColumnWidth(16)
    @Required
    private BigDecimal currentValue;

    // Valuation cut-off date (column 8, optional).
    @ExcelProperty(value = "估值截止日期", index = 8)
    @ColumnWidth(16)
    private Date valuationDate;

    // Asset status (column 9, required; dropdown populated from dict "ccdi_asset_status").
    @ExcelProperty(value = "资产状态*", index = 9)
    @ColumnWidth(14)
    @DictDropdown(dictType = "ccdi_asset_status")
    @Required
    private String assetStatus;

    // Free-text remarks (column 10, optional).
    @ExcelProperty(value = "备注", index = 10)
    @ColumnWidth(28)
    private String remarks;
}
|
||||
@@ -15,8 +15,8 @@ import java.math.BigDecimal;
|
||||
@Schema(description = "亲属资产信息导入失败记录")
|
||||
public class AssetImportFailureVO {
|
||||
|
||||
/** 关系人证件号 */
|
||||
@Schema(description = "关系人证件号")
|
||||
/** 亲属证件号 */
|
||||
@Schema(description = "亲属证件号")
|
||||
private String personId;
|
||||
|
||||
/** 资产大类 */
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
package com.ruoyi.info.collection.domain.vo;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Data;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
|
||||
/**
 * Failure record for a staff asset import row: mirrors the Excel row fields
 * plus the error message describing why the row was rejected.
 *
 * @author ruoyi
 * @date 2026-03-13
 */
@Data
@Schema(description = "员工资产信息导入失败记录")
public class BaseStaffAssetImportFailureVO {

    // Staff ID-card number of the rejected row.
    @Schema(description = "员工身份证号")
    private String personId;

    // Asset main category.
    @Schema(description = "资产大类")
    private String assetMainType;

    // Asset sub-category.
    @Schema(description = "资产小类")
    private String assetSubType;

    // Asset name.
    @Schema(description = "资产名称")
    private String assetName;

    // Ownership ratio.
    @Schema(description = "产权占比")
    private BigDecimal ownershipRatio;

    // Current valuation.
    @Schema(description = "当前估值")
    private BigDecimal currentValue;

    // Asset status.
    @Schema(description = "资产状态")
    private String assetStatus;

    // Human-readable reason the row failed validation or import.
    @Schema(description = "错误信息")
    private String errorMessage;
}
|
||||
@@ -82,4 +82,12 @@ public interface CcdiAssetInfoMapper extends BaseMapper<CcdiAssetInfo> {
|
||||
* @return 归属映射
|
||||
*/
|
||||
List<Map<String, String>> selectOwnerCandidatesByRelationCertNos(@Param("relationCertNos") List<String> relationCertNos);
|
||||
|
||||
/**
|
||||
* 按员工身份证号查询员工本人归属候选
|
||||
*
|
||||
* @param idCards 员工身份证号列表
|
||||
* @return 归属映射
|
||||
*/
|
||||
List<Map<String, String>> selectOwnerCandidatesByBaseStaffIdCards(@Param("idCards") List<String> idCards);
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 员工资产信息异步导入 服务层
|
||||
* 亲属资产信息异步导入 服务层
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-03-12
|
||||
@@ -23,7 +23,7 @@ public interface ICcdiAssetInfoImportService {
|
||||
String importAssetInfo(List<CcdiAssetInfoExcel> excelList);
|
||||
|
||||
/**
|
||||
* 异步导入员工资产数据
|
||||
* 异步导入亲属资产数据
|
||||
*
|
||||
* @param excelList Excel实体列表
|
||||
* @param taskId 任务ID
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
package com.ruoyi.info.collection.service;
|
||||
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiBaseStaffAssetInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.BaseStaffAssetImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Asynchronous import service for staff (employee) asset information.
 *
 * @author ruoyi
 * @date 2026-03-13
 */
public interface ICcdiBaseStaffAssetImportService {

    /**
     * Starts an asynchronous import task for the given Excel rows.
     *
     * @param excelList parsed Excel rows
     * @return task ID used to poll status and failures
     */
    String importAssetInfo(List<CcdiBaseStaffAssetInfoExcel> excelList);

    /**
     * Performs the actual import in the background.
     *
     * @param excelList parsed Excel rows
     * @param taskId    task ID created by {@link #importAssetInfo}
     * @param userName  user name recorded as creator/updater of imported rows
     */
    void importAssetInfoAsync(List<CcdiBaseStaffAssetInfoExcel> excelList, String taskId, String userName);

    /**
     * Queries the current status of an import task.
     *
     * @param taskId task ID
     * @return import status snapshot
     */
    ImportStatusVO getImportStatus(String taskId);

    /**
     * Returns the rows that failed to import for a task.
     *
     * @param taskId task ID
     * @return failure records (empty when none were recorded)
     */
    List<BaseStaffAssetImportFailureVO> getImportFailures(String taskId);
}
|
||||
@@ -27,7 +27,6 @@ import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* 亲属资产信息异步导入服务层处理
|
||||
@@ -164,6 +163,9 @@ public class CcdiAssetInfoImportServiceImpl implements ICcdiAssetInfoImportServi
|
||||
|
||||
private Map<String, Set<String>> buildOwnerMap(List<String> personIds) {
|
||||
Map<String, Set<String>> result = new LinkedHashMap<>();
|
||||
if (personIds == null || personIds.isEmpty()) {
|
||||
return result;
|
||||
}
|
||||
mergeOwnerMappings(result, assetInfoMapper.selectOwnerCandidatesByRelationCertNos(personIds));
|
||||
return result;
|
||||
}
|
||||
@@ -184,7 +186,7 @@ public class CcdiAssetInfoImportServiceImpl implements ICcdiAssetInfoImportServi
|
||||
|
||||
private void validateExcel(CcdiAssetInfoExcel excel) {
|
||||
if (StringUtils.isEmpty(excel.getPersonId())) {
|
||||
throw new RuntimeException("关系人证件号不能为空");
|
||||
throw new RuntimeException("亲属证件号不能为空");
|
||||
}
|
||||
if (StringUtils.isEmpty(excel.getAssetMainType())) {
|
||||
throw new RuntimeException("资产大类不能为空");
|
||||
|
||||
@@ -0,0 +1,226 @@
|
||||
package com.ruoyi.info.collection.service.impl;
|
||||
|
||||
import com.alibaba.fastjson2.JSON;
|
||||
import com.ruoyi.common.utils.SecurityUtils;
|
||||
import com.ruoyi.common.utils.StringUtils;
|
||||
import com.ruoyi.info.collection.domain.CcdiAssetInfo;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiBaseStaffAssetInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.BaseStaffAssetImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportResult;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
import com.ruoyi.info.collection.mapper.CcdiAssetInfoMapper;
|
||||
import com.ruoyi.info.collection.service.ICcdiBaseStaffAssetImportService;
|
||||
import jakarta.annotation.Resource;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.context.annotation.Lazy;
|
||||
import org.springframework.data.redis.core.RedisTemplate;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.scheduling.annotation.EnableAsync;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
 * Asynchronous import service for staff (employee) asset information.
 *
 * Task state lives in Redis under "import:baseStaffAsset:&lt;taskId&gt;" (a hash
 * with status/progress/counters) and failures under
 * "import:baseStaffAsset:&lt;taskId&gt;:failures"; both expire after 7 days.
 *
 * NOTE(review): @EnableAsync normally belongs on a @Configuration class, not a
 * @Service — confirm async execution is not enabled twice elsewhere.
 *
 * @author ruoyi
 * @date 2026-03-13
 */
@Service
@EnableAsync
public class CcdiBaseStaffAssetImportServiceImpl implements ICcdiBaseStaffAssetImportService {

    // Redis key prefix for per-task status hashes and failure lists.
    private static final String STATUS_KEY_PREFIX = "import:baseStaffAsset:";

    @Resource
    private CcdiAssetInfoMapper assetInfoMapper;

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    // Self-injection (lazy, to break the cycle) so importAssetInfoAsync is
    // invoked through the Spring proxy — a plain this-call would bypass @Async.
    @Lazy
    @Resource
    private ICcdiBaseStaffAssetImportService baseStaffAssetImportService;

    /**
     * Creates the task record in Redis (status PROCESSING, 7-day TTL) and
     * dispatches the background import. Returns the new task ID.
     */
    @Override
    @Transactional
    public String importAssetInfo(List<CcdiBaseStaffAssetInfoExcel> excelList) {
        if (excelList == null || excelList.isEmpty()) {
            throw new RuntimeException("至少需要一条数据");
        }

        String taskId = UUID.randomUUID().toString();
        Map<String, Object> statusData = new HashMap<>();
        statusData.put("taskId", taskId);
        statusData.put("status", "PROCESSING");
        statusData.put("totalCount", excelList.size());
        statusData.put("successCount", 0);
        statusData.put("failureCount", 0);
        statusData.put("progress", 0);
        statusData.put("startTime", System.currentTimeMillis());
        statusData.put("message", "正在处理...");

        String statusKey = STATUS_KEY_PREFIX + taskId;
        redisTemplate.opsForHash().putAll(statusKey, statusData);
        redisTemplate.expire(statusKey, 7, TimeUnit.DAYS);

        // Dispatch through the injected proxy so the @Async advice applies.
        baseStaffAssetImportService.importAssetInfoAsync(excelList, taskId, currentUserName());
        return taskId;
    }

    /**
     * Background worker: validates each row, resolves asset ownership by the
     * staff member's own ID card, batch-inserts the valid rows, stores failures
     * in Redis, and finalizes the task status (SUCCESS / PARTIAL_SUCCESS).
     *
     * NOTE(review): if insertBatch or a Redis call throws, this @Async void
     * method swallows the error at the executor level and the task stays in
     * PROCESSING forever — consider a surrounding catch that marks the task
     * FAILED. Also, @Transactional here opens a new transaction on the async
     * thread, not the caller's — confirm that is intended.
     */
    @Override
    @Async
    @Transactional
    public void importAssetInfoAsync(List<CcdiBaseStaffAssetInfoExcel> excelList, String taskId, String userName) {
        List<CcdiAssetInfo> successList = new ArrayList<>();
        List<BaseStaffAssetImportFailureVO> failures = new ArrayList<>();

        // Distinct non-empty ID cards from the upload, used for one batched
        // ownership lookup instead of one query per row.
        List<String> personIds = excelList.stream()
                .map(CcdiBaseStaffAssetInfoExcel::getPersonId)
                .filter(StringUtils::isNotEmpty)
                .distinct()
                .toList();

        Map<String, Set<String>> ownerMap = buildOwnerMap(personIds);

        for (CcdiBaseStaffAssetInfoExcel excel : excelList) {
            try {
                validateExcel(excel);
                Set<String> familyIds = ownerMap.get(excel.getPersonId());
                // Only ID cards present in ccdi_base_staff are accepted:
                // relatives' certificates must go through the family import.
                if (familyIds == null || familyIds.isEmpty()) {
                    throw new RuntimeException("员工资产导入仅支持员工本人证件号");
                }

                CcdiAssetInfo assetInfo = new CcdiAssetInfo();
                BeanUtils.copyProperties(excel, assetInfo);
                // For a staff member's own asset, owner and "family" member are
                // the same person, so both IDs carry the staff ID card.
                assetInfo.setFamilyId(excel.getPersonId());
                assetInfo.setPersonId(excel.getPersonId());
                assetInfo.setCreateBy(userName);
                assetInfo.setUpdateBy(userName);
                successList.add(assetInfo);
            } catch (Exception e) {
                // Row-level failure: record the row plus the reason; keep going.
                BaseStaffAssetImportFailureVO failureVO = new BaseStaffAssetImportFailureVO();
                BeanUtils.copyProperties(excel, failureVO);
                failureVO.setErrorMessage(e.getMessage());
                failures.add(failureVO);
            }
        }

        if (!successList.isEmpty()) {
            assetInfoMapper.insertBatch(successList);
        }

        if (!failures.isEmpty()) {
            redisTemplate.opsForValue().set(STATUS_KEY_PREFIX + taskId + ":failures", failures, 7, TimeUnit.DAYS);
        }

        ImportResult result = new ImportResult();
        result.setTotalCount(excelList.size());
        result.setSuccessCount(successList.size());
        result.setFailureCount(failures.size());
        updateImportStatus(taskId, failures.isEmpty() ? "SUCCESS" : "PARTIAL_SUCCESS", result);
    }

    /**
     * Reads the task status hash from Redis and maps it onto an ImportStatusVO.
     * Throws when the key is missing (task unknown or past its 7-day TTL).
     */
    @Override
    public ImportStatusVO getImportStatus(String taskId) {
        String key = STATUS_KEY_PREFIX + taskId;
        if (Boolean.FALSE.equals(redisTemplate.hasKey(key))) {
            throw new RuntimeException("任务不存在或已过期");
        }

        Map<Object, Object> statusMap = redisTemplate.opsForHash().entries(key);
        ImportStatusVO statusVO = new ImportStatusVO();
        statusVO.setTaskId((String) statusMap.get("taskId"));
        statusVO.setStatus((String) statusMap.get("status"));
        // NOTE(review): these casts assume the configured Redis serializer
        // round-trips Integer/Long exactly as stored — verify against the
        // RedisTemplate configuration.
        statusVO.setTotalCount((Integer) statusMap.get("totalCount"));
        statusVO.setSuccessCount((Integer) statusMap.get("successCount"));
        statusVO.setFailureCount((Integer) statusMap.get("failureCount"));
        statusVO.setProgress((Integer) statusMap.get("progress"));
        statusVO.setStartTime((Long) statusMap.get("startTime"));
        statusVO.setEndTime((Long) statusMap.get("endTime"));
        statusVO.setMessage((String) statusMap.get("message"));
        return statusVO;
    }

    /**
     * Loads the failure list stored by the background worker; empty list when
     * the task had no failures (or the key expired). The JSON round-trip
     * re-types the deserialized Redis value into the VO list.
     */
    @Override
    public List<BaseStaffAssetImportFailureVO> getImportFailures(String taskId) {
        Object failuresObj = redisTemplate.opsForValue().get(STATUS_KEY_PREFIX + taskId + ":failures");
        if (failuresObj == null) {
            return List.of();
        }
        return JSON.parseArray(JSON.toJSONString(failuresObj), BaseStaffAssetImportFailureVO.class);
    }

    // Builds personId -> set of familyIds from the staff table; empty map for
    // an empty/null input (no query issued).
    private Map<String, Set<String>> buildOwnerMap(List<String> personIds) {
        Map<String, Set<String>> result = new LinkedHashMap<>();
        if (personIds == null || personIds.isEmpty()) {
            return result;
        }
        mergeOwnerMappings(result, assetInfoMapper.selectOwnerCandidatesByBaseStaffIdCards(personIds));
        return result;
    }

    // Folds mapper rows ({personId, familyId} maps) into the result, skipping
    // rows with a blank key on either side.
    private void mergeOwnerMappings(Map<String, Set<String>> result, List<Map<String, String>> mappings) {
        if (mappings == null) {
            return;
        }
        for (Map<String, String> mapping : mappings) {
            String personId = mapping.get("personId");
            String familyId = mapping.get("familyId");
            if (StringUtils.isEmpty(personId) || StringUtils.isEmpty(familyId)) {
                continue;
            }
            result.computeIfAbsent(personId, key -> new java.util.LinkedHashSet<>()).add(familyId);
        }
    }

    // Validates the required Excel columns; throws with a user-facing message
    // (stored verbatim as the row's errorMessage) on the first missing field.
    private void validateExcel(CcdiBaseStaffAssetInfoExcel excel) {
        if (StringUtils.isEmpty(excel.getPersonId())) {
            throw new RuntimeException("员工身份证号不能为空");
        }
        if (StringUtils.isEmpty(excel.getAssetMainType())) {
            throw new RuntimeException("资产大类不能为空");
        }
        if (StringUtils.isEmpty(excel.getAssetSubType())) {
            throw new RuntimeException("资产小类不能为空");
        }
        if (StringUtils.isEmpty(excel.getAssetName())) {
            throw new RuntimeException("资产名称不能为空");
        }
        if (excel.getCurrentValue() == null) {
            throw new RuntimeException("当前估值不能为空");
        }
        if (StringUtils.isEmpty(excel.getAssetStatus())) {
            throw new RuntimeException("资产状态不能为空");
        }
    }

    // Writes the terminal task state (counters, 100% progress, end time,
    // summary message) into the status hash. TTL set at creation still applies.
    private void updateImportStatus(String taskId, String status, ImportResult result) {
        Map<String, Object> statusData = new HashMap<>();
        statusData.put("status", status);
        statusData.put("successCount", result.getSuccessCount());
        statusData.put("failureCount", result.getFailureCount());
        statusData.put("progress", 100);
        statusData.put("endTime", System.currentTimeMillis());
        statusData.put("message", "SUCCESS".equals(status)
                ? "全部成功!共导入" + result.getTotalCount() + "条数据"
                : "成功" + result.getSuccessCount() + "条,失败" + result.getFailureCount() + "条");
        redisTemplate.opsForHash().putAll(STATUS_KEY_PREFIX + taskId, statusData);
    }

    // Resolves the current login name; falls back to "system" when there is no
    // security context (e.g. scheduled or async invocation).
    private String currentUserName() {
        try {
            return SecurityUtils.getUsername();
        } catch (Exception e) {
            return "system";
        }
    }
}
|
||||
@@ -89,4 +89,15 @@
|
||||
</foreach>
|
||||
</select>
|
||||
|
||||
<!-- Owner candidates keyed by staff ID card: a staff member owns their own
     assets, so both personId and familyId map to the same id_card column. -->
<select id="selectOwnerCandidatesByBaseStaffIdCards" resultType="map">
    SELECT
    id_card AS personId,
    id_card AS familyId
    FROM ccdi_base_staff
    WHERE id_card IN
    <foreach collection="idCards" item="idCard" open="(" separator="," close=")">
        #{idCard}
    </foreach>
</select>
|
||||
|
||||
</mapper>
|
||||
|
||||
@@ -109,7 +109,7 @@ class CcdiAssetInfoControllerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void importTemplate_shouldUseRelativeAssetTemplateName() {
|
||||
void importTemplate_shouldUseFamilyAssetTemplateName() {
|
||||
try (MockedStatic<EasyExcelUtil> mocked = mockStatic(EasyExcelUtil.class)) {
|
||||
controller.importTemplate(null);
|
||||
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
package com.ruoyi.info.collection.controller;
|
||||
|
||||
import com.ruoyi.common.constant.HttpStatus;
|
||||
import com.ruoyi.common.core.domain.AjaxResult;
|
||||
import com.ruoyi.common.core.page.TableDataInfo;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiBaseStaffAssetInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.BaseStaffAssetImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportResultVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
import com.ruoyi.info.collection.service.ICcdiBaseStaffAssetImportService;
|
||||
import com.ruoyi.info.collection.utils.EasyExcelUtil;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.MockedStatic;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.mock.web.MockMultipartFile;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.mockStatic;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/** Unit tests for CcdiBaseStaffAssetImportController with a mocked import service. */
@ExtendWith(MockitoExtension.class)
class CcdiBaseStaffAssetImportControllerTest {

    @InjectMocks
    private CcdiBaseStaffAssetImportController controller;

    @Mock
    private ICcdiBaseStaffAssetImportService baseStaffAssetImportService;

    // An upload that parses to zero rows must produce a WARN result, not a task.
    @Test
    void importData_shouldReturnWarnWhenExcelHasNoRows() throws Exception {
        MockMultipartFile file = new MockMultipartFile(
                "file",
                "base-staff-asset-empty.xlsx",
                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                "empty".getBytes(StandardCharsets.UTF_8)
        );

        try (MockedStatic<EasyExcelUtil> mocked = mockStatic(EasyExcelUtil.class)) {
            mocked.when(() -> EasyExcelUtil.importExcel(any(InputStream.class), eq(CcdiBaseStaffAssetInfoExcel.class)))
                    .thenReturn(List.of());

            AjaxResult result = controller.importData(file);

            assertEquals(HttpStatus.WARN, result.get(AjaxResult.CODE_TAG));
            assertEquals("至少需要一条数据", result.get(AjaxResult.MSG_TAG));
        }
    }

    // A parseable upload submits a task and echoes the service's task ID back.
    @Test
    void importData_shouldReturnSuccessWhenTaskCreated() throws Exception {
        MockMultipartFile file = new MockMultipartFile(
                "file",
                "base-staff-asset.xlsx",
                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                "asset".getBytes(StandardCharsets.UTF_8)
        );
        CcdiBaseStaffAssetInfoExcel excel = new CcdiBaseStaffAssetInfoExcel();
        excel.setPersonId("320101199001010011");
        when(baseStaffAssetImportService.importAssetInfo(List.of(excel))).thenReturn("task-1");

        try (MockedStatic<EasyExcelUtil> mocked = mockStatic(EasyExcelUtil.class)) {
            mocked.when(() -> EasyExcelUtil.importExcel(any(InputStream.class), eq(CcdiBaseStaffAssetInfoExcel.class)))
                    .thenReturn(List.of(excel));

            AjaxResult result = controller.importData(file);

            assertEquals(HttpStatus.SUCCESS, result.get(AjaxResult.CODE_TAG));
            assertEquals("导入任务已提交,正在后台处理", result.get(AjaxResult.MSG_TAG));
            ImportResultVO data = (ImportResultVO) result.get(AjaxResult.DATA_TAG);
            assertEquals("task-1", data.getTaskId());
        }
    }

    // Status lookups are a straight pass-through to the import service.
    @Test
    void getImportStatus_shouldDelegateToImportService() {
        ImportStatusVO statusVO = new ImportStatusVO();
        statusVO.setTaskId("task-2");
        when(baseStaffAssetImportService.getImportStatus("task-2")).thenReturn(statusVO);

        AjaxResult result = controller.getImportStatus("task-2");

        assertEquals(HttpStatus.SUCCESS, result.get(AjaxResult.CODE_TAG));
        assertEquals(statusVO, result.get(AjaxResult.DATA_TAG));
    }

    // In-memory paging: page 2 with size 1 over two failures returns the second row only.
    @Test
    void getImportFailures_shouldReturnPagedRows() {
        BaseStaffAssetImportFailureVO failure1 = new BaseStaffAssetImportFailureVO();
        failure1.setPersonId("A1");
        BaseStaffAssetImportFailureVO failure2 = new BaseStaffAssetImportFailureVO();
        failure2.setPersonId("A2");
        when(baseStaffAssetImportService.getImportFailures("task-3")).thenReturn(List.of(failure1, failure2));

        TableDataInfo result = controller.getImportFailures("task-3", 2, 1);

        assertEquals(2, result.getTotal());
        assertEquals(1, result.getRows().size());
        assertEquals("A2", ((BaseStaffAssetImportFailureVO) result.getRows().get(0)).getPersonId());
    }

    // The template download must use the staff-asset sheet name, not the family-asset one.
    @Test
    void importTemplate_shouldUseBaseStaffAssetTemplateName() {
        try (MockedStatic<EasyExcelUtil> mocked = mockStatic(EasyExcelUtil.class)) {
            controller.importTemplate(null);

            mocked.verify(() -> EasyExcelUtil.importTemplateWithDictDropdown(null, CcdiBaseStaffAssetInfoExcel.class, "员工资产信息"));
        }
    }
}
|
||||
@@ -96,6 +96,24 @@ class CcdiAssetInfoImportServiceImplTest {
|
||||
assertEquals("320101199001010011", captor.getValue().get(0).getPersonId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void importAssetInfoAsync_shouldFailWhenEmployeeIdCardIsUsedForFamilyAssetImport() {
|
||||
CcdiAssetInfoExcel excel = buildExcel("320101199001010011", "房产");
|
||||
when(redisTemplate.opsForHash()).thenReturn(hashOperations);
|
||||
when(redisTemplate.opsForValue()).thenReturn(valueOperations);
|
||||
when(assetInfoMapper.selectOwnerCandidatesByRelationCertNos(List.of("320101199001010011")))
|
||||
.thenReturn(List.of());
|
||||
|
||||
service.importAssetInfoAsync(List.of(excel), "task-self", "tester");
|
||||
|
||||
verify(assetInfoMapper, never()).insertBatch(any());
|
||||
ArgumentCaptor<Object> failureCaptor = ArgumentCaptor.forClass(Object.class);
|
||||
verify(valueOperations).set(eq("import:assetInfo:task-self:failures"), failureCaptor.capture(), eq(7L), eq(TimeUnit.DAYS));
|
||||
AssetImportFailureVO failure = (AssetImportFailureVO) ((List<?>) failureCaptor.getValue()).get(0);
|
||||
assertEquals("320101199001010011", failure.getPersonId());
|
||||
assertTrue(failure.getErrorMessage().contains("未找到亲属资产归属员工"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void importAssetInfoAsync_shouldResolveFamilyIdFromFamilyRelationIdCard() {
|
||||
CcdiAssetInfoExcel excel = buildExcel("320101199201010022", "车辆");
|
||||
|
||||
@@ -0,0 +1,145 @@
|
||||
package com.ruoyi.info.collection.service;
|
||||
|
||||
import com.ruoyi.info.collection.domain.CcdiAssetInfo;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiBaseStaffAssetInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.BaseStaffAssetImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
import com.ruoyi.info.collection.mapper.CcdiAssetInfoMapper;
|
||||
import com.ruoyi.info.collection.service.impl.CcdiBaseStaffAssetImportServiceImpl;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.data.redis.core.HashOperations;
|
||||
import org.springframework.data.redis.core.RedisTemplate;
|
||||
import org.springframework.data.redis.core.ValueOperations;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyMap;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.never;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class CcdiBaseStaffAssetImportServiceImplTest {
|
||||
|
||||
@InjectMocks
|
||||
private CcdiBaseStaffAssetImportServiceImpl service;
|
||||
|
||||
@Mock
|
||||
private CcdiAssetInfoMapper assetInfoMapper;
|
||||
|
||||
@Mock
|
||||
private RedisTemplate<String, Object> redisTemplate;
|
||||
|
||||
@Mock
|
||||
private ICcdiBaseStaffAssetImportService baseStaffAssetImportService;
|
||||
|
||||
@Mock
|
||||
private HashOperations<String, Object, Object> hashOperations;
|
||||
|
||||
@Mock
|
||||
private ValueOperations<String, Object> valueOperations;
|
||||
|
||||
@Test
|
||||
void importAssetInfo_shouldUseDedicatedBaseStaffAssetTaskKeys() {
|
||||
List<CcdiBaseStaffAssetInfoExcel> excelList = List.of(buildExcel("320101199001010011", "房产"));
|
||||
when(redisTemplate.opsForHash()).thenReturn(hashOperations);
|
||||
|
||||
String taskId = service.importAssetInfo(excelList);
|
||||
|
||||
verify(hashOperations).putAll(eq("import:baseStaffAsset:" + taskId), anyMap());
|
||||
verify(redisTemplate).expire("import:baseStaffAsset:" + taskId, 7, TimeUnit.DAYS);
|
||||
verify(baseStaffAssetImportService).importAssetInfoAsync(eq(excelList), eq(taskId), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void importAssetInfoAsync_shouldImportWhenEmployeeIdCardExists() {
|
||||
CcdiBaseStaffAssetInfoExcel excel = buildExcel("320101199001010011", "房产");
|
||||
when(redisTemplate.opsForHash()).thenReturn(hashOperations);
|
||||
when(assetInfoMapper.selectOwnerCandidatesByBaseStaffIdCards(List.of("320101199001010011")))
|
||||
.thenReturn(List.of(owner("320101199001010011", "320101199001010011")));
|
||||
|
||||
service.importAssetInfoAsync(List.of(excel), "task-1", "tester");
|
||||
|
||||
ArgumentCaptor<List<CcdiAssetInfo>> captor = ArgumentCaptor.forClass(List.class);
|
||||
verify(assetInfoMapper).insertBatch(captor.capture());
|
||||
assertEquals("320101199001010011", captor.getValue().get(0).getFamilyId());
|
||||
assertEquals("320101199001010011", captor.getValue().get(0).getPersonId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void importAssetInfoAsync_shouldFailWhenFamilyCertificateIsUsed() {
|
||||
CcdiBaseStaffAssetInfoExcel excel = buildExcel("320101199201010022", "车辆");
|
||||
when(redisTemplate.opsForHash()).thenReturn(hashOperations);
|
||||
when(redisTemplate.opsForValue()).thenReturn(valueOperations);
|
||||
when(assetInfoMapper.selectOwnerCandidatesByBaseStaffIdCards(List.of("320101199201010022")))
|
||||
.thenReturn(List.of());
|
||||
|
||||
service.importAssetInfoAsync(List.of(excel), "task-2", "tester");
|
||||
|
||||
verify(assetInfoMapper, never()).insertBatch(any());
|
||||
ArgumentCaptor<Object> failureCaptor = ArgumentCaptor.forClass(Object.class);
|
||||
verify(valueOperations).set(eq("import:baseStaffAsset:task-2:failures"), failureCaptor.capture(), eq(7L), eq(TimeUnit.DAYS));
|
||||
BaseStaffAssetImportFailureVO failure = (BaseStaffAssetImportFailureVO) ((List<?>) failureCaptor.getValue()).get(0);
|
||||
assertEquals("320101199201010022", failure.getPersonId());
|
||||
assertTrue(failure.getErrorMessage().contains("员工资产导入仅支持员工本人证件号"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void getImportStatusAndFailures_shouldUseBaseStaffAssetPrefixes() {
|
||||
when(redisTemplate.opsForHash()).thenReturn(hashOperations);
|
||||
when(redisTemplate.opsForValue()).thenReturn(valueOperations);
|
||||
when(redisTemplate.hasKey("import:baseStaffAsset:task-3")).thenReturn(true);
|
||||
when(hashOperations.entries("import:baseStaffAsset:task-3")).thenReturn(Map.of(
|
||||
"taskId", "task-3",
|
||||
"status", "SUCCESS",
|
||||
"totalCount", 1,
|
||||
"successCount", 1,
|
||||
"failureCount", 0,
|
||||
"progress", 100,
|
||||
"startTime", 1L,
|
||||
"endTime", 2L,
|
||||
"message", "全部成功"
|
||||
));
|
||||
BaseStaffAssetImportFailureVO failureVO = new BaseStaffAssetImportFailureVO();
|
||||
failureVO.setPersonId("320101199001010099");
|
||||
failureVO.setErrorMessage("员工资产导入仅支持员工本人证件号");
|
||||
when(valueOperations.get("import:baseStaffAsset:task-3:failures")).thenReturn(List.of(failureVO));
|
||||
|
||||
ImportStatusVO statusVO = service.getImportStatus("task-3");
|
||||
List<BaseStaffAssetImportFailureVO> failures = service.getImportFailures("task-3");
|
||||
|
||||
assertEquals("task-3", statusVO.getTaskId());
|
||||
assertEquals("SUCCESS", statusVO.getStatus());
|
||||
assertNotNull(failures);
|
||||
assertEquals(1, failures.size());
|
||||
assertEquals("320101199001010099", failures.get(0).getPersonId());
|
||||
}
|
||||
|
||||
private CcdiBaseStaffAssetInfoExcel buildExcel(String personId, String assetMainType) {
|
||||
CcdiBaseStaffAssetInfoExcel excel = new CcdiBaseStaffAssetInfoExcel();
|
||||
excel.setPersonId(personId);
|
||||
excel.setAssetMainType(assetMainType);
|
||||
excel.setAssetSubType(assetMainType + "小类");
|
||||
excel.setAssetName(assetMainType + "名称");
|
||||
excel.setCurrentValue(new BigDecimal("100.00"));
|
||||
excel.setAssetStatus("正常");
|
||||
return excel;
|
||||
}
|
||||
|
||||
private Map<String, String> owner(String personId, String familyId) {
|
||||
return Map.of("personId", personId, "familyId", familyId);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,253 @@
|
||||
# 员工资产导入与亲属资产导入拆分后端实施计划
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** 将员工资产导入与亲属资产导入拆成两套独立后端接口与服务,确保员工页仅导入本人资产、亲属页仅导入亲属资产。
|
||||
|
||||
**Architecture:** 保留现有亲属资产导入控制器与服务作为“亲属专用导入链路”,新增一套员工资产专用导入控制器、Excel 模型、失败记录 VO 与异步导入服务。两套链路分别使用不同权限、模板和归属匹配规则,不再共享“本人/亲属兜底识别”逻辑。
|
||||
|
||||
**Tech Stack:** Java 21, Spring Boot 3, MyBatis Plus, Redis, EasyExcel, JUnit 5, Mockito
|
||||
|
||||
---
|
||||
|
||||
### Task 1: 固化拆分后的导入规则测试
|
||||
|
||||
**Files:**
|
||||
- Modify: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiAssetInfoImportServiceImplTest.java`
|
||||
- Create: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiBaseStaffAssetImportServiceImplTest.java`
|
||||
- Modify: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/controller/CcdiAssetInfoControllerTest.java`
|
||||
- Create: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/controller/CcdiBaseStaffAssetImportControllerTest.java`
|
||||
|
||||
**Step 1: 写亲属资产导入失败测试**
|
||||
|
||||
- 为 `CcdiAssetInfoImportServiceImplTest` 增加用例:
|
||||
- 员工本人身份证号导入时失败
|
||||
- 失败文案为亲属资产专用文案
|
||||
- 为 `CcdiAssetInfoControllerTest` 校验模板标题为“亲属资产信息”
|
||||
|
||||
**Step 2: 运行测试确认当前失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn test -pl ccdi-info-collection -am "-Dtest=CcdiAssetInfoImportServiceImplTest,CcdiAssetInfoControllerTest" "-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 新增断言失败
|
||||
- 失败原因是当前实现仍允许员工本人资产命中
|
||||
|
||||
**Step 3: 写员工资产导入测试**
|
||||
|
||||
- 新增 `CcdiBaseStaffAssetImportServiceImplTest`
|
||||
- 覆盖以下场景:
|
||||
- 员工本人身份证号导入成功
|
||||
- 亲属证件号导入失败
|
||||
- Redis key 使用员工资产独立前缀
|
||||
- 新增 `CcdiBaseStaffAssetImportControllerTest`
|
||||
- 覆盖模板标题、任务创建、状态与失败记录查询
|
||||
|
||||
**Step 4: 再次运行测试确认失败点准确**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn test -pl ccdi-info-collection -am "-Dtest=CcdiBaseStaffAssetImportServiceImplTest,CcdiBaseStaffAssetImportControllerTest" "-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 因类与实现尚不存在而失败
|
||||
|
||||
**Step 5: 提交测试脚手架**
|
||||
|
||||
```bash
|
||||
git add ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiAssetInfoImportServiceImplTest.java ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiBaseStaffAssetImportServiceImplTest.java ccdi-info-collection/src/test/java/com/ruoyi/info/collection/controller/CcdiAssetInfoControllerTest.java ccdi-info-collection/src/test/java/com/ruoyi/info/collection/controller/CcdiBaseStaffAssetImportControllerTest.java
|
||||
git commit -m "补充资产导入拆分后端失败测试"
|
||||
```
|
||||
|
||||
### Task 2: 收敛亲属资产导入为亲属专用链路
|
||||
|
||||
**Files:**
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiAssetInfoController.java`
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiAssetInfoImportServiceImpl.java`
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiAssetInfoExcel.java`
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/AssetImportFailureVO.java`
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/mapper/CcdiAssetInfoMapper.java`
|
||||
- Modify: `ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiAssetInfoMapper.xml`
|
||||
|
||||
**Step 1: 最小实现修改**
|
||||
|
||||
- 将 `CcdiAssetInfoImportServiceImpl` 的归属匹配改为只调用 `selectOwnerCandidatesByRelationCertNos`
|
||||
- 删除员工本人兜底逻辑
|
||||
- 恢复亲属资产专用错误文案
|
||||
- 将 Excel 首列表头改为“亲属证件号*”
|
||||
- 将 controller 标题、swagger 文案、日志标题改回亲属资产专用表述
|
||||
- 权限仅保留 `ccdi:staffFmyRelation:import`
|
||||
|
||||
**Step 2: 运行亲属资产相关测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn test -pl ccdi-info-collection -am "-Dtest=CcdiAssetInfoImportServiceImplTest,CcdiAssetInfoControllerTest" "-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 亲属资产测试全部通过
|
||||
|
||||
**Step 3: 检查无多余员工导入逻辑残留**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
git grep -n "selectOwnerCandidatesByPersonIds|hasAnyPermi('ccdi:employee:import" -- "ccdi-info-collection/src/main/java/**/*.java" "ccdi-info-collection/src/main/resources/**/*.xml"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `CcdiAssetInfo*` 相关文件不再包含员工资产导入特有逻辑
|
||||
|
||||
**Step 4: 提交**
|
||||
|
||||
```bash
|
||||
git add ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiAssetInfoController.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiAssetInfoImportServiceImpl.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiAssetInfoExcel.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/AssetImportFailureVO.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/mapper/CcdiAssetInfoMapper.java ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiAssetInfoMapper.xml
|
||||
git commit -m "收敛亲属资产导入为亲属专用逻辑"
|
||||
```
|
||||
|
||||
### Task 3: 新增员工资产导入后端接口与模型
|
||||
|
||||
**Files:**
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiBaseStaffAssetImportController.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/ICcdiBaseStaffAssetImportService.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffAssetImportServiceImpl.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiBaseStaffAssetInfoExcel.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/BaseStaffAssetImportFailureVO.java`
|
||||
|
||||
**Step 1: 写最小接口定义**
|
||||
|
||||
- 定义员工资产导入 service 接口:
|
||||
- `importAssetInfo`
|
||||
- `importAssetInfoAsync`
|
||||
- `getImportStatus`
|
||||
- `getImportFailures`
|
||||
|
||||
**Step 2: 写 controller 最小实现**
|
||||
|
||||
- 提供以下接口:
|
||||
- `POST /ccdi/baseStaff/asset/importTemplate`
|
||||
- `POST /ccdi/baseStaff/asset/importData`
|
||||
- `GET /ccdi/baseStaff/asset/importStatus/{taskId}`
|
||||
- `GET /ccdi/baseStaff/asset/importFailures/{taskId}`
|
||||
- 权限使用 `ccdi:employee:import`
|
||||
|
||||
**Step 3: 写 Excel 与失败记录模型**
|
||||
|
||||
- `CcdiBaseStaffAssetInfoExcel` 首列使用“员工身份证号*”
|
||||
- `BaseStaffAssetImportFailureVO` 字段与员工资产模板保持一致
|
||||
|
||||
**Step 4: 运行员工资产 controller 测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn test -pl ccdi-info-collection -am "-Dtest=CcdiBaseStaffAssetImportControllerTest" "-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- controller 测试通过
|
||||
|
||||
**Step 5: 提交**
|
||||
|
||||
```bash
|
||||
git add ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiBaseStaffAssetImportController.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/ICcdiBaseStaffAssetImportService.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffAssetImportServiceImpl.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiBaseStaffAssetInfoExcel.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/BaseStaffAssetImportFailureVO.java
|
||||
git commit -m "新增员工资产导入后端接口"
|
||||
```
|
||||
|
||||
### Task 4: 实现员工资产归属匹配与异步导入
|
||||
|
||||
**Files:**
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffAssetImportServiceImpl.java`
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/mapper/CcdiAssetInfoMapper.java`
|
||||
- Modify: `ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiAssetInfoMapper.xml`
|
||||
|
||||
**Step 1: 写最小匹配实现**
|
||||
|
||||
- 在 mapper 中新增 `selectOwnerCandidatesByBaseStaffIdCards`
|
||||
- SQL 只查询 `ccdi_base_staff.id_card`
|
||||
- service 只按员工身份证号匹配
|
||||
- 不查亲属关系表
|
||||
|
||||
**Step 2: 写导入成功逻辑**
|
||||
|
||||
- 复制 Excel 到 `CcdiAssetInfo`
|
||||
- 强制 `familyId = personId = 员工身份证号`
|
||||
- 使用独立 Redis 前缀,例如 `import:baseStaffAsset:`
|
||||
|
||||
**Step 3: 写失败逻辑**
|
||||
|
||||
- 未命中员工表时报错
|
||||
- 失败文案使用员工资产专用文案
|
||||
|
||||
**Step 4: 运行员工资产 service 测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn test -pl ccdi-info-collection -am "-Dtest=CcdiBaseStaffAssetImportServiceImplTest" "-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 员工资产 service 测试通过
|
||||
|
||||
**Step 5: 提交**
|
||||
|
||||
```bash
|
||||
git add ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffAssetImportServiceImpl.java ccdi-info-collection/src/main/java/com/ruoyi/info/collection/mapper/CcdiAssetInfoMapper.java ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiAssetInfoMapper.xml
|
||||
git commit -m "实现员工资产导入归属匹配"
|
||||
```
|
||||
|
||||
### Task 5: 执行回归验证
|
||||
|
||||
**Files:**
|
||||
- Modify: `docs/plans/2026-03-13-employee-family-asset-import-split-design.md`
|
||||
|
||||
**Step 1: 运行后端定向测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn test -pl ccdi-info-collection -am "-Dtest=CcdiAssetInfoImportServiceImplTest,CcdiAssetInfoControllerTest,CcdiBaseStaffAssetImportServiceImplTest,CcdiBaseStaffAssetImportControllerTest" "-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 相关测试全部通过
|
||||
|
||||
**Step 2: 做源码断言检查**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
git grep -n "/ccdi/baseStaff/asset|/ccdi/assetInfo" -- "ccdi-info-collection/src/main/java/**/*.java"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 员工与亲属两套接口都存在
|
||||
- 路由职责清晰
|
||||
|
||||
**Step 3: 更新设计文档的实现状态说明**
|
||||
|
||||
- 在设计文档末尾补充“已完成实现验证”的简短说明
|
||||
|
||||
**Step 4: 提交**
|
||||
|
||||
```bash
|
||||
git add docs/plans/2026-03-13-employee-family-asset-import-split-design.md
|
||||
git commit -m "完成资产导入拆分后端验证"
|
||||
```
|
||||
@@ -0,0 +1,253 @@
|
||||
# 员工资产导入与亲属资产导入拆分设计
|
||||
|
||||
## 背景
|
||||
|
||||
当前员工信息维护页 [ccdiBaseStaff/index.vue](/D:/ccdi/ccdi/ruoyi-ui/src/views/ccdiBaseStaff/index.vue) 与员工亲属关系维护页 [ccdiStaffFmyRelation/index.vue](/D:/ccdi/ccdi/ruoyi-ui/src/views/ccdiStaffFmyRelation/index.vue) 共用了 `/ccdi/assetInfo/*` 资产导入接口,导致两类业务边界混淆:
|
||||
|
||||
- 员工页的“导入资产信息”应只导入员工本人资产
|
||||
- 亲属页的“导入亲属资产信息”应只导入员工亲属资产
|
||||
- 当前共享接口无法同时满足这两条规则,模板、权限、失败文案也容易串用
|
||||
|
||||
用户已确认:
|
||||
|
||||
- 保留员工页“导入资产信息”按钮
|
||||
- 员工资产导入与亲属资产导入必须彻底拆分
|
||||
- 员工亲属资产导入功能只能导入员工亲属的资产信息,不能更新员工的
|
||||
|
||||
## 目标
|
||||
|
||||
- 将员工资产导入与亲属资产导入拆成两条独立链路
|
||||
- 员工页只支持员工本人资产导入
|
||||
- 亲属页只支持亲属资产导入
|
||||
- 模板、接口、权限、失败提示、任务状态缓存全部分离
|
||||
|
||||
## 非目标
|
||||
|
||||
- 不改造员工资产手工维护功能
|
||||
- 不改造亲属资产手工维护功能
|
||||
- 不新增独立资产菜单页面
|
||||
- 不调整 `ccdi_asset_info` 表结构
|
||||
|
||||
## 现状
|
||||
|
||||
当前共用资产导入能力集中在:
|
||||
|
||||
- 前端 API:[ccdiAssetInfo.js](/D:/ccdi/ccdi/ruoyi-ui/src/api/ccdiAssetInfo.js)
|
||||
- 员工页调用点:[ccdiBaseStaff/index.vue](/D:/ccdi/ccdi/ruoyi-ui/src/views/ccdiBaseStaff/index.vue)
|
||||
- 亲属页调用点:[ccdiStaffFmyRelation/index.vue](/D:/ccdi/ccdi/ruoyi-ui/src/views/ccdiStaffFmyRelation/index.vue)
|
||||
- 后端控制器:[CcdiAssetInfoController.java](/D:/ccdi/ccdi/ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiAssetInfoController.java)
|
||||
- 后端导入服务:[CcdiAssetInfoImportServiceImpl.java](/D:/ccdi/ccdi/ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiAssetInfoImportServiceImpl.java)
|
||||
|
||||
问题点:
|
||||
|
||||
- 员工页与亲属页共用同一上传地址 `/ccdi/assetInfo/importData`
|
||||
- 共用同一模板下载地址 `/ccdi/assetInfo/importTemplate`
|
||||
- 共用同一任务状态与失败记录查询入口
|
||||
- 共用同一导入匹配规则,无法表达“员工页仅员工本人、亲属页仅亲属”
|
||||
|
||||
## 方案对比
|
||||
|
||||
### 方案一:员工资产导入、亲属资产导入完全拆分
|
||||
|
||||
- 员工页新增一套独立导入接口
|
||||
- 亲属页保留现有 `/ccdi/assetInfo/*`
|
||||
- 两边各自使用独立模板、权限、校验和失败文案
|
||||
|
||||
优点:
|
||||
|
||||
- 业务边界最清晰
|
||||
- 后续维护风险最低
|
||||
- 模板与前端交互不再串用
|
||||
|
||||
缺点:
|
||||
|
||||
- 需要新增一套员工资产导入 controller/service/api
|
||||
|
||||
### 方案二:继续共用接口,通过 `mode` 区分
|
||||
|
||||
- 前端调用同一接口
|
||||
- 后端通过 `mode=employee/family` 分支处理
|
||||
|
||||
优点:
|
||||
|
||||
- 代码新增较少
|
||||
|
||||
缺点:
|
||||
|
||||
- 控制器和 service 内分支复杂
|
||||
- 模板、权限、提示文案仍容易混淆
|
||||
- 后续扩展时回归风险高
|
||||
|
||||
### 方案三:仅修前端入口文案
|
||||
|
||||
优点:
|
||||
|
||||
- 改动最小
|
||||
|
||||
缺点:
|
||||
|
||||
- 业务问题未解决
|
||||
- 实际导入规则仍然混乱
|
||||
|
||||
## 最终方案
|
||||
|
||||
采用方案一:员工资产导入与亲属资产导入完全拆分。
|
||||
|
||||
### 员工资产导入
|
||||
|
||||
- 页面入口:员工信息维护页
|
||||
- 独立接口:
|
||||
- `POST /ccdi/baseStaff/asset/importTemplate`
|
||||
- `POST /ccdi/baseStaff/asset/importData`
|
||||
- `GET /ccdi/baseStaff/asset/importStatus/{taskId}`
|
||||
- `GET /ccdi/baseStaff/asset/importFailures/{taskId}`
|
||||
- 独立前端 API 文件:`ruoyi-ui/src/api/ccdiBaseStaffAsset.js`
|
||||
- 仅允许导入员工本人资产
|
||||
- 模板第一列要求填写员工身份证号
|
||||
- 导入成功后强制写入:
|
||||
- `family_id = 员工身份证号`
|
||||
- `person_id = 员工身份证号`
|
||||
|
||||
### 亲属资产导入
|
||||
|
||||
- 页面入口:员工亲属关系维护页
|
||||
- 保留现有接口:
|
||||
- `POST /ccdi/assetInfo/importTemplate`
|
||||
- `POST /ccdi/assetInfo/importData`
|
||||
- `GET /ccdi/assetInfo/importStatus/{taskId}`
|
||||
- `GET /ccdi/assetInfo/importFailures/{taskId}`
|
||||
- 仅允许导入员工亲属资产
|
||||
- 模板第一列要求填写亲属证件号
|
||||
- 导入成功后强制写入:
|
||||
- `family_id = 关联员工证件号`
|
||||
- `person_id = 亲属证件号`
|
||||
|
||||
## 后端设计
|
||||
|
||||
### 新增员工资产导入控制面
|
||||
|
||||
新增:
|
||||
|
||||
- `controller/CcdiBaseStaffAssetImportController.java`
|
||||
- `service/ICcdiBaseStaffAssetImportService.java`
|
||||
- `service/impl/CcdiBaseStaffAssetImportServiceImpl.java`
|
||||
- `domain/excel/CcdiBaseStaffAssetInfoExcel.java`
|
||||
- `domain/vo/BaseStaffAssetImportFailureVO.java`
|
||||
|
||||
员工资产导入匹配规则:
|
||||
|
||||
- 仅根据 `ccdi_base_staff.id_card` 匹配
|
||||
- 若未匹配到员工,导入失败
|
||||
- 不再兜底匹配亲属关系表
|
||||
- 若命中员工,则写入 `family_id = person_id = id_card`
|
||||
|
||||
亲属资产导入规则调整:
|
||||
|
||||
- [CcdiAssetInfoImportServiceImpl.java](/D:/ccdi/ccdi/ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiAssetInfoImportServiceImpl.java) 只保留亲属资产逻辑
|
||||
- 仅根据 `ccdi_staff_fmy_relation.relation_cert_no` 匹配
|
||||
- 不再匹配员工本人身份证号
|
||||
|
||||
### 权限设计
|
||||
|
||||
- 员工资产导入接口:`ccdi:employee:import`
|
||||
- 亲属资产导入接口:`ccdi:staffFmyRelation:import`
|
||||
|
||||
禁止再使用同时兼容两个权限的写法,以免接口语义再次混淆。
|
||||
|
||||
## 前端设计
|
||||
|
||||
### 员工页
|
||||
|
||||
[ccdiBaseStaff/index.vue](/D:/ccdi/ccdi/ruoyi-ui/src/views/ccdiBaseStaff/index.vue) 改为:
|
||||
|
||||
- 上传地址改为员工资产专用接口
|
||||
- 模板下载改为员工资产模板
|
||||
- 任务状态和失败记录查询改为员工资产专用接口
|
||||
- 提示文案明确为“员工资产数据导入”
|
||||
|
||||
### 亲属页
|
||||
|
||||
[ccdiStaffFmyRelation/index.vue](/D:/ccdi/ccdi/ruoyi-ui/src/views/ccdiStaffFmyRelation/index.vue) 保持:
|
||||
|
||||
- 上传地址仍为 `/ccdi/assetInfo/importData`
|
||||
- 模板下载仍为亲属资产模板
|
||||
- 文案继续强调“亲属资产”
|
||||
|
||||
## 模板设计
|
||||
|
||||
员工资产模板:
|
||||
|
||||
- 文件名:`员工资产信息模板_xxx.xlsx`
|
||||
- 首列表头:`员工身份证号*`
|
||||
|
||||
亲属资产模板:
|
||||
|
||||
- 文件名:`亲属资产信息模板_xxx.xlsx`
|
||||
- 首列表头:`亲属证件号*`
|
||||
|
||||
## 校验规则
|
||||
|
||||
### 员工资产导入
|
||||
|
||||
- 员工身份证号不能为空
|
||||
- 资产必填字段不能为空
|
||||
- 员工身份证号必须存在于 `ccdi_base_staff.id_card`
|
||||
- 若填写的是亲属证件号或其他未命中的证件号,直接失败
|
||||
|
||||
建议失败文案:
|
||||
|
||||
- `未找到员工资产归属员工`
|
||||
- 或更明确的 `员工资产导入仅支持员工本人证件号`
|
||||
|
||||
### 亲属资产导入
|
||||
|
||||
- 亲属证件号不能为空
|
||||
- 资产必填字段不能为空
|
||||
- 亲属证件号必须存在于 `ccdi_staff_fmy_relation.relation_cert_no`
|
||||
- 若填写员工本人身份证号且不存在亲属关系映射,直接失败
|
||||
- 若同一亲属证件号匹配多个员工关系,失败并提示归属不唯一
|
||||
|
||||
## 测试要求
|
||||
|
||||
后端:
|
||||
|
||||
- 员工资产导入:员工本人证件号成功
|
||||
- 员工资产导入:亲属证件号失败
|
||||
- 亲属资产导入:亲属证件号成功
|
||||
- 亲属资产导入:员工本人证件号失败
|
||||
- 两套模板标题和首列表头不同
|
||||
- 两套接口权限分别正确
|
||||
|
||||
前端:
|
||||
|
||||
- 员工页使用员工资产专用上传地址
|
||||
- 亲属页继续使用 `/ccdi/assetInfo/*`
|
||||
- 员工页下载员工资产模板
|
||||
- 亲属页下载亲属资产模板
|
||||
- 员工页和亲属页的失败记录弹窗文案不混淆
|
||||
|
||||
## 风险与回滚
|
||||
|
||||
风险:
|
||||
|
||||
- 当前仓库内已有共享资产导入代码,拆分时容易遗漏某一处调用
|
||||
- 若只拆后端不拆前端,页面会继续指向旧接口
|
||||
- 若模板文案未同步,用户仍可能误用模板
|
||||
|
||||
回滚策略:
|
||||
|
||||
- 独立提交员工资产导入新增改动
|
||||
- 独立提交亲属资产导入收敛改动
|
||||
- 任一阶段出现回归,可按提交粒度回退
|
||||
|
||||
## 实现状态
|
||||
|
||||
- 2026-03-13 已完成后端拆分实现
|
||||
- 已新增员工资产独立导入接口 `/ccdi/baseStaff/asset/*`
|
||||
- 已将 `/ccdi/assetInfo/*` 收敛为亲属资产专用接口
|
||||
- 已通过后端定向测试验证员工与亲属两套导入链路、模板名称和失败文案拆分生效
|
||||
- 2026-03-13 已完成前端拆分实现
|
||||
- 员工页已切换为员工资产专用前端 API `ruoyi-ui/src/api/ccdiBaseStaffAsset.js`
|
||||
- 员工页上传地址、模板下载、状态轮询与失败记录查询已全部改为 `/ccdi/baseStaff/asset/*`
|
||||
- 亲属页继续保持 `/ccdi/assetInfo/*` 导入链路与“亲属资产”提示文案
|
||||
- 已通过 4 个前端静态契约测试验证员工页与亲属页导入交互隔离生效
|
||||
@@ -0,0 +1,230 @@
|
||||
# 员工资产导入与亲属资产导入拆分前端实施计划
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** 将员工页与亲属页的资产导入前端交互彻底拆开,确保员工页只走员工资产导入接口,亲属页只走亲属资产导入接口。
|
||||
|
||||
**Architecture:** 保留亲属页现有资产导入状态管理,新增员工资产专用 API 封装并改造员工页上传地址、模板下载、状态轮询与失败记录查询。两边继续复用现有异步导入交互样式,但数据源和文案完全隔离。
|
||||
|
||||
**Tech Stack:** Vue 2, Element UI, Axios request wrapper, Node 静态契约测试
|
||||
|
||||
---
|
||||
|
||||
### Task 1: 固化前端拆分契约测试
|
||||
|
||||
**Files:**
|
||||
- Modify: `ruoyi-ui/tests/unit/employee-asset-api-contract.test.js`
|
||||
- Modify: `ruoyi-ui/tests/unit/employee-asset-import-ui.test.js`
|
||||
- Modify: `ruoyi-ui/tests/unit/staff-family-asset-api-contract.test.js`
|
||||
- Modify: `ruoyi-ui/tests/unit/staff-family-asset-detail-import-ui.test.js`
|
||||
|
||||
**Step 1: 写员工页失败测试**
|
||||
|
||||
- 断言员工页不再引用 `/ccdi/assetInfo/importData`
|
||||
- 断言员工页引用新的员工资产 API 文件或路由
|
||||
- 断言模板文案为“员工资产模板”
|
||||
|
||||
**Step 2: 运行员工页静态测试确认失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
node tests/unit/employee-asset-api-contract.test.js
|
||||
node tests/unit/employee-asset-import-ui.test.js
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 至少一个断言失败
|
||||
- 失败原因是员工页仍指向旧接口
|
||||
|
||||
**Step 3: 写亲属页保护性测试**
|
||||
|
||||
- 断言亲属页继续使用 `/ccdi/assetInfo/*`
|
||||
- 断言亲属页模板文案仍为“亲属资产”
|
||||
|
||||
**Step 4: 运行亲属页测试确认当前仍通过**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
node tests/unit/staff-family-asset-api-contract.test.js
|
||||
node tests/unit/staff-family-asset-detail-import-ui.test.js
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 现有亲属页测试通过
|
||||
|
||||
**Step 5: 提交**
|
||||
|
||||
```bash
|
||||
git add ruoyi-ui/tests/unit/employee-asset-api-contract.test.js ruoyi-ui/tests/unit/employee-asset-import-ui.test.js ruoyi-ui/tests/unit/staff-family-asset-api-contract.test.js ruoyi-ui/tests/unit/staff-family-asset-detail-import-ui.test.js
|
||||
git commit -m "补充资产导入拆分前端失败测试"
|
||||
```
|
||||
|
||||
### Task 2: 新增员工资产导入 API 封装
|
||||
|
||||
**Files:**
|
||||
- Create: `ruoyi-ui/src/api/ccdiBaseStaffAsset.js`
|
||||
- Modify: `ruoyi-ui/tests/unit/employee-asset-api-contract.test.js`
|
||||
|
||||
**Step 1: 写最小 API 文件**
|
||||
|
||||
导出以下方法:
|
||||
|
||||
```javascript
|
||||
export function importBaseStaffAssetTemplate() {}
|
||||
export function importBaseStaffAssetData(data) {}
|
||||
export function getBaseStaffAssetImportStatus(taskId) {}
|
||||
export function getBaseStaffAssetImportFailures(taskId, pageNum, pageSize) {}
|
||||
```
|
||||
|
||||
**Step 2: 接入员工专用路由**
|
||||
|
||||
- `/ccdi/baseStaff/asset/importTemplate`
|
||||
- `/ccdi/baseStaff/asset/importData`
|
||||
- `/ccdi/baseStaff/asset/importStatus/`
|
||||
- `/ccdi/baseStaff/asset/importFailures/`
|
||||
|
||||
**Step 3: 运行员工资产 API 静态测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
node tests/unit/employee-asset-api-contract.test.js
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- API 契约测试通过
|
||||
|
||||
**Step 4: 提交**
|
||||
|
||||
```bash
|
||||
git add ruoyi-ui/src/api/ccdiBaseStaffAsset.js ruoyi-ui/tests/unit/employee-asset-api-contract.test.js
|
||||
git commit -m "新增员工资产导入前端接口"
|
||||
```
|
||||
|
||||
### Task 3: 改造员工页导入交互
|
||||
|
||||
**Files:**
|
||||
- Modify: `ruoyi-ui/src/views/ccdiBaseStaff/index.vue`
|
||||
- Modify: `ruoyi-ui/tests/unit/employee-asset-import-ui.test.js`
|
||||
|
||||
**Step 1: 替换员工页上传地址**
|
||||
|
||||
- `assetUpload.url` 改为 `/ccdi/baseStaff/asset/importData`
|
||||
- 模板下载改为 `/ccdi/baseStaff/asset/importTemplate`
|
||||
- 状态查询与失败记录改为员工资产专用 API
|
||||
|
||||
**Step 2: 保持状态隔离**
|
||||
|
||||
- 保留 `assetUpload`、`assetPollingTimer`、`assetCurrentTaskId`
|
||||
- 仅替换其数据来源
|
||||
- 不改动普通员工导入状态
|
||||
|
||||
**Step 3: 调整员工页提示文案**
|
||||
|
||||
- 提示用户仅支持员工本人资产导入
|
||||
- 下载链接文字明确为“下载员工资产模板”
|
||||
|
||||
**Step 4: 运行员工页静态测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
node tests/unit/employee-asset-import-ui.test.js
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 测试通过
|
||||
|
||||
**Step 5: 提交**
|
||||
|
||||
```bash
|
||||
git add ruoyi-ui/src/views/ccdiBaseStaff/index.vue ruoyi-ui/tests/unit/employee-asset-import-ui.test.js
|
||||
git commit -m "切换员工页资产导入到专用接口"
|
||||
```
|
||||
|
||||
### Task 4: 保护亲属页导入交互不回归
|
||||
|
||||
**Files:**
|
||||
- Modify: `ruoyi-ui/src/views/ccdiStaffFmyRelation/index.vue`
|
||||
- Modify: `ruoyi-ui/tests/unit/staff-family-asset-api-contract.test.js`
|
||||
- Modify: `ruoyi-ui/tests/unit/staff-family-asset-detail-import-ui.test.js`
|
||||
|
||||
**Step 1: 检查亲属页调用**
|
||||
|
||||
- 确认亲属页仍使用 `/ccdi/assetInfo/importData`
|
||||
- 确认模板下载仍使用 `/ccdi/assetInfo/importTemplate`
|
||||
- 确认提示文案仍强调“亲属资产”
|
||||
|
||||
**Step 2: 如有必要补充只读修正**
|
||||
|
||||
- 若之前误改了亲属页文案或下载文件名,恢复为亲属专用表达
|
||||
|
||||
**Step 3: 运行亲属页静态测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
node tests/unit/staff-family-asset-api-contract.test.js
|
||||
node tests/unit/staff-family-asset-detail-import-ui.test.js
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 测试通过
|
||||
|
||||
**Step 4: 提交**
|
||||
|
||||
```bash
|
||||
git add ruoyi-ui/src/views/ccdiStaffFmyRelation/index.vue ruoyi-ui/tests/unit/staff-family-asset-api-contract.test.js ruoyi-ui/tests/unit/staff-family-asset-detail-import-ui.test.js
|
||||
git commit -m "保护亲属页资产导入交互不回归"
|
||||
```
|
||||
|
||||
### Task 5: 执行前端回归验证
|
||||
|
||||
**Files:**
|
||||
- Modify: `docs/plans/2026-03-13-employee-family-asset-import-split-design.md`
|
||||
|
||||
**Step 1: 运行全部相关静态测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
node tests/unit/employee-asset-api-contract.test.js
|
||||
node tests/unit/employee-asset-import-ui.test.js
|
||||
node tests/unit/staff-family-asset-api-contract.test.js
|
||||
node tests/unit/staff-family-asset-detail-import-ui.test.js
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 四个测试全部通过
|
||||
|
||||
**Step 2: 做源码检查**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
git grep -n "/ccdi/baseStaff/asset|/ccdi/assetInfo" -- "ruoyi-ui/src/views/**/*.vue" "ruoyi-ui/src/api/*.js"
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 员工页仅指向 `/ccdi/baseStaff/asset/*`
|
||||
- 亲属页仅指向 `/ccdi/assetInfo/*`
|
||||
|
||||
**Step 3: 更新设计文档实现状态**
|
||||
|
||||
- 在设计文档末尾补充前端已完成拆分验证说明
|
||||
|
||||
**Step 4: 提交**
|
||||
|
||||
```bash
|
||||
git add docs/plans/2026-03-13-employee-family-asset-import-split-design.md
|
||||
git commit -m "完成资产导入拆分前端验证"
|
||||
```
|
||||
44
lsfx-mock-server/.gitignore
vendored
44
lsfx-mock-server/.gitignore
vendored
@@ -1,3 +1,45 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# Virtual Environment
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# Environment
|
||||
.env
|
||||
|
||||
# Testing
|
||||
.pytest_cache/
|
||||
*.pyc
|
||||
.coverage
|
||||
htmlcov/
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
172
lsfx-mock-server/CLAUDE.md
Normal file
172
lsfx-mock-server/CLAUDE.md
Normal file
@@ -0,0 +1,172 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## 项目概述
|
||||
|
||||
这是一个基于 FastAPI 的 Mock 服务器,用于模拟流水分析平台的 7 个核心接口。主要特点:
|
||||
- **无数据库设计** - 使用内存存储,服务重启后数据丢失
|
||||
- **配置驱动** - 响应数据来自 `config/responses/` 下的 JSON 模板
|
||||
- **错误场景模拟** - 通过 `error_XXXX` 标记触发特定错误码
|
||||
- **异步文件解析** - 使用 FastAPI BackgroundTasks 模拟 4 秒解析延迟
|
||||
|
||||
## 常用命令
|
||||
|
||||
### 开发运行
|
||||
```bash
|
||||
# 安装依赖
|
||||
pip install -r requirements.txt
|
||||
|
||||
# 启动服务(普通模式)
|
||||
python main.py
|
||||
|
||||
# 启动服务(热重载模式,推荐开发时使用)
|
||||
uvicorn main:app --reload --host 0.0.0.0 --port 8000
|
||||
|
||||
# 访问 API 文档
|
||||
# Swagger UI: http://localhost:8000/docs
|
||||
# ReDoc: http://localhost:8000/redoc
|
||||
```
|
||||
|
||||
### 测试
|
||||
```bash
|
||||
# 运行所有测试
|
||||
pytest tests/ -v
|
||||
|
||||
# 运行单个测试文件
|
||||
pytest tests/test_api.py -v
|
||||
|
||||
# 生成覆盖率报告
|
||||
pytest tests/ -v --cov=. --cov-report=html
|
||||
```
|
||||
|
||||
### Docker 部署
|
||||
```bash
|
||||
# 构建镜像
|
||||
docker build -t lsfx-mock-server .
|
||||
|
||||
# 运行容器
|
||||
docker run -d -p 8000:8000 --name lsfx-mock lsfx-mock-server
|
||||
|
||||
# 或使用 Docker Compose
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
## 架构设计
|
||||
|
||||
### 目录结构
|
||||
```
|
||||
lsfx-mock-server/
|
||||
├── main.py # FastAPI 应用入口
|
||||
├── config/
|
||||
│ ├── settings.py # 全局配置(通过环境变量覆盖)
|
||||
│ └── responses/ # JSON 响应模板(token.json, upload.json 等)
|
||||
├── models/
|
||||
│ ├── request.py # Pydantic 请求模型
|
||||
│ └── response.py # Pydantic 响应模型
|
||||
├── services/ # 业务逻辑层(核心)
|
||||
│ ├── token_service.py # Token 管理和项目创建
|
||||
│ ├── file_service.py # 文件上传、解析状态、删除
|
||||
│ └── statement_service.py # 银行流水查询和分页
|
||||
├── routers/
|
||||
│ └── api.py # API 路由定义(7个接口)
|
||||
└── utils/
|
||||
├── error_simulator.py # 错误场景检测和响应构建
|
||||
└── response_builder.py # 从 JSON 模板构建响应
|
||||
```
|
||||
|
||||
### 核心组件交互
|
||||
|
||||
1. **请求流程**:
|
||||
- `routers/api.py` 接收请求 → 检查错误标记 → 调用 Service → 返回响应
|
||||
|
||||
2. **文件上传流程**:
|
||||
- `FileService.upload_file()` 生成 logId → 存储初始记录 → 启动后台任务
|
||||
- 后台任务: 延迟 4 秒 → 更新解析状态为完成
|
||||
- 客户端轮询 `check_parse_status()` 查看是否解析完成
|
||||
|
||||
3. **错误触发机制**:
|
||||
- 在任意字符串参数中包含 `error_XXXX`(如 `projectNo: "test_error_40101"`)
|
||||
- `ErrorSimulator.detect_error_marker()` 检测标记
|
||||
- 返回预定义的错误响应(见 `error_simulator.py` ERROR_CODES)
|
||||
|
||||
### 服务类职责
|
||||
|
||||
- **TokenService**: 管理 projectId 和 token 映射关系(内存字典)
|
||||
- **FileService**: 管理文件记录、解析状态、支持后台任务
|
||||
- `fetch_inner_flow()`: 返回随机 logId 数组(简化管理,不存储记录)
|
||||
- **StatementService**: 从 JSON 模板读取流水数据并分页返回
|
||||
|
||||
## 开发指南
|
||||
|
||||
### 添加新接口
|
||||
|
||||
1. 在 `models/request.py` 和 `models/response.py` 中定义数据模型(如果需要)
|
||||
2. 在 `services/` 中实现业务逻辑方法
|
||||
3. 在 `routers/api.py` 中添加路由处理函数
|
||||
4. 在 `config/responses/` 中添加 JSON 响应模板(可选)
|
||||
5. 在 `tests/` 中添加测试用例
|
||||
|
||||
### 修改响应数据
|
||||
|
||||
直接编辑 `config/responses/` 下的 JSON 文件,重启服务即可生效。无需修改代码。
|
||||
|
||||
### 添加新的错误码
|
||||
|
||||
在 `utils/error_simulator.py` 的 `ERROR_CODES` 字典中添加新条目:
|
||||
```python
|
||||
ERROR_CODES = {
|
||||
"40101": {"code": "40101", "message": "appId错误"},
|
||||
# 添加新的错误码...
|
||||
}
|
||||
```
|
||||
|
||||
### 配置管理
|
||||
|
||||
- 默认配置在 `config/settings.py` 中的 `Settings` 类
|
||||
- 通过 `.env` 文件覆盖(参考 `.env.example`)
|
||||
- 重要配置项:
|
||||
- `PARSE_DELAY_SECONDS`: 文件解析延迟秒数(默认 4)
|
||||
- `INITIAL_PROJECT_ID`: 项目ID起始值(默认 1000)
|
||||
- `INITIAL_LOG_ID`: 文件ID起始值(默认 10000)
|
||||
|
||||
## 测试说明
|
||||
|
||||
- 测试框架: pytest + httpx(FastAPI 测试客户端)
|
||||
- 测试文件位于 `tests/` 目录
|
||||
- `conftest.py` 包含测试夹具(fixtures)
|
||||
- 所有 API 端点都有对应的集成测试
|
||||
- 测试覆盖了成功场景和错误场景(通过 error_XXXX 标记)
|
||||
|
||||
## API 接口说明
|
||||
|
||||
7个核心接口:
|
||||
|
||||
1. `/account/common/getToken` (POST) - 创建项目并获取 Token
|
||||
2. `/watson/api/project/remoteUploadSplitFile` (POST) - 上传流水文件(multipart/form-data)
|
||||
3. `/watson/api/project/getJZFileOrZjrcuFile` (POST) - 拉取行内流水
|
||||
4. `/watson/api/project/upload/getpendings` (POST) - 检查文件解析状态
|
||||
5. `/watson/api/project/bs/upload` (GET) - 获取单个文件上传后的状态(独立接口,基于 logId 生成确定性数据)
|
||||
6. `/watson/api/project/batchDeleteUploadFile` (POST) - 批量删除文件
|
||||
7. `/watson/api/project/getBSByLogId` (POST) - 获取银行流水(分页)
|
||||
|
||||
详细接口文档请访问 Swagger UI (`/docs`) 或查看 `assets/兰溪-流水分析对接3.md`。
|
||||
|
||||
## 注意事项
|
||||
|
||||
- **数据持久化**: 所有数据存储在内存中,服务重启后数据丢失
|
||||
- **响应字段完整性**: 所有接口响应字段完全对齐接口文档示例
|
||||
- **并发安全**: 当前实现未考虑多线程安全,生产环境需要加锁
|
||||
- **文件存储**: 上传的文件不实际保存,仅模拟元数据
|
||||
- **错误标记**: 错误触发通过字符串匹配实现,确保测试数据唯一性
|
||||
- **后台任务**: FastAPI BackgroundTasks 在同一进程内执行,不会阻塞响应
|
||||
- **请求头处理**: X-Xencio-Client-Id 请求头不验证,接受任意值
|
||||
- **行内流水接口特殊性**:
|
||||
- 简化管理:不存储到 file_records
|
||||
- 随机 logId:无需持久化,仅用于返回
|
||||
- 无后续操作:不支持解析状态检查、删除或查询流水
|
||||
- **获取单个文件上传状态接口特殊性**:
|
||||
- 完全独立工作:不依赖文件上传记录
|
||||
- 确定性数据生成:基于 logId 参数使用随机种子生成数据
|
||||
- 相同 logId 一致性:相同 logId 每次查询返回相同的核心字段值
|
||||
- 无 logId 场景:不带 logId 参数时返回空 logs 数组
|
||||
19
lsfx-mock-server/Dockerfile
Normal file
19
lsfx-mock-server/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.11-slim
|
||||
|
||||
# 设置工作目录
|
||||
WORKDIR /app
|
||||
|
||||
# 复制依赖文件
|
||||
COPY requirements.txt .
|
||||
|
||||
# 安装依赖
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# 复制项目文件
|
||||
COPY . .
|
||||
|
||||
# 暴露端口
|
||||
EXPOSE 8000
|
||||
|
||||
# 启动命令
|
||||
CMD ["python", "main.py"]
|
||||
@@ -203,7 +203,7 @@ pytest tests/ -v --cov=. --cov-report=html
|
||||
|------|------|------|------|
|
||||
| 1 | POST | `/account/common/getToken` | 获取 Token |
|
||||
| 2 | POST | `/watson/api/project/remoteUploadSplitFile` | 上传文件 |
|
||||
| 3 | POST | `/watson/api/project/getJZFileOrZjrcuFile` | 拉取行内流水 |
|
||||
| 3 | POST | `/watson/api/project/getJZFileOrZjrcuFile` | 拉取行内流水(返回随机logId) |
|
||||
| 4 | POST | `/watson/api/project/upload/getpendings` | 检查解析状态 |
|
||||
| 5 | POST | `/watson/api/project/batchDeleteUploadFile` | 删除文件 |
|
||||
| 6 | POST | `/watson/api/project/getBSByLogId` | 获取银行流水 |
|
||||
|
||||
735
lsfx-mock-server/assets/兰溪-流水分析对接3.md
Normal file
735
lsfx-mock-server/assets/兰溪-流水分析对接3.md
Normal file
@@ -0,0 +1,735 @@
|
||||
## 1 新建项目并获取token
|
||||
|
||||
### 1.1.1 接口请求地址
|
||||
|
||||
测 试:
|
||||
|
||||
请求方法为 post
|
||||
|
||||
### 1.1.2 请求参数说明
|
||||
|
||||
接口备注:*第三方系统中,点击需要查看的项目向见知现金流尽调系统请求访问 **token**,每个项目的 **token** 不同。现金流尽调系统根据 **ProjectNo** 为唯一标识查找项目,如果对应的项目不存在则自动创建项目。注意 **token** 使用一次后即失效,再次访问项目需要重新申请。**(支持拉取金综和行内流水)**
|
||||
|
||||
请求体参数说明:
|
||||
|
||||
| 参数名 | 示例值 | 参数类型 | 是否必填 | 参数描述 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| projectNo | 902000_当前时间戳 | String | 是 | 项目编号,格式:902000_当前时间戳 |
|
||||
| entityName | 902000_202603021400 | String | 是 | 项目名称 |
|
||||
| userId | 902001 | String | 是 | 操作人员编号,固定值 |
|
||||
| userName | 902001 | String | 是 | 操作人员姓名,固定值 |
|
||||
| appId | remote_app | String | 是 | 固定值 |
|
||||
| appSecretCode | 6ee87a361f29234ad25d7893da9975a9 | String | 是 | 安全码 md5(projectNo + "_" + entityName + "_" + dXj6eHRmPv) |
|
||||
| role | VIEWER | String | 是 | 固定值 |
|
||||
| orgCode | 902000 | String | 是 | 行社机构号,固定值 |
|
||||
| entityId | 123456 | String | 否 | 企业统信码或个人身份证号 |
|
||||
| xdRelatedPersons | [{"relatedPerson":"上海上水纯净水有限公司","relation":"董事长"}, {"relatedPerson":"于小雪","relation":"股东"}, {"relatedPerson":"深圳市云顶信息技术有限公司","relation":"父子"}] | String | 否 | 信贷关联人信息 |
|
||||
| jzDataDateId | 0 | String | 否 | 拉取指定日期推送过来的金综链流水, 为0时标识不需要拉取金综链流水 |
|
||||
| innerBSStartDateId | 0 | String | 否 | 拉取行内流水开始日期,0:不需要拉取 行内流水。流水分析系统根据entityId到 数仓中查询行内流水 |
|
||||
| innerBSEndDateId | 0 | String | 否 | 拉取行内流水结束日期,0:不需要拉取 行内流水。流水分析系统根据entityId到 数仓中查询行内流水 |
|
||||
| analysisType | -1 | String | 是 | 固定值 |
|
||||
| departmentCode | 902000 | String | 是 | 客户经理所属营业部/分理处的机构编码,固定值 |
|
||||
|
||||
返回参数说明:(200)成功
|
||||
|
||||
| 参数名 | 示例值 | 参数类型 | 参数描述 |
|
||||
| --- | --- | --- | --- |
|
||||
| code | 200 | String | 返回码:200 请求成功; 请求失败: 40100 未知异常 40101 appId错误 40102 appSecretCode错误 40104 可使用项目次数为0,无法创建项目 40105 只读模式下无法新建项目 40106 错误的分析类型,不在规定的取值范围内 40107 当前系统不支持的分析类型 40108 当前用户所属行社无权限 |
|
||||
| data | | Object | 暂无描述 |
|
||||
| data.token | eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJwcm9qZWN0Tm8iOiJ0ZXN0LXpqbngtMTIwNCIsInJvbGUiOiJWSUVXRVIiLCJlbnRpdHlOYW1lIjoi5rWZ5rGf5Yac5L-hdGVzdDEyMDQiLCJ1c2VyTmFtZSI6Iua1i-ivlTAwMSIsImV4cCI6MTcwMTY3ODEyMSwicHJvamVjdElkIjo3NywidXNlcklkIjoidGVzdDAwMSJ9.UMloP6vB1dayQglVdVcpC9w01kv8kyodKDYfPOC7Hac | String | token |
|
||||
| data.projectId | 77 | Integer | 见知项目Id |
|
||||
| data.projectNo | test-zjnx-1204 | String | 项目编号 |
|
||||
| data.entityName | 浙江农信test1204 | String | 项目名称 |
|
||||
| data.analysisType | 0 | Integer | 暂无描述 |
|
||||
| message | create.token.success | String | 暂无描述 |
|
||||
| status | 200 | String | 状态 |
|
||||
| successResponse | true | Boolean | 暂无描述 |
|
||||
|
||||
返回示例:(200)成功
|
||||
|
||||
| {"code":"200","data":{"token":"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJwcm9qZWN0Tm8iOiJ0ZXN0LXpqbngtMTIwNCIsInJvbGUiOiJWSUVXRVIiLCJlbnRpdHlOYW1lIjoi5rWZ5rGf5Yac5L-hdGVzdDEyMDQiLCJ1c2VyTmFtZSI6Iua1i-ivlTAwMSIsImV4cCI6MTcwMTY3ODEyMSwicHJvamVjdElkIjo3NywidXNlcklkIjoidGVzdDAwMSJ9.UMloP6vB1dayQglVdVcpC9w01kv8kyodKDYfPOC7Hac","projectId":77,"projectNo":"test-zjnx-1204","entityName":"浙江农信test1204","analysisType":0},"message":"create.token.success","status":"200","successResponse":true} |
|
||||
| --- |
|
||||
|
||||
返回参数说明:(404)失败
|
||||
|
||||
## 2 上传文件接口
|
||||
|
||||
### 1.2.1 接口请求地址
|
||||
|
||||
测 试:158.234.196.5:82/c4c3/watson/api/project/remoteUploadSplitFile
|
||||
|
||||
请求头为 X-Xencio-Client-Id: 26e5b9239853436b85c623f4b7a6d0e6
|
||||
|
||||
请求方法为 post
|
||||
|
||||
### 1.2.2 请求参数说明
|
||||
|
||||
| 参数 | 类型 | 参数名称 | 是否必填 | 说明 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| groupId | Int | 项目id | 是 | |
|
||||
| files | File | 上传的文件 | 是 | |
|
||||
|
||||
### 1.2.3 响应结果信息
|
||||
|
||||
| 序号 | 字段 | 类型 | 备注 |
|
||||
| --- | --- | --- | --- |
|
||||
| | code | String | 200成功 其他状态码失败 |
|
||||
| | data | Object | 列表 |
|
||||
| | accountName | | 主体名称 |
|
||||
| | accountNo | | 账号 |
|
||||
| | uploadFileName | | 文件名称 |
|
||||
| | fileSize | | 文件大小,单位Byte |
|
||||
| | status | | 状态值 |
|
||||
| | uploadStatusDesc | | 文件状态描述 |
|
||||
| | bank | | 所属银行 |
|
||||
| | currency | | 币种 |
|
||||
| | accountId | | 账号id |
|
||||
| | logId | | 文件id |
|
||||
|
||||
注:status等于-5且uploadStatusDesc等于data.wait.confirm.newaccount表示当前流水文件上传后解析成功。反之则没有成功。
|
||||
|
||||
### 1.2.4 参数请求样例
|
||||
|
||||
|
||||

|
||||
|
||||
### 1.2.5 结果集合样例
|
||||
|
||||
结果集合样例不为测试案例结果,具体测试案例结果由具体的参数案例返回为具体值
|
||||
|
||||
成功:
|
||||
|
||||
{
|
||||
|
||||
"code": "200",
|
||||
|
||||
"data": {
|
||||
|
||||
"accountsOfLog": {
|
||||
|
||||
"13976": [
|
||||
|
||||
{
|
||||
|
||||
"bank": "BSX",
|
||||
|
||||
"accountName": "",
|
||||
|
||||
"accountNo": "虞海良绍兴银行流水",
|
||||
|
||||
"currency": "CNY"
|
||||
|
||||
}
|
||||
|
||||
]
|
||||
|
||||
},
|
||||
|
||||
"uploadLogList": [
|
||||
|
||||
{
|
||||
|
||||
"accountNoList": [],
|
||||
|
||||
"bankName": "BSX",
|
||||
|
||||
"dataTypeInfo": [
|
||||
|
||||
"CSV",
|
||||
|
||||
","
|
||||
|
||||
],
|
||||
|
||||
"downloadFileName": "虞海良绍兴银行流水.csv",
|
||||
|
||||
"enterpriseNameList": [],
|
||||
|
||||
"filePackageId": "14b13103010e4d32b5406c764cfe3644",
|
||||
|
||||
"fileSize": 46724,
|
||||
|
||||
"fileUploadBy": 448,
|
||||
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
|
||||
"fileUploadTime": "2025-03-12 18:53:29",
|
||||
|
||||
"leId": 10724,
|
||||
|
||||
"logId": 13976,
|
||||
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":true}",
|
||||
|
||||
"logType": "bankstatement",
|
||||
|
||||
"loginLeId": 10724,
|
||||
|
||||
"realBankName": "BSX",
|
||||
|
||||
"rows": 0,
|
||||
|
||||
"source": "http",
|
||||
|
||||
"status": -5,
|
||||
|
||||
"templateName": "BSX_T240925",
|
||||
|
||||
"totalRecords": 280,
|
||||
|
||||
"trxDateEndId": 20240905,
|
||||
|
||||
"trxDateStartId": 20230914,
|
||||
|
||||
"uploadFileName": "虞海良绍兴银行流水.csv",
|
||||
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
|
||||
}
|
||||
|
||||
],
|
||||
|
||||
"uploadStatus": 1
|
||||
|
||||
},
|
||||
|
||||
"status": "200",
|
||||
|
||||
"successResponse": true
|
||||
|
||||
}
|
||||
|
||||
## 拉取行内流水的接口
|
||||
|
||||
### 1.3.1 接口请求地址
|
||||
|
||||
测 试:158.234.196.5:82/c4c3/watson/api/project/getJZFileOrZjrcuFile
|
||||
|
||||
请求头为 X-Xencio-Client-Id: 26e5b9239853436b85c623f4b7a6d0e6
|
||||
|
||||
请求方法为 post
|
||||
|
||||
### 1.3.2 请求参数说明
|
||||
|
||||
| 参数 | 类型 | 参数名称 | 是否必填 | 说明 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| groupId | Int | 项目id | 是 | |
|
||||
| customerNo | String | 客户身份证号 | 是 | |
|
||||
| dataChannelCode | String | 校验码 | 是 | ZJRCU |
|
||||
| requestDateId | Int | 发起请求的时间 | 是 | 当天请求时间 |
|
||||
| dataStartDateId | Int | 拉取开始日期 | 是 | |
|
||||
| dataEndDateId | Int | 拉取结束日期 | 是 | |
|
||||
| uploadUserId | int | 柜员号 | 是 | |
|
||||
|
||||
### 响应结果信息
|
||||
|
||||
| 序号 | 字段 | 类型 | 备注 |
|
||||
| --- | --- | --- | --- |
|
||||
| 1 | code | String | 200成功 其他状态码失败 |
|
||||
| 2 | data | Object | 列表 |
|
||||
|
||||
### 参数请求样例
|
||||
|
||||
拉取行内流水
|
||||
|
||||
|
||||

|
||||
|
||||
### 结果集合样例
|
||||
|
||||
{
|
||||
"code": "200",
|
||||
"data": [
|
||||
19154
|
||||
],
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
|
||||
## 4 判断文件是否解析结束
|
||||
|
||||
### 1.4.1 接口请求地址
|
||||
|
||||
测 试:http://158.234.196.5:82/c4c3/watson/api/project/upload/getpendings
|
||||
|
||||
请求头为 X-Xencio-Client-Id: c2017e8d105c435a96f86373635b6a09
|
||||
|
||||
请求方法为 post
|
||||
|
||||
### 1.4.2 请求参数说明
|
||||
|
||||
| 参数 | 类型 | 参数名称 | 是否必填 | 说明 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| groupId | Int | 项目id | 是 | |
|
||||
| inprogressList | String | 文件id | 是 | |
|
||||
|
||||
### 1.4.3 响应结果信息
|
||||
|
||||
| 序号 | 字段 | 类型 | 备注 |
|
||||
| --- | --- | --- | --- |
|
||||
| 1 | code | String | 200成功 其他状态码失败 |
|
||||
| 2 | data | Object | 列表 |
|
||||
| 3 | uploadFileName | | 上传文件名称 |
|
||||
| 4 | status | | 文件解析后状态值 |
|
||||
| 5 | uploadStatusDesc | | 文件解析后状态描述 |
|
||||
| 6 | parsing | | 文件解析状态,true表示解析中,false表示解析结束 |
|
||||
|
||||
注: 文件解析有个处理过程,parsing为false表示解析结束,可以轮询调用此接口,status等于-5且uploadStatusDesc等于data.wait.confirm.newaccount表示文件解析成功。反之则没有成功。
|
||||
|
||||
### 1.4.4 参数请求样例
|
||||
|
||||
|
||||

|
||||
|
||||
### 1.4.5 结果集合样例
|
||||
|
||||
结果集合样例不为测试案例结果,具体测试案例结果由具体的参数案例返回为具体值
|
||||
|
||||
成功:
|
||||
|
||||
{
|
||||
|
||||
"code": "200",
|
||||
|
||||
"data": {
|
||||
|
||||
"parsing": false,
|
||||
|
||||
"pendingList": [
|
||||
|
||||
{
|
||||
|
||||
"accountNoList": [],
|
||||
|
||||
"bankName": "ZJRCU",
|
||||
|
||||
"dataTypeInfo": [
|
||||
|
||||
"CSV",
|
||||
|
||||
","
|
||||
|
||||
],
|
||||
|
||||
"downloadFileName": "230902199012261247_20260201_20260201_1772096608615.csv",
|
||||
|
||||
"enterpriseNameList": [],
|
||||
|
||||
"filePackageId": "cde6c7cf5cab48e8892f0c1c36b2aa7d",
|
||||
|
||||
"fileSize": 53101,
|
||||
|
||||
"fileUploadBy": 448,
|
||||
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
|
||||
"fileUploadTime": "2026-02-27 09:50:18",
|
||||
|
||||
"isSplit": 0,
|
||||
|
||||
"leId": 16210,
|
||||
|
||||
"logId": 19116,
|
||||
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":true}",
|
||||
|
||||
"logType": "bankstatement",
|
||||
|
||||
"loginLeId": 16210,
|
||||
|
||||
"lostHeader": [],
|
||||
|
||||
"realBankName": "ZJRCU",
|
||||
|
||||
"rows": 0,
|
||||
|
||||
"source": "http",
|
||||
|
||||
"status": -5,
|
||||
|
||||
"templateName": "ZJRCU_T251114",
|
||||
|
||||
"totalRecords": 131,
|
||||
|
||||
"trxDateEndId": 20240228,
|
||||
|
||||
"trxDateStartId": 20240201,
|
||||
|
||||
"uploadFileName": "230902199012261247_20260201_20260201_1772096608615.csv",
|
||||
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
|
||||
}
|
||||
|
||||
]
|
||||
|
||||
},
|
||||
|
||||
"status": "200",
|
||||
|
||||
"successResponse": true
|
||||
|
||||
}
|
||||
|
||||
## 5 文件上传后获取单个文件上传后的状态
|
||||
|
||||
### 1.5.1 接口请求地址
|
||||
|
||||
测 试:http://158.234.196.5:82/c4c3/watson/api/project/bs/upload
|
||||
|
||||
请求头为 X-Xencio-Client-Id: c2017e8d105c435a96f86373635b6a09
|
||||
|
||||
请求方法为 get
|
||||
|
||||
### 1.5.2 请求参数说明
|
||||
|
||||
| 参数 | 类型 | 参数名称 | 是否必填 | 说明 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| groupId | Int | 项目id | 是 | |
|
||||
| logId | Int | 文件id | | |
|
||||
|
||||
### 1.5.3 响应结果信息
|
||||
|
||||
| 序号 | 字段 | 类型 | 备注 |
|
||||
| --- | --- | --- | --- |
|
||||
| 1 | code | String | 200成功 其他状态码失败 |
|
||||
| 2 | data | Object | 列表 |
|
||||
| 3 | enterpriseNameList | | 主体名称列表 |
|
||||
| 4 | accountNoList | | 账号列表 |
|
||||
| 5 | uploadFileName | | 文件名称 |
|
||||
| 6 | fileSize | | 文件大小,单位Byte |
|
||||
| 7 | status | | 状态值 |
|
||||
| 8 | uploadStatusDesc | | 文件状态描述 |
|
||||
| 9 | bank | | 所属银行 |
|
||||
| 10 | currency | | 币种 |
|
||||
| 11 | accountId | | 账号id |
|
||||
| 12 | logId | | 文件id |
|
||||
|
||||
注:若enterpriseNameList列表中仅有一个值且值为"",表示流水文件没生成主体,需要调用接口生成主体。
|
||||
|
||||
status等于-5且uploadStatusDesc等于data.wait.confirm.newaccount表示文件上传后解析成功。反之则没有成功。
|
||||
|
||||
### 1.5.4 参数请求样例
|
||||
|
||||
|
||||

|
||||
|
||||
### 1.5.5 结果集合样例
|
||||
|
||||
结果集合样例不为测试案例结果,具体测试案例结果由具体的参数案例返回为具体值
|
||||
|
||||
成功:
|
||||
|
||||
{
|
||||
|
||||
"code": "200",
|
||||
|
||||
"data": {
|
||||
|
||||
"logs": [
|
||||
|
||||
{
|
||||
|
||||
"accountNoList": [
|
||||
|
||||
"18785967364"
|
||||
|
||||
],
|
||||
|
||||
"bankName": "ALIPAY",
|
||||
|
||||
"dataTypeInfo": [
|
||||
|
||||
"CSV",
|
||||
|
||||
","
|
||||
|
||||
],
|
||||
|
||||
"downloadFileName": "支付宝.csv",
|
||||
|
||||
"enterpriseNameList": [
|
||||
|
||||
"曾孝成"
|
||||
|
||||
],
|
||||
|
||||
"fileSize": 16322,
|
||||
|
||||
"fileUploadBy": 448,
|
||||
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
|
||||
"fileUploadTime": "2025-03-13 08:45:32",
|
||||
|
||||
"isSplit": 0,
|
||||
|
||||
"leId": 10741,
|
||||
|
||||
"logId": 13994,
|
||||
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
|
||||
"logType": "bankstatement",
|
||||
|
||||
"loginLeId": 10741,
|
||||
|
||||
"lostHeader": [],
|
||||
|
||||
"realBankName": "ALIPAY",
|
||||
|
||||
"rows": 0,
|
||||
|
||||
"source": "http",
|
||||
|
||||
"status": -5,
|
||||
|
||||
"templateName": "ALIPAY_T220708",
|
||||
|
||||
"totalRecords": 127,
|
||||
|
||||
"trxDateEndId": 20231231,
|
||||
|
||||
"trxDateStartId": 20230102,
|
||||
|
||||
"uploadFileName": "支付宝.pdf",
|
||||
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
|
||||
}
|
||||
|
||||
],
|
||||
|
||||
"status": "",
|
||||
|
||||
"accountId": 8954,
|
||||
|
||||
"currency": "CNY"
|
||||
|
||||
},
|
||||
|
||||
"status": "200",
|
||||
|
||||
"successResponse": true
|
||||
|
||||
}
|
||||
|
||||
## 6 删除主体接口
|
||||
|
||||
### 1.6.1 接口请求地址
|
||||
|
||||
测 试:158.234.196.5:82/c4c3/watson/api/project/batchDeleteUploadFile
|
||||
|
||||
请求头为 X-Xencio-Client-Id: 26e5b9239853436b85c623f4b7a6d0e6
|
||||
|
||||
请求方法为 post
|
||||
|
||||
### 1.6.2 请求参数说明
|
||||
|
||||
| 参数 | 类型 | 参数名称 | 是否必填 | 说明 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| groupId | Int | 项目id | 是 | |
|
||||
| logIds logIds: | Array | 文件id数组 | 是 | |
|
||||
| userId | int | 用户柜员号 | 是 | |
|
||||
|
||||
### 1.6.3 响应结果信息
|
||||
|
||||
| 序号 | 字段 | 类型 | 备注 |
|
||||
| --- | --- | --- | --- |
|
||||
| 1 | code | String | 200成功 其他状态码失败 |
|
||||
| 2 | data | Object | 列表 |
|
||||
|
||||
### 1.6.4 参数请求样例
|
||||
|
||||
|
||||

|
||||
|
||||
### 1.6.5 结果集合样例
|
||||
|
||||
结果集合样例不为测试案例结果,具体测试案例结果由具体的参数案例返回为具体值
|
||||
|
||||
成功:
|
||||
|
||||
{
|
||||
|
||||
"code": "200 OK",
|
||||
|
||||
"data": {
|
||||
|
||||
"message": "delete.files.success"
|
||||
|
||||
},
|
||||
|
||||
"message": "delete.files.success",
|
||||
|
||||
"status": "200",
|
||||
|
||||
"successResponse": true
|
||||
|
||||
}
|
||||
|
||||
## 7 获取流水列表并存储到兰溪本地
|
||||
|
||||
### 1.7.1 接口请求地址
|
||||
|
||||
测 试:158.234.196.5:82/c4c3/watson/api/project/getBSByLogId
|
||||
|
||||
请求头为 X-Xencio-Client-Id: 26e5b9239853436b85c623f4b7a6d0e6
|
||||
|
||||
请求方法为 post
|
||||
|
||||
### 1.7.2 请求参数说明
|
||||
|
||||
| 参数 | 类型 | 参数名称 | 是否必填 | 说明 |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| groupId | Int | 项目id | 是 | |
|
||||
| logId | Int | 文件id | 是 | |
|
||||
| pageNow | Int | 当前页码 | 是 | |
|
||||
| pageSize | Int | 查询条数 | 是 | |
|
||||
|
||||
### 1.7.3 响应结果信息
|
||||
|
||||
| 序号 | 字段 | 类型 | 备注 |
|
||||
| --- | --- | --- | --- |
|
||||
| 1 | code | String | 200成功 其他状态码失败 |
|
||||
| 2 | data | Object | 列表 |
|
||||
| 3 | bankStatementList | Array | 流水列表 |
|
||||
| 4 | totalCount | Int | 总条数 |
|
||||
|
||||
### 1.7.4 参数请求样例
|
||||
|
||||
|
||||

|
||||
|
||||
### 1.7.5 结果集合样例
|
||||
|
||||
结果集合样例不为测试案例结果,具体测试案例结果由具体的参数案例返回为具体值
|
||||
|
||||
成功:
|
||||
|
||||
{
|
||||
|
||||
"code": "200",
|
||||
|
||||
"data": {
|
||||
|
||||
"bankStatementList": [
|
||||
|
||||
{
|
||||
|
||||
"accountId": 0,
|
||||
|
||||
"accountMaskNo": "101015251071645",
|
||||
|
||||
"accountingDate": "2024-02-01",
|
||||
|
||||
"accountingDateId": 20240201,
|
||||
|
||||
"archivingFlag": 0,
|
||||
|
||||
"attachments": 0,
|
||||
|
||||
"balanceAmount": 4814.82,
|
||||
|
||||
"bank": "ZJRCU",
|
||||
|
||||
"bankComments": "",
|
||||
|
||||
"bankStatementId": 12847662,
|
||||
|
||||
"bankTrxNumber": "1a10458dd5c3366d7272285812d434fc",
|
||||
|
||||
"batchId": 19135,
|
||||
|
||||
"cashType": "1",
|
||||
|
||||
"commentsNum": 0,
|
||||
|
||||
"crAmount": 0,
|
||||
|
||||
"cretNo": "230902199012261247",
|
||||
|
||||
"currency": "CNY",
|
||||
|
||||
"customerAccountMaskNo": "597671502",
|
||||
|
||||
"customerBank": "",
|
||||
|
||||
"customerId": -1,
|
||||
|
||||
"customerName": "小店",
|
||||
|
||||
"customerReference": "",
|
||||
|
||||
"downPaymentFlag": 0,
|
||||
|
||||
"drAmount": 245.8,
|
||||
|
||||
"exceptionType": "",
|
||||
|
||||
"groupId": 16238,
|
||||
|
||||
"internalFlag": 0,
|
||||
|
||||
"leId": 16308,
|
||||
|
||||
"leName": "张传伟",
|
||||
|
||||
"overrideBsId": 0,
|
||||
|
||||
"paymentMethod": "",
|
||||
|
||||
"sourceCatalogId": 0,
|
||||
|
||||
"split": 0,
|
||||
|
||||
"subBankstatementId": 0,
|
||||
|
||||
"toDoFlag": 0,
|
||||
|
||||
"transAmount": 245.8,
|
||||
|
||||
"transFlag": "P",
|
||||
|
||||
"transTypeId": 0,
|
||||
|
||||
"transformAmount": 0,
|
||||
|
||||
"transformCrAmount": 0,
|
||||
|
||||
"transformDrAmount": 0,
|
||||
|
||||
"transfromBalanceAmount": 0,
|
||||
|
||||
"trxBalance": 0,
|
||||
|
||||
"trxDate": "2024-02-01 10:33:44",
|
||||
|
||||
"userMemo": "财付通消费_小店"
|
||||
|
||||
}
|
||||
|
||||
],
|
||||
|
||||
"totalCount": 131
|
||||
|
||||
},
|
||||
|
||||
"status": "200",
|
||||
|
||||
"successResponse": true
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
接口说明:
|
||||
|
||||
1. 初始化调用/account/common/getToken接口创建项目(必填参数按要求输入,选填参数可忽略)。
|
||||
1. 其次调用/watson/api/project/remoteUploadSplitFile接口上传文件,或者拉取行内流水/watson/api/project/getJZFileOrZjrcuFile
|
||||
1. 接着调用/watson/api/project/upload/getpendings获取文件解析的状态,因为文件上传后有个解析过程,所以需要观察该接口返回的parsing是否为false,如果为true,可间隔1s轮询调用此接口,直到parsing为false,获取status的值,如果不为-5,提示用户解析失败。
|
||||
1. 如果流水文件解析成功,可以调用/watson/api/project/bs/upload接口获取解析后主体名称和账号等信息。
|
||||
1. 如果流水文件解析失败,可以调用/watson/api/project/batchDeleteUploadFile接口删除流水文件。
|
||||
1. 流水解析成功后,调用/watson/api/project/upload/getBankStatement接口将对应的流水明细存储到兰溪本地
|
||||
生产ip:64.202.32.176
|
||||
|
||||
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image1.png
Normal file
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image1.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 21 KiB |
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image2.png
Normal file
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 16 KiB |
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image3.png
Normal file
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image3.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 18 KiB |
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image4.png
Normal file
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image4.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 32 KiB |
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image5.png
Normal file
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image5.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 20 KiB |
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image6.png
Normal file
BIN
lsfx-mock-server/assets/兰溪-流水分析对接3_images/image6.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 21 KiB |
@@ -48,6 +48,7 @@
|
||||
"transfromBalanceAmount": 0,
|
||||
"trxBalance": 0,
|
||||
"trxDate": "2024-02-01 10:33:44",
|
||||
"uploadSequnceNumber": 1,
|
||||
"userMemo": "财付通消费_小店"
|
||||
},
|
||||
{
|
||||
@@ -95,6 +96,7 @@
|
||||
"transfromBalanceAmount": 0,
|
||||
"trxBalance": 0,
|
||||
"trxDate": "2024-02-02 14:22:18",
|
||||
"uploadSequnceNumber": 2,
|
||||
"userMemo": "支付宝转账_支付宝"
|
||||
}
|
||||
],
|
||||
|
||||
42
lsfx-mock-server/config/responses/upload_status.json
Normal file
42
lsfx-mock-server/config/responses/upload_status.json
Normal file
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"success_response": {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": [
|
||||
{
|
||||
"accountNoList": ["18785967364"],
|
||||
"bankName": "ALIPAY",
|
||||
"dataTypeInfo": ["CSV", ","],
|
||||
"downloadFileName": "支付宝.csv",
|
||||
"enterpriseNameList": ["曾孝成"],
|
||||
"fileSize": 16322,
|
||||
"fileUploadBy": 448,
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
"fileUploadTime": "2025-03-13 08:45:32",
|
||||
"isSplit": 0,
|
||||
"leId": 10741,
|
||||
"logId": 13994,
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
"logType": "bankstatement",
|
||||
"loginLeId": 10741,
|
||||
"lostHeader": [],
|
||||
"realBankName": "ALIPAY",
|
||||
"rows": 0,
|
||||
"source": "http",
|
||||
"status": -5,
|
||||
"templateName": "ALIPAY_T220708",
|
||||
"totalRecords": 127,
|
||||
"trxDateEndId": 20231231,
|
||||
"trxDateStartId": 20230102,
|
||||
"uploadFileName": "支付宝.pdf",
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
}
|
||||
],
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
}
|
||||
17
lsfx-mock-server/docker-compose.yml
Normal file
17
lsfx-mock-server/docker-compose.yml
Normal file
@@ -0,0 +1,17 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
lsfx-mock-server:
|
||||
build: .
|
||||
container_name: lsfx-mock-server
|
||||
ports:
|
||||
- "8000:8000"
|
||||
environment:
|
||||
- APP_NAME=流水分析Mock服务
|
||||
- APP_VERSION=1.0.0
|
||||
- DEBUG=true
|
||||
- HOST=0.0.0.0
|
||||
- PORT=8000
|
||||
- PARSE_DELAY_SECONDS=4
|
||||
- MAX_FILE_SIZE=10485760
|
||||
restart: unless-stopped
|
||||
379
lsfx-mock-server/docs/implementation_report.md
Normal file
379
lsfx-mock-server/docs/implementation_report.md
Normal file
@@ -0,0 +1,379 @@
|
||||
# Mock 服务器接口优化实施报告
|
||||
|
||||
## 项目概述
|
||||
|
||||
**项目名称**: 流水分析 Mock 服务器接口优化
|
||||
**实施日期**: 2026-03-12
|
||||
**实施方法**: 测试驱动开发 (TDD)
|
||||
**项目状态**: ✅ 全部完成
|
||||
|
||||
## 一、实施任务清单
|
||||
|
||||
### Task 1: 修复 FileRecord.log_meta 默认值 ✅
|
||||
|
||||
**问题描述**:
|
||||
- FileRecord 类的 log_meta 字段默认值为 `{}`,不符合预期(应为 `None`)
|
||||
|
||||
**解决方案**:
|
||||
- 修改 `models/file_record.py` 中 FileRecord 类的定义
|
||||
- 将 `log_meta: dict = {}` 改为 `log_meta: Optional[dict] = None`
|
||||
- 添加 `from typing import Optional` 导入
|
||||
|
||||
**文件修改**:
|
||||
- `D:\ccdi\lsfx-mock-server\models\file_record.py`
|
||||
|
||||
---
|
||||
|
||||
### Task 2-4: 编写测试用例(TDD 红灯阶段)✅
|
||||
|
||||
**测试用例设计**:
|
||||
|
||||
1. **test_get_upload_status_without_log_id** (Task 2)
|
||||
- 测试目标: 验证不带 logId 参数时返回空 logs 数组
|
||||
- 预期结果: `response.json()["data"]["logs"] == []`
|
||||
|
||||
2. **test_get_upload_status_with_log_id** (Task 3)
|
||||
- 测试目标: 验证带 logId 参数时返回包含数据的 logs 数组
|
||||
- 预期结果: `len(response.json()["data"]["logs"]) == 1`
|
||||
- 预期结果: `log["logId"] == 12345`
|
||||
|
||||
3. **test_deterministic_data_generation** (Task 4)
|
||||
- 测试目标: 验证相同 logId 多次查询返回相同的核心字段值
|
||||
- 测试方法: 使用相同 logId 调用两次接口,比对核心字段
|
||||
- 核心字段: logId, groupId, fileName, bankName, totalRecords, fileSize
|
||||
|
||||
**文件添加**:
|
||||
- `D:\ccdi\lsfx-mock-server\tests\test_api.py` (3 个新测试函数)
|
||||
|
||||
**TDD 红灯验证**: ✅ 测试运行失败,符合预期
|
||||
|
||||
---
|
||||
|
||||
### Task 5-6: 实现确定性数据生成功能 ✅
|
||||
|
||||
**实现内容**:
|
||||
|
||||
1. **Task 5: 实现 _generate_deterministic_record() 方法**
|
||||
- 功能: 基于 logId 生成确定性的文件记录数据
|
||||
- 关键技术: 使用 `random.seed(log_id)` 设置随机种子
|
||||
- 数据生成规则:
|
||||
- 相同 logId → 相同 fileName, bankName, totalRecords, fileSize
|
||||
- 合理的银行名称推断(基于文件名)
|
||||
- 合理的日期范围(90-365天)
|
||||
- 合理的账号和主体信息
|
||||
|
||||
2. **Task 6: 重构 get_upload_status() 方法**
|
||||
- 修改逻辑:
|
||||
- 无 logId → 返回空 logs 数组
|
||||
- 有 logId → 调用 `_generate_deterministic_record(log_id)` 生成数据
|
||||
- 保持接口响应格式不变
|
||||
|
||||
**文件修改**:
|
||||
- `D:\ccdi\lsfx-mock-server\services\file_service.py`
|
||||
- 新增 `_generate_deterministic_record()` 方法(约 80 行)
|
||||
- 重构 `get_upload_status()` 方法
|
||||
|
||||
---
|
||||
|
||||
### Task 7: 运行测试验证功能(TDD 绿灯阶段)✅
|
||||
|
||||
**测试执行结果**:
|
||||
```
|
||||
tests/test_api.py::test_get_upload_status_with_log_id PASSED
|
||||
tests/test_api.py::test_get_upload_status_without_log_id PASSED
|
||||
tests/test_api.py::test_deterministic_data_generation PASSED
|
||||
tests/test_api.py::test_field_completeness PASSED
|
||||
|
||||
======================== 13 passed, 1 warning in 0.23s ========================
|
||||
```
|
||||
|
||||
**TDD 绿灯验证**: ✅ 所有测试通过
|
||||
|
||||
---
|
||||
|
||||
### Task 8: 更新文档并提交 ✅
|
||||
|
||||
**文档更新内容**:
|
||||
1. 在 "注意事项" 部分添加了 "获取单个文件上传状态接口特殊性" 说明
|
||||
2. 在 "API 接口说明" 部分标注了接口的独立性特性
|
||||
|
||||
**文件修改**:
|
||||
- `D:\ccdi\lsfx-mock-server\CLAUDE.md`
|
||||
|
||||
**Git 状态**: 项目不是 Git 仓库,跳过 Git 提交
|
||||
|
||||
---
|
||||
|
||||
## 二、测试覆盖率
|
||||
|
||||
### 测试用例总览
|
||||
|
||||
| 测试文件 | 测试用例数 | 通过率 | 说明 |
|
||||
|---------|----------|--------|------|
|
||||
| `tests/test_api.py` | 10 | 100% | API 接口测试(包含本次新增 3 个) |
|
||||
| `tests/integration/test_full_workflow.py` | 3 | 100% | 集成测试 |
|
||||
| **总计** | **13** | **100%** | ✅ 全部通过 |
|
||||
|
||||
### 新增测试用例详情
|
||||
|
||||
1. **test_get_upload_status_without_log_id**
|
||||
- 测试场景: 不带 logId 参数查询
|
||||
- 验证点: 返回空 logs 数组
|
||||
- 状态: ✅ 通过
|
||||
|
||||
2. **test_get_upload_status_with_log_id**
|
||||
- 测试场景: 带 logId 参数查询
|
||||
- 验证点: 返回包含 1 条记录的 logs 数组
|
||||
- 验证点: 记录的 logId 与参数一致
|
||||
- 状态: ✅ 通过
|
||||
|
||||
3. **test_deterministic_data_generation**
|
||||
- 测试场景: 相同 logId 多次查询
|
||||
- 验证点: 6 个核心字段值完全一致
|
||||
- 验证点: fileName, bankName, totalRecords, fileSize 等字段的确定性
|
||||
- 状态: ✅ 通过
|
||||
|
||||
4. **test_field_completeness** (已存在,本次验证)
|
||||
- 测试场景: 验证响应字段完整性
|
||||
- 验证点: 所有必需字段都存在
|
||||
- 状态: ✅ 通过
|
||||
|
||||
---
|
||||
|
||||
## 三、关键改进点
|
||||
|
||||
### 1. 接口独立性设计
|
||||
|
||||
**改进前**:
|
||||
- `/watson/api/project/bs/upload` 接口依赖文件上传记录
|
||||
- 需要先上传文件才能查询状态
|
||||
- 查询不存在的 logId 返回空数组或错误
|
||||
|
||||
**改进后**:
|
||||
- 接口完全独立工作,不依赖任何文件上传记录
|
||||
- 任意 logId 都能返回确定性的状态数据
|
||||
- 不带 logId 时返回空 logs 数组
|
||||
- 支持测试环境和生产环境的无状态查询
|
||||
|
||||
### 2. 确定性数据生成
|
||||
|
||||
**技术实现**:
|
||||
- 使用 `random.seed(log_id)` 固定随机数生成器
|
||||
- 相同 logId → 相同的随机数序列 → 相同的生成数据
|
||||
- 保证核心字段的一致性:
|
||||
- logId, groupId, fileName, bankName
|
||||
- totalRecords, fileSize
|
||||
- trxDateStartId, trxDateEndId
|
||||
- accountNoList, enterpriseNameList
|
||||
|
||||
**业务价值**:
|
||||
- 测试人员可以使用任意 logId 进行测试
|
||||
- 相同 logId 多次查询结果一致,便于验证
|
||||
- 无需维护文件上传记录,简化测试流程
|
||||
|
||||
### 3. 代码质量提升
|
||||
|
||||
**新增代码**:
|
||||
- `_generate_deterministic_record()` 方法: 约 80 行
|
||||
- 测试代码: 3 个新测试函数,约 60 行
|
||||
- 文档更新: 2 处说明性文字
|
||||
|
||||
**代码复用**:
|
||||
- 复用 `_infer_bank_name()` 方法进行银行名称推断
|
||||
- 复用 FileRecord 数据模型进行数据封装
|
||||
|
||||
**代码质量**:
|
||||
- 遵循 PEP 8 编码规范
|
||||
- 完整的文档字符串(docstring)
|
||||
- 清晰的变量命名和逻辑结构
|
||||
|
||||
---
|
||||
|
||||
## 四、技术亮点
|
||||
|
||||
### 1. 测试驱动开发 (TDD) 实践
|
||||
|
||||
**红灯-绿灯-重构 循环**:
|
||||
1. **红灯阶段** (Task 2-4): 先写测试,测试失败
|
||||
2. **绿灯阶段** (Task 5-6): 实现功能,测试通过
|
||||
3. **重构阶段** (Task 7): 优化代码,保持测试通过
|
||||
|
||||
**TDD 优势**:
|
||||
- 需求明确:测试用例即需求文档
|
||||
- 设计导向:以测试驱动接口设计
|
||||
- 快速反馈:立即发现功能偏差
|
||||
- 重构信心:测试保护代码质量
|
||||
|
||||
### 2. 随机数种子技术
|
||||
|
||||
**技术原理**:
|
||||
```python
|
||||
random.seed(log_id) # 固定随机种子
|
||||
# 后续所有 random 调用都基于该种子
|
||||
# 相同种子 → 相同随机数序列 → 相同生成数据
|
||||
```
|
||||
|
||||
**应用场景**:
|
||||
- Mock 服务器:生成确定性测试数据
|
||||
- 数据脱敏:保留数据分布特征
|
||||
- 压力测试:可重现的随机数据
|
||||
|
||||
### 3. 接口独立性设计模式
|
||||
|
||||
**设计原则**:
|
||||
- 无状态性:不依赖外部状态(文件记录)
|
||||
- 幂等性:相同参数多次调用返回相同结果
|
||||
- 可预测性:输入和输出有明确的映射关系
|
||||
|
||||
**优势**:
|
||||
- 简化测试:无需复杂的前置条件
|
||||
- 提高可靠性:减少依赖,降低故障率
|
||||
- 易于扩展:独立功能易于维护和升级
|
||||
|
||||
---
|
||||
|
||||
## 五、已知限制和后续优化建议
|
||||
|
||||
### 已知限制
|
||||
|
||||
1. **非核心字段的不确定性**
|
||||
- 限制: leId, loginLeId 等字段每次查询都会变化
|
||||
- 原因: 这些字段使用 `random.randint()` 但不在种子控制范围内
|
||||
- 影响: 不影响核心业务逻辑,但可能与真实系统行为有差异
|
||||
|
||||
2. **并发安全性**
|
||||
- 限制: `random.seed()` 会影响全局随机数生成器
|
||||
- 场景: 高并发情况下可能影响其他接口的随机数生成
|
||||
- 建议: 使用线程局部随机数生成器(`random.Random()` 实例)
|
||||
|
||||
3. **银行名称推断的简化**
|
||||
- 限制: 基于 fileName 推断银行名称,规则较简单
|
||||
- 场景: 复杂文件名可能推断错误
|
||||
- 影响: 返回的 bankName 可能不准确
|
||||
|
||||
### 后续优化建议
|
||||
|
||||
#### 1. 优化并发安全性(中优先级)
|
||||
|
||||
**建议方案**:
|
||||
```python
|
||||
def _generate_deterministic_record(self, log_id: int, group_id: int) -> dict:
|
||||
# 使用局部随机数生成器,避免影响全局
|
||||
local_random = random.Random(log_id)
|
||||
|
||||
# 后续使用 local_random 替代 random
|
||||
account_no = f"{local_random.randint(10000000000, 99999999999)}"
|
||||
# ...
|
||||
```
|
||||
|
||||
**预期收益**:
|
||||
- 提高并发安全性
|
||||
- 避免随机数生成器竞争
|
||||
- 提升代码质量
|
||||
|
||||
#### 2. 增强银行名称推断(低优先级)
|
||||
|
||||
**建议方案**:
|
||||
- 维护一个银行关键词映射表
|
||||
- 使用正则表达式匹配文件名中的银行关键词
|
||||
- 提供配置化的银行名称映射规则
|
||||
|
||||
**预期收益**:
|
||||
- 提高银行名称推断准确率
|
||||
- 增强系统的可配置性
|
||||
|
||||
#### 3. 添加配置化的确定性字段(低优先级)
|
||||
|
||||
**建议方案**:
|
||||
- 在配置文件中定义哪些字段需要确定性生成
|
||||
- 提供开关控制确定性模式
|
||||
|
||||
**预期收益**:
|
||||
- 提高系统灵活性
|
||||
- 便于适应不同测试场景
|
||||
|
||||
#### 4. 添加接口文档增强(建议)
|
||||
|
||||
**建议方案**:
|
||||
- 在 Swagger 文档中添加接口独立性说明
|
||||
- 添加确定性数据生成的使用示例
|
||||
- 提供 logId 参数的最佳实践指南
|
||||
|
||||
**预期收益**:
|
||||
- 提升 API 文档的完整性
|
||||
- 降低测试人员的使用门槛
|
||||
|
||||
---
|
||||
|
||||
## 六、项目文件清单
|
||||
|
||||
### 修改的文件
|
||||
|
||||
1. `D:\ccdi\lsfx-mock-server\models\file_record.py`
|
||||
- 修改内容: FileRecord 类的 log_meta 字段默认值
|
||||
- 修改行数: 1 行
|
||||
|
||||
2. `D:\ccdi\lsfx-mock-server\services\file_service.py`
|
||||
- 修改内容: 新增 `_generate_deterministic_record()` 方法
|
||||
- 修改内容: 重构 `get_upload_status()` 方法
|
||||
- 新增代码: 约 80 行
|
||||
- 重构代码: 约 20 行
|
||||
|
||||
3. `D:\ccdi\lsfx-mock-server\tests\test_api.py`
|
||||
- 新增内容: 3 个测试函数
|
||||
- 新增代码: 约 60 行
|
||||
|
||||
4. `D:\ccdi\lsfx-mock-server\CLAUDE.md`
|
||||
- 修改内容: 添加接口独立性说明(2 处)
|
||||
- 修改行数: 约 10 行
|
||||
|
||||
### 新增的文件
|
||||
|
||||
无
|
||||
|
||||
---
|
||||
|
||||
## 七、总结
|
||||
|
||||
### 项目成果
|
||||
|
||||
✅ **功能完整性**: 100% 完成,所有需求已实现
|
||||
✅ **测试覆盖率**: 100% 通过,13 个测试用例全部通过
|
||||
✅ **文档完整性**: 100% 更新,接口说明已添加
|
||||
✅ **代码质量**: 遵循最佳实践,代码结构清晰
|
||||
|
||||
### 关键成就
|
||||
|
||||
1. **成功实现接口独立性设计**,简化了测试流程
|
||||
2. **引入确定性数据生成技术**,提高了测试可重复性
|
||||
3. **遵循 TDD 开发流程**,保证了代码质量和需求对齐
|
||||
4. **完善项目文档**,提升了项目的可维护性
|
||||
|
||||
### 业务价值
|
||||
|
||||
- **提升测试效率**: 测试人员无需上传文件即可查询任意 logId 的状态
|
||||
- **提高测试可靠性**: 相同 logId 多次查询结果一致,便于自动化测试
|
||||
- **降低维护成本**: 独立接口设计减少了依赖关系,降低了维护复杂度
|
||||
- **增强可扩展性**: 确定性数据生成技术可应用于其他 Mock 接口
|
||||
|
||||
---
|
||||
|
||||
## 附录: 技术参考资料
|
||||
|
||||
### 随机数种子技术文档
|
||||
- Python random 模块: https://docs.python.org/3/library/random.html
|
||||
- 确定性随机数生成器: https://en.wikipedia.org/wiki/Pseudorandom_number_generator
|
||||
|
||||
### 测试驱动开发 (TDD)
|
||||
- TDD 最佳实践: https://testdriven.io/test-driven-development/
|
||||
- FastAPI 测试指南: https://fastapi.tiangolo.com/tutorial/testing/
|
||||
|
||||
### Mock 服务器设计模式
|
||||
- Mock 服务器最佳实践: https://martinfowler.com/articles/mocksArentStubs.html
|
||||
- 无状态接口设计: https://www.ics.uci.edu/~fielding/pubs/dissertation/rest_arch_style.htm
|
||||
|
||||
---
|
||||
|
||||
**报告生成时间**: 2026-03-12
|
||||
**报告生成工具**: Claude Code (claude-sonnet-4-6)
|
||||
**项目状态**: ✅ 全部完成
|
||||
@@ -0,0 +1,221 @@
|
||||
# 设计文档:修改拉取行内流水接口返回值
|
||||
|
||||
**日期:** 2026-03-04
|
||||
**状态:** 已批准
|
||||
**作者:** Claude Code
|
||||
|
||||
## 1. 概述和目标
|
||||
|
||||
### 目标
|
||||
修改 `/watson/api/project/getJZFileOrZjrcuFile` 接口的返回格式,从当前的错误格式改为返回 logId 数组。
|
||||
|
||||
### 当前实现
|
||||
```json
|
||||
{
|
||||
"code": "200",
|
||||
"data": {"code": "501014", "message": "无行内流水文件"},
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
```
|
||||
|
||||
### 修改后实现
|
||||
|
||||
**成功场景:**
|
||||
```json
|
||||
{
|
||||
"code": "200",
|
||||
"data": [19154],
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
```
|
||||
|
||||
**错误场景(通过 `error_501014` 标记触发):**
|
||||
```json
|
||||
{
|
||||
"code": "501014",
|
||||
"message": "无行内流水文件",
|
||||
"status": "501014",
|
||||
"successResponse": false
|
||||
}
|
||||
```
|
||||
|
||||
### 关键特性
|
||||
- logId 通过随机数生成(范围:10000-99999)
|
||||
- 独立简化管理,不存储到 `file_records`,不支持后续操作
|
||||
- 保留错误模拟功能(通过 `error_XXXX` 标记触发)
|
||||
|
||||
## 2. 技术实现
|
||||
|
||||
### 修改文件
|
||||
- `services/file_service.py` - 修改 `fetch_inner_flow()` 方法
|
||||
|
||||
### 具体实现
|
||||
|
||||
在 `FileService` 类中修改 `fetch_inner_flow()` 方法:
|
||||
|
||||
```python
|
||||
def fetch_inner_flow(self, request: Union[Dict, object]) -> Dict:
|
||||
"""拉取行内流水(返回随机logId)
|
||||
|
||||
Args:
|
||||
request: 拉取流水请求(可以是字典或对象)
|
||||
|
||||
Returns:
|
||||
流水响应字典,包含随机生成的logId数组
|
||||
"""
|
||||
import random
|
||||
|
||||
# 随机生成一个logId(范围:10000-99999)
|
||||
log_id = random.randint(10000, 99999)
|
||||
|
||||
# 返回成功的响应,包含logId数组
|
||||
return {
|
||||
"code": "200",
|
||||
"data": [log_id],
|
||||
"status": "200",
|
||||
"successResponse": True,
|
||||
}
|
||||
```
|
||||
|
||||
### 关键变化
|
||||
1. 移除原来的"无行内流水文件"硬编码错误响应
|
||||
2. 使用 `random.randint(10000, 99999)` 生成随机 logId
|
||||
3. 返回格式改为 `{"code": "200", "data": [log_id], ...}`
|
||||
4. `import random` 放在方法内部,避免顶层导入(保持简单)
|
||||
|
||||
### 无需修改的部分
|
||||
- `routers/api.py` - 错误检测逻辑保持不变
|
||||
- `utils/error_simulator.py` - 错误码定义已包含 501014
|
||||
- `config/settings.py` - 无需新增配置
|
||||
|
||||
## 3. 测试计划
|
||||
|
||||
### 测试文件
|
||||
- `tests/test_api.py`
|
||||
|
||||
### 新增测试用例
|
||||
|
||||
#### 3.1 测试成功场景
|
||||
```python
|
||||
def test_fetch_inner_flow_success(client, sample_inner_flow_request):
|
||||
"""测试拉取行内流水 - 成功场景"""
|
||||
response = client.post(
|
||||
"/watson/api/project/getJZFileOrZjrcuFile",
|
||||
data=sample_inner_flow_request
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "200"
|
||||
assert data["successResponse"] == True
|
||||
assert isinstance(data["data"], list)
|
||||
assert len(data["data"]) == 1
|
||||
assert isinstance(data["data"][0], int)
|
||||
assert 10000 <= data["data"][0] <= 99999
|
||||
```
|
||||
|
||||
#### 3.2 测试错误场景
|
||||
```python
|
||||
def test_fetch_inner_flow_error_501014(client):
|
||||
"""测试拉取行内流水 - 错误场景 501014"""
|
||||
request_data = {
|
||||
"groupId": 1001,
|
||||
"customerNo": "test_error_501014",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
response = client.post(
|
||||
"/watson/api/project/getJZFileOrZjrcuFile",
|
||||
data=request_data
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "501014"
|
||||
assert data["successResponse"] == False
|
||||
```
|
||||
|
||||
### 测试命令
|
||||
```bash
|
||||
# 运行所有行内流水相关测试
|
||||
pytest tests/test_api.py -k "fetch_inner_flow" -v
|
||||
|
||||
# 运行单个测试
|
||||
pytest tests/test_api.py::test_fetch_inner_flow_success -v
|
||||
pytest tests/test_api.py::test_fetch_inner_flow_error_501014 -v
|
||||
```
|
||||
|
||||
## 4. 文档更新
|
||||
|
||||
### 4.1 README.md
|
||||
更新接口说明部分,将"模拟无数据场景"改为"返回随机logId"。
|
||||
|
||||
### 4.2 CLAUDE.md
|
||||
在架构设计部分补充说明行内流水接口的特殊性:
|
||||
- 简化管理(不存储到 file_records)
|
||||
- 随机 logId(无需持久化)
|
||||
- 无后续操作支持(无需解析状态检查)
|
||||
|
||||
## 5. 设计决策
|
||||
|
||||
### 为什么选择随机生成 logId?
|
||||
- **简化管理**:行内流水拉取是独立的简化流程,不需要与文件上传共用复杂的状态管理
|
||||
- **无需持久化**:logId 仅用于返回,不需要存储或后续查询
|
||||
- **测试友好**:每次调用返回不同的值,避免固定值导致的测试假阳性
|
||||
|
||||
### 为什么不使用配置文件?
|
||||
- 响应数据需要运行时动态生成(随机 logId)
|
||||
- 配置文件适合静态或模板化的响应,不适合需要随机值的场景
|
||||
- 保持代码简单直接,避免过度设计
|
||||
|
||||
### 为什么保留错误模拟?
|
||||
- Mock 服务器的核心功能之一是模拟各种场景
|
||||
- 501014 错误是真实的业务场景(无行内流水文件)
|
||||
- 通过 `error_XXXX` 标记触发错误,与项目整体设计一致
|
||||
|
||||
## 6. 影响范围
|
||||
|
||||
### 直接影响
|
||||
- `services/file_service.py` - 修改 1 个方法
|
||||
- `tests/test_api.py` - 新增/修改测试用例
|
||||
|
||||
### 间接影响
|
||||
- API 文档自动更新(FastAPI Swagger UI)
|
||||
- README.md 需要更新示例
|
||||
|
||||
### 无影响
|
||||
- 其他 6 个接口的返回格式
|
||||
- 错误模拟机制
|
||||
- 前端集成(假设前端已按新格式设计)
|
||||
|
||||
## 7. 风险和限制
|
||||
|
||||
### 风险
|
||||
- **logId 冲突**:理论上可能生成重复的 logId,但由于不存储,不会造成实际问题
|
||||
- **前端兼容性**:如果前端已按旧格式实现,需要协调更新
|
||||
|
||||
### 限制
|
||||
- 不支持后续的解析状态检查
|
||||
- 不支持通过 logId 查询流水数据
|
||||
- 不支持删除操作
|
||||
|
||||
这些限制是设计决策的一部分,符合"简化管理"的目标。
|
||||
|
||||
## 8. 验收标准
|
||||
|
||||
- [ ] 修改后接口返回正确的格式(包含 logId 数组)
|
||||
- [ ] logId 在指定范围内(10000-99999)
|
||||
- [ ] 错误模拟功能正常工作
|
||||
- [ ] 所有测试用例通过
|
||||
- [ ] 文档已更新
|
||||
- [ ] 代码通过 pytest 测试
|
||||
|
||||
## 9. 时间线
|
||||
|
||||
预计实施时间:30 分钟
|
||||
- 代码修改:10 分钟
|
||||
- 测试编写和验证:15 分钟
|
||||
- 文档更新:5 分钟
|
||||
432
lsfx-mock-server/docs/plans/2026-03-04-inner-flow-response.md
Normal file
432
lsfx-mock-server/docs/plans/2026-03-04-inner-flow-response.md
Normal file
@@ -0,0 +1,432 @@
|
||||
# 修改拉取行内流水接口返回值 - 实施计划
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** 修改拉取行内流水接口的返回格式,从错误格式改为返回随机 logId 数组
|
||||
|
||||
**Architecture:** 修改 `FileService.fetch_inner_flow()` 方法,使用随机数生成 logId(10000-99999),返回包含 logId 数组的成功响应,保留错误模拟功能
|
||||
|
||||
**Tech Stack:** Python 3.11, FastAPI, pytest
|
||||
|
||||
---
|
||||
|
||||
## Task 1: 添加测试夹具
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/conftest.py:35-35` (在文件末尾添加)
|
||||
|
||||
**Step 1: 添加测试夹具**
|
||||
|
||||
在 `tests/conftest.py` 文件末尾添加:
|
||||
|
||||
```python
|
||||
|
||||
@pytest.fixture
|
||||
def sample_inner_flow_request():
|
||||
"""示例拉取行内流水请求"""
|
||||
return {
|
||||
"groupId": 1001,
|
||||
"customerNo": "test_customer_001",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证夹具定义正确**
|
||||
|
||||
运行: `pytest --fixtures tests/ | grep sample_inner_flow_request`
|
||||
预期输出: 输出中包含 `sample_inner_flow_request`(说明夹具已被 pytest 正确识别)
|
||||
|
||||
**Step 3: 提交**
|
||||
|
||||
```bash
|
||||
git add tests/conftest.py
|
||||
git commit -m "test: add sample_inner_flow_request fixture"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: 编写成功场景的失败测试
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/test_api.py` (在文件末尾添加)
|
||||
|
||||
**Step 1: 编写测试用例**
|
||||
|
||||
在 `tests/test_api.py` 文件末尾添加:
|
||||
|
||||
```python
|
||||
|
||||
|
||||
def test_fetch_inner_flow_success(client, sample_inner_flow_request):
|
||||
"""测试拉取行内流水 - 成功场景"""
|
||||
response = client.post(
|
||||
"/watson/api/project/getJZFileOrZjrcuFile",
|
||||
data=sample_inner_flow_request
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "200"
|
||||
assert data["successResponse"] == True
|
||||
assert isinstance(data["data"], list)
|
||||
assert len(data["data"]) == 1
|
||||
assert isinstance(data["data"][0], int)
|
||||
assert 10000 <= data["data"][0] <= 99999
|
||||
```
|
||||
|
||||
**Step 2: 运行测试验证失败**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_fetch_inner_flow_success -v`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
FAILED - assert isinstance(data["data"], list)
|
||||
```
|
||||
|
||||
**Step 3: 暂不提交(等待实现)**
|
||||
|
||||
---
|
||||
|
||||
## Task 3: 实现 fetch_inner_flow 方法修改
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py:135-150` (修改 `fetch_inner_flow` 方法)
|
||||
|
||||
**Step 1: 读取当前实现**
|
||||
|
||||
运行: `grep -n "def fetch_inner_flow" services/file_service.py`
|
||||
|
||||
预期输出: `135: def fetch_inner_flow(self, request: Union[Dict, object]) -> Dict:`
|
||||
|
||||
**Step 2: 修改方法实现**
|
||||
|
||||
将 `services/file_service.py` 中的 `fetch_inner_flow` 方法替换为:
|
||||
|
||||
```python
|
||||
def fetch_inner_flow(self, request: Union[Dict, object]) -> Dict:
|
||||
"""拉取行内流水(返回随机logId)
|
||||
|
||||
Args:
|
||||
request: 拉取流水请求(可以是字典或对象)
|
||||
|
||||
Returns:
|
||||
流水响应字典,包含随机生成的logId数组
|
||||
"""
|
||||
import random
|
||||
|
||||
# 随机生成一个logId(范围:10000-99999)
|
||||
log_id = random.randint(10000, 99999)
|
||||
|
||||
# 返回成功的响应,包含logId数组
|
||||
return {
|
||||
"code": "200",
|
||||
"data": [log_id],
|
||||
"status": "200",
|
||||
"successResponse": True,
|
||||
}
|
||||
```
|
||||
|
||||
**Step 3: 运行测试验证通过**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_fetch_inner_flow_success -v`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
PASSED
|
||||
```
|
||||
|
||||
**Step 4: 提交实现**
|
||||
|
||||
```bash
|
||||
git add services/file_service.py tests/test_api.py
|
||||
git commit -m "feat: modify fetch_inner_flow to return random logId array"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: 编写错误场景测试
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/test_api.py` (在 test_fetch_inner_flow_success 后添加)
|
||||
|
||||
**Step 1: 编写错误场景测试**
|
||||
|
||||
在 `tests/test_api.py` 的 `test_fetch_inner_flow_success` 后添加:
|
||||
|
||||
```python
|
||||
|
||||
|
||||
def test_fetch_inner_flow_error_501014(client):
|
||||
"""测试拉取行内流水 - 错误场景 501014"""
|
||||
request_data = {
|
||||
"groupId": 1001,
|
||||
"customerNo": "test_error_501014",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
response = client.post(
|
||||
"/watson/api/project/getJZFileOrZjrcuFile",
|
||||
data=request_data
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "501014"
|
||||
assert data["successResponse"] == False
|
||||
```
|
||||
|
||||
**Step 2: 运行错误场景测试**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_fetch_inner_flow_error_501014 -v`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
PASSED
|
||||
```
|
||||
|
||||
**Step 3: 提交测试**
|
||||
|
||||
```bash
|
||||
git add tests/test_api.py
|
||||
git commit -m "test: add error scenario test for fetch_inner_flow"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: 运行完整测试套件
|
||||
|
||||
**Files:**
|
||||
- 无文件修改
|
||||
|
||||
**Step 1: 运行所有 fetch_inner_flow 相关测试**
|
||||
|
||||
运行: `pytest tests/test_api.py -k "fetch_inner_flow" -v`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
test_fetch_inner_flow_success PASSED
|
||||
test_fetch_inner_flow_error_501014 PASSED
|
||||
```
|
||||
|
||||
**Step 2: 运行完整测试套件确保无破坏**
|
||||
|
||||
运行: `pytest tests/ -v`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
所有测试 PASSED
|
||||
```
|
||||
|
||||
**Step 3: 无需提交**
|
||||
|
||||
---
|
||||
|
||||
## Task 6: 更新 README.md 文档
|
||||
|
||||
**Files:**
|
||||
- Modify: `README.md` (更新行内流水接口说明)
|
||||
|
||||
**Step 1: 找到接口说明位置**
|
||||
|
||||
运行: `grep -n "拉取行内流水" README.md`
|
||||
|
||||
预期输出: 找到行内流水接口的说明位置
|
||||
|
||||
**Step 2: 更新接口说明**
|
||||
|
||||
在 README.md 中找到行内流水接口的说明,将"模拟无数据场景"相关描述改为:
|
||||
|
||||
```markdown
|
||||
### 3. 拉取行内流水
|
||||
|
||||
返回随机生成的 logId 数组(范围:10000-99999),支持通过 `error_XXXX` 标记触发错误场景。
|
||||
```
|
||||
|
||||
同时更新成功响应示例(如果有的话):
|
||||
|
||||
```json
|
||||
{
|
||||
"code": "200",
|
||||
"data": [19154],
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
```
|
||||
|
||||
**Step 3: 验证文档更新**
|
||||
|
||||
运行: `grep -A 5 "拉取行内流水" README.md`
|
||||
|
||||
预期输出: 显示更新后的说明
|
||||
|
||||
**Step 4: 提交文档更新**
|
||||
|
||||
```bash
|
||||
git add README.md
|
||||
git commit -m "docs: update fetch_inner_flow interface description"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 7: 更新 CLAUDE.md 文档
|
||||
|
||||
**Files:**
|
||||
- Modify: `CLAUDE.md` (补充行内流水接口说明)
|
||||
|
||||
**Step 1: 找到架构设计部分**
|
||||
|
||||
运行: `grep -n "### 服务类职责" CLAUDE.md`
|
||||
|
||||
预期输出: 找到服务类职责说明的位置
|
||||
|
||||
**Step 2: 更新服务类职责说明**
|
||||
|
||||
在 `CLAUDE.md` 的"服务类职责"部分,找到 `FileService` 的说明,补充:
|
||||
|
||||
```markdown
|
||||
- **FileService**: 管理文件记录、解析状态、支持后台任务
|
||||
- `fetch_inner_flow()`: 返回随机 logId 数组(简化管理,不存储记录)
|
||||
```
|
||||
|
||||
**Step 3: 添加行内流水接口特殊性说明**
|
||||
|
||||
在合适的位置(如"注意事项"部分)添加:
|
||||
|
||||
```markdown
|
||||
- **行内流水接口特殊性**:
|
||||
- 简化管理:不存储到 file_records
|
||||
- 随机 logId:无需持久化,仅用于返回
|
||||
- 无后续操作:不支持解析状态检查、删除或查询流水
|
||||
```
|
||||
|
||||
**Step 4: 提交文档更新**
|
||||
|
||||
```bash
|
||||
git add CLAUDE.md
|
||||
git commit -m "docs: update CLAUDE.md with inner flow interface details"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 8: 验证 Swagger UI 文档
|
||||
|
||||
**Files:**
|
||||
- 无文件修改
|
||||
|
||||
**Step 1: 启动服务器**
|
||||
|
||||
运行: `python main.py` (后台运行或新终端)
|
||||
|
||||
预期输出:
|
||||
```
|
||||
INFO: Uvicorn running on http://0.0.0.0:8000
|
||||
```
|
||||
|
||||
**Step 2: 访问 Swagger UI**
|
||||
|
||||
打开浏览器访问: `http://localhost:8000/docs`
|
||||
|
||||
预期: 看到 `/watson/api/project/getJZFileOrZjrcuFile` 接口
|
||||
|
||||
**Step 3: 测试接口**
|
||||
|
||||
在 Swagger UI 中:
|
||||
1. 点击 `/watson/api/project/getJZFileOrZjrcuFile` 接口
|
||||
2. 点击 "Try it out"
|
||||
3. 填写测试数据:
|
||||
- groupId: 1001
|
||||
- customerNo: test_customer
|
||||
- dataChannelCode: test_code
|
||||
- requestDateId: 20240101
|
||||
- dataStartDateId: 20240101
|
||||
- dataEndDateId: 20240131
|
||||
- uploadUserId: 902001
|
||||
4. 点击 "Execute"
|
||||
5. 查看响应
|
||||
|
||||
预期响应:
|
||||
```json
|
||||
{
|
||||
"code": "200",
|
||||
"data": [12345],
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
```
|
||||
|
||||
**Step 4: 停止服务器**
|
||||
|
||||
运行: `Ctrl+C` 或关闭终端
|
||||
|
||||
**Step 5: 无需提交**
|
||||
|
||||
---
|
||||
|
||||
## Task 9: 最终验收
|
||||
|
||||
**Files:**
|
||||
- 无文件修改
|
||||
|
||||
**Step 1: 运行完整测试套件**
|
||||
|
||||
运行: `pytest tests/ -v --cov=. --cov-report=term`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
所有测试 PASSED
|
||||
覆盖率报告显示 file_service.py 覆盖率提升
|
||||
```
|
||||
|
||||
**Step 2: 验证验收标准**
|
||||
|
||||
检查以下验收标准是否全部满足:
|
||||
|
||||
- [x] 修改后接口返回正确的格式(包含 logId 数组)
|
||||
- [x] logId 在指定范围内(10000-99999)
|
||||
- [x] 错误模拟功能正常工作
|
||||
- [x] 所有测试用例通过
|
||||
- [x] 文档已更新
|
||||
- [x] 代码通过 pytest 测试
|
||||
|
||||
**Step 3: 查看提交历史**
|
||||
|
||||
运行: `git log --oneline -5`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
docs: update CLAUDE.md with inner flow interface details
|
||||
docs: update fetch_inner_flow interface description
|
||||
test: add error scenario test for fetch_inner_flow
|
||||
feat: modify fetch_inner_flow to return random logId array
|
||||
test: add sample_inner_flow_request fixture
|
||||
```
|
||||
|
||||
**Step 4: 完成**
|
||||
|
||||
实施完成!代码已通过所有测试,文档已更新。
|
||||
|
||||
---
|
||||
|
||||
## 总结
|
||||
|
||||
**修改文件:**
|
||||
- `tests/conftest.py` - 添加测试夹具
|
||||
- `tests/test_api.py` - 添加 2 个测试用例
|
||||
- `services/file_service.py` - 修改 1 个方法
|
||||
- `README.md` - 更新接口说明
|
||||
- `CLAUDE.md` - 补充架构说明
|
||||
|
||||
**测试用例:**
|
||||
- `test_fetch_inner_flow_success` - 验证成功场景
|
||||
- `test_fetch_inner_flow_error_501014` - 验证错误场景
|
||||
|
||||
**提交记录:**
|
||||
- 5 个清晰的提交,遵循原子提交原则
|
||||
- 提交信息符合约定式提交规范
|
||||
|
||||
**实施时间:** 约 30 分钟
|
||||
@@ -0,0 +1,309 @@
|
||||
# 流水分析 Mock 服务器接口完整对齐设计
|
||||
|
||||
**日期:** 2026-03-04
|
||||
**目标:** 根据 `兰溪-流水分析对接3.md` 文档,完整对齐所有接口实现
|
||||
|
||||
## 概述
|
||||
|
||||
本次更新将 Mock 服务器完全对齐最新的接口文档,包括新增缺失接口、完善响应字段、统一错误处理。采用渐进式更新策略,保持现有功能不受影响。
|
||||
|
||||
## 设计目标
|
||||
|
||||
1. **新增缺失接口** - 实现文档中的第5个接口(获取单个文件上传状态)
|
||||
2. **响应字段完整** - 所有7个接口的响应字段完全对齐文档示例
|
||||
3. **数据模型增强** - 扩展文件记录模型以支持完整字段
|
||||
4. **错误码完善** - 补充文档中提到的所有错误码
|
||||
5. **无测试依赖** - 按用户要求,不涉及测试用例更新
|
||||
|
||||
## 架构设计
|
||||
|
||||
### 总体架构
|
||||
|
||||
保持现有无数据库架构不变,通过内存数据结构增强支持完整字段存储。
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────┐
|
||||
│ FastAPI 应用 │
|
||||
├─────────────────────────────────────────┤
|
||||
│ routers/api.py │
|
||||
│ ├─ 7个接口路由(新增接口5) │
|
||||
│ └─ 错误标记检测 │
|
||||
├─────────────────────────────────────────┤
|
||||
│ services/ │
|
||||
│ ├─ token_service.py │
|
||||
│ ├─ file_service.py(增强) │
|
||||
│ │ ├─ FileRecord(扩展字段) │
|
||||
│ │ ├─ upload_file()(初始化完整字段) │
|
||||
│ │ ├─ get_upload_status()(新增) │
|
||||
│ │ └─ delete_files() │
|
||||
│ └─ statement_service.py │
|
||||
├─────────────────────────────────────────┤
|
||||
│ config/responses/ │
|
||||
│ ├─ token.json(更新) │
|
||||
│ ├─ upload.json(更新) │
|
||||
│ ├─ parse_status.json(更新) │
|
||||
│ ├─ bank_statement.json(更新) │
|
||||
│ └─ upload_status.json(新建) │
|
||||
├─────────────────────────────────────────┤
|
||||
│ utils/ │
|
||||
│ └─ error_simulator.py(补充错误码) │
|
||||
└─────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## 核心设计
|
||||
|
||||
### 1. 数据模型扩展
|
||||
|
||||
#### FileRecord 扩展字段
|
||||
|
||||
在 `services/file_service.py` 中扩展 `FileRecord` 类:
|
||||
|
||||
**现有字段:**
|
||||
- `log_id`, `group_id`, `file_name`, `status`, `upload_status_desc`, `parsing`
|
||||
|
||||
**新增字段(对齐文档):**
|
||||
- `account_no_list: List[str]` - 账号列表
|
||||
- `enterprise_name_list: List[str]` - 主体名称列表
|
||||
- `bank_name: str` - 银行名称(如 "ZJRCU", "ALIPAY", "BSX")
|
||||
- `real_bank_name: str` - 真实银行名称
|
||||
- `template_name: str` - 模板名称(如 "ZJRCU_T251114")
|
||||
- `data_type_info: List[str]` - 数据类型(如 ["CSV", ","])
|
||||
- `file_size: int` - 文件大小(字节)
|
||||
- `download_file_name: str` - 下载文件名
|
||||
- `file_package_id: str` - 文件包ID(UUID格式)
|
||||
- `file_upload_by: int` - 上传用户ID
|
||||
- `file_upload_by_user_name: str` - 上传用户名
|
||||
- `file_upload_time: str` - 上传时间(如 "2026-02-27 09:50:18")
|
||||
- `le_id: int` - 法律实体ID
|
||||
- `login_le_id: int` - 登录法律实体ID
|
||||
- `log_type: str` - 日志类型(如 "bankstatement")
|
||||
- `log_meta: str` - 日志元数据(JSON字符串)
|
||||
- `lost_header: List[str]` - 丢失的头部信息
|
||||
- `rows: int` - 行数
|
||||
- `source: str` - 来源(如 "http")
|
||||
- `total_records: int` - 总记录数
|
||||
- `trx_date_start_id: int` - 交易开始日期ID(如 20240201)
|
||||
- `trx_date_end_id: int` - 交易结束日期ID(如 20240228)
|
||||
- `is_split: int` - 是否分割(0或1)
|
||||
|
||||
#### 字段初始化策略
|
||||
|
||||
- `bank_name`: 根据文件名推断(包含"支付宝"→"ALIPAY",包含"绍兴银行"或"BSX"→"BSX",默认"ZJRCU")
|
||||
- `template_name`: 根据 bank_name 生成(如 "ZJRCU_T251114")
|
||||
- `file_package_id`: 生成随机UUID
|
||||
- `file_upload_time`: 使用当前服务器时间
|
||||
- `total_records`: 随机生成(100-300)
|
||||
- `trx_date_start_id`/`trx_date_end_id`: 生成合理的日期范围
|
||||
- 其他字段: 使用文档示例中的典型值
|
||||
|
||||
### 2. 新增接口实现
|
||||
|
||||
#### 接口5:GET `/watson/api/project/bs/upload`
|
||||
|
||||
**功能:** 获取单个文件上传后的状态
|
||||
|
||||
**请求参数:**
|
||||
- `groupId` (int, 必填) - 项目ID
|
||||
- `logId` (int, 可选) - 文件ID
|
||||
|
||||
**响应结构:**
|
||||
```json
|
||||
{
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": [
|
||||
{
|
||||
"accountNoList": ["18785967364"],
|
||||
"bankName": "ALIPAY",
|
||||
"dataTypeInfo": ["CSV", ","],
|
||||
"downloadFileName": "支付宝.csv",
|
||||
"enterpriseNameList": ["曾孝成"],
|
||||
"fileSize": 16322,
|
||||
"fileUploadBy": 448,
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
"fileUploadTime": "2025-03-13 08:45:32",
|
||||
"isSplit": 0,
|
||||
"leId": 10741,
|
||||
"logId": 13994,
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
"logType": "bankstatement",
|
||||
"loginLeId": 10741,
|
||||
"lostHeader": [],
|
||||
"realBankName": "ALIPAY",
|
||||
"rows": 0,
|
||||
"source": "http",
|
||||
"status": -5,
|
||||
"templateName": "ALIPAY_T220708",
|
||||
"totalRecords": 127,
|
||||
"trxDateEndId": 20231231,
|
||||
"trxDateStartId": 20230102,
|
||||
"uploadFileName": "支付宝.pdf",
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
}
|
||||
],
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
```
|
||||
|
||||
**实现逻辑:**
|
||||
1. 路由:在 `routers/api.py` 添加 GET 路由
|
||||
2. 服务:在 `file_service.py` 添加 `get_upload_status(groupId, logId)` 方法
|
||||
3. 逻辑:
|
||||
- 如果提供 `logId`,返回该特定文件的状态
|
||||
- 如果不提供 `logId`,返回该项目的所有文件状态
|
||||
- 从 `file_records` 中查询并构建响应
|
||||
|
||||
**特殊处理:**
|
||||
- `accountId` 和 `currency`: 从文件记录中提取或使用默认值(8954, "CNY")
|
||||
- 空主体标识:如果 `enterpriseNameList` 仅包含空字符串,表示流水文件未生成主体
|
||||
|
||||
### 3. 现有接口响应字段更新
|
||||
|
||||
#### 接口1:`/account/common/getToken`
|
||||
- 确认 `data.analysisType` 类型为 Integer
|
||||
- 保持其他字段不变
|
||||
|
||||
#### 接口2:`/watson/api/project/remoteUploadSplitFile`
|
||||
- 补充 `accountsOfLog` 结构
|
||||
- 完善 `uploadLogList` 中的所有字段
|
||||
- 新增 `uploadStatus` 字段(固定值 1)
|
||||
|
||||
#### 接口3:`/watson/api/project/getJZFileOrZjrcuFile`
|
||||
- 保持现有响应格式
|
||||
- 返回 `{code, data: [logId数组], status, successResponse}`
|
||||
|
||||
#### 接口4:`/watson/api/project/upload/getpendings`
|
||||
- 补充 `data.pendingList` 中的所有字段
|
||||
- 确保包含 `isSplit`, `lostHeader`, `leId`, `loginLeId` 等
|
||||
|
||||
#### 接口6:`/watson/api/project/batchDeleteUploadFile`
|
||||
- 注意 `code` 字段为 "200 OK" 而非 "200"
|
||||
- 响应格式:`{code: "200 OK", data: {message: "delete.files.success"}, ...}`
|
||||
|
||||
#### 接口7:`/watson/api/project/getBSByLogId`
|
||||
- 补充 `bankStatementList` 中每个对象的所有50+个字段
|
||||
- 字段包括:accountId, accountMaskNo, accountingDate, balanceAmount, bank, bankStatementId, bankTrxNumber, batchId, cashType, crAmount, cretNo, currency, customerAccountMaskNo, customerBank, customerId, customerName, drAmount, groupId, leId, leName, transAmount, transFlag, trxDate, userMemo 等
|
||||
|
||||
### 4. 错误码完善
|
||||
|
||||
#### 当前错误码(已有)
|
||||
- 40101: appId错误
|
||||
- 40102: appSecretCode错误
|
||||
- 40104: 可使用项目次数为0,无法创建项目
|
||||
- 40105: 只读模式下无法新建项目
|
||||
- 40106: 错误的分析类型,不在规定的取值范围内
|
||||
- 40107: 当前系统不支持的分析类型
|
||||
- 40108: 当前用户所属行社无权限
|
||||
- 501014: 无行内流水文件
|
||||
|
||||
#### 新增错误码
|
||||
- 40100: 未知异常
|
||||
|
||||
#### 错误响应格式
|
||||
```json
|
||||
{
|
||||
"code": "错误码",
|
||||
"message": "错误描述",
|
||||
"status": "错误码",
|
||||
"successResponse": false
|
||||
}
|
||||
```
|
||||
|
||||
#### 错误触发机制
|
||||
- 在任意字符串参数中包含 `error_XXXX` 标记
|
||||
- 例如:`projectNo: "test_error_40100"` 触发 40100 错误
|
||||
|
||||
### 5. 请求头处理
|
||||
|
||||
#### X-Xencio-Client-Id
|
||||
- **策略:** 不验证,接受任意值
|
||||
- **原因:** 简化测试,不需要记住特定的 client-id
|
||||
- **实现:** FastAPI 不检查该请求头
|
||||
|
||||
## 实施计划
|
||||
|
||||
### 步骤1:数据模型扩展
|
||||
- **文件:** `services/file_service.py`
|
||||
- **内容:** 扩展 `FileRecord` 类,添加所有新字段
|
||||
- **验证:** 启动服务无报错
|
||||
|
||||
### 步骤2:文件服务增强
|
||||
- **文件:** `services/file_service.py`
|
||||
- **内容:**
|
||||
- 在 `upload_file()` 方法中初始化所有新字段
|
||||
- 添加 `get_upload_status()` 方法
|
||||
- 更新 `delete_files()` 方法以处理新增字段
|
||||
- **验证:** 上传文件后能返回完整字段
|
||||
|
||||
### 步骤3:新增接口路由
|
||||
- **文件:** `routers/api.py`
|
||||
- **内容:** 添加 GET `/watson/api/project/bs/upload` 路由
|
||||
- **验证:** 访问 `/docs` 能看到新接口
|
||||
|
||||
### 步骤4:响应模板更新
|
||||
- **文件:**
|
||||
- `config/responses/token.json`
|
||||
- `config/responses/upload.json`
|
||||
- `config/responses/parse_status.json`
|
||||
- `config/responses/bank_statement.json`
|
||||
- 新建 `config/responses/upload_status.json`
|
||||
- **内容:** 补充所有缺失字段,对齐文档示例
|
||||
- **验证:** 调用接口返回完整字段
|
||||
|
||||
### 步骤5:错误码补充
|
||||
- **文件:** `utils/error_simulator.py`
|
||||
- **内容:** 添加 40100 错误码
|
||||
- **验证:** 使用 `error_40100` 能触发对应错误
|
||||
|
||||
### 步骤6:文档更新
|
||||
- **文件:**
|
||||
- `CLAUDE.md`
|
||||
- `README.md`(如存在)
|
||||
- **内容:** 添加新接口说明,更新注意事项
|
||||
|
||||
## 文件变更清单
|
||||
|
||||
```
|
||||
services/file_service.py [修改] - 数据模型和服务方法
|
||||
routers/api.py [修改] - 新增接口路由
|
||||
utils/error_simulator.py [修改] - 新增错误码
|
||||
config/responses/token.json [修改] - 完善响应字段
|
||||
config/responses/upload.json [修改] - 完善响应字段
|
||||
config/responses/parse_status.json [修改] - 完善响应字段
|
||||
config/responses/bank_statement.json [修改] - 完善响应字段
|
||||
config/responses/upload_status.json [新建] - 接口5响应模板
|
||||
CLAUDE.md [修改] - 更新接口说明
|
||||
README.md [修改] - 更新项目说明(如存在)
|
||||
```
|
||||
|
||||
## 风险评估
|
||||
|
||||
### 低风险
|
||||
- 数据模型扩展:仅添加字段,不影响现有功能
|
||||
- 响应模板更新:仅添加字段,向后兼容
|
||||
- 错误码补充:新增错误码,不影响现有错误处理
|
||||
|
||||
### 需注意
|
||||
- 文件上传逻辑:需要确保所有新字段都正确初始化
|
||||
- 时间格式:确保 `file_upload_time` 使用正确的格式
|
||||
- 字段类型:确保 Integer 字段不使用字符串
|
||||
|
||||
## 成功标准
|
||||
|
||||
1. 所有7个接口都能正常调用
|
||||
2. 每个接口的响应字段完全对齐文档示例
|
||||
3. 错误标记机制在所有接口中都能正常工作
|
||||
4. 新增的 40100 错误码能正确触发
|
||||
5. 服务启动无报错,能正常处理请求
|
||||
|
||||
## 后续工作
|
||||
|
||||
本次更新完成后,Mock 服务器将完全对齐接口文档,可以支持前端开发和集成测试。后续可根据实际使用情况:
|
||||
- 调整字段生成逻辑(如更真实的数据)
|
||||
- 添加更多银行的模板支持
|
||||
- 优化错误场景的模拟
|
||||
@@ -0,0 +1,717 @@
|
||||
# 接口完整对齐实施计划
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** 根据 `兰溪-流水分析对接3.md` 文档,完整对齐 Mock 服务器的所有7个接口实现
|
||||
|
||||
**Architecture:** 保持无数据库架构,通过扩展内存数据模型支持完整字段,新增1个接口,完善6个现有接口的响应字段
|
||||
|
||||
**Tech Stack:** FastAPI, Python 3.8+, Pydantic
|
||||
|
||||
---
|
||||
|
||||
## Task 1: 扩展 FileRecord 数据模型
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py`
|
||||
|
||||
**Step 1: 读取现有 file_service.py 文件**
|
||||
|
||||
先查看当前的 FileRecord 实现。
|
||||
|
||||
**Step 2: 扩展 FileRecord 类添加所有新字段**
|
||||
|
||||
在 `FileRecord` 类中添加以下字段:
|
||||
|
||||
```python
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
@dataclass
|
||||
class FileRecord:
|
||||
"""文件记录模型(扩展版)"""
|
||||
# 原有字段
|
||||
log_id: int
|
||||
group_id: int
|
||||
file_name: str
|
||||
status: int = -5 # -5 表示解析成功
|
||||
upload_status_desc: str = "data.wait.confirm.newaccount"
|
||||
parsing: bool = True # True表示正在解析
|
||||
|
||||
# 新增字段 - 账号和主体信息
|
||||
account_no_list: List[str] = field(default_factory=list)
|
||||
enterprise_name_list: List[str] = field(default_factory=list)
|
||||
|
||||
# 新增字段 - 银行和模板信息
|
||||
bank_name: str = "ZJRCU"
|
||||
real_bank_name: str = "ZJRCU"
|
||||
template_name: str = "ZJRCU_T251114"
|
||||
data_type_info: List[str] = field(default_factory=lambda: ["CSV", ","])
|
||||
|
||||
# 新增字段 - 文件元数据
|
||||
file_size: int = 50000
|
||||
download_file_name: str = ""
|
||||
file_package_id: str = field(default_factory=lambda: str(uuid.uuid4()).replace('-', ''))
|
||||
|
||||
# 新增字段 - 上传用户信息
|
||||
file_upload_by: int = 448
|
||||
file_upload_by_user_name: str = "admin@support.com"
|
||||
file_upload_time: str = field(default_factory=lambda: datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
|
||||
|
||||
# 新增字段 - 法律实体信息
|
||||
le_id: int = 10000
|
||||
login_le_id: int = 10000
|
||||
log_type: str = "bankstatement"
|
||||
    log_meta: str = "{\"lostHeader\":[],\"balanceAmount\":true}"  # NOTE(review): 接口5示例中 balanceAmount 为字符串 "-1"(见设计文档),此处为布尔 true——需确认实际格式后统一
|
||||
lost_header: List[str] = field(default_factory=list)
|
||||
|
||||
# 新增字段 - 记录统计
|
||||
rows: int = 0
|
||||
source: str = "http"
|
||||
total_records: int = 150
|
||||
is_split: int = 0
|
||||
|
||||
# 新增字段 - 交易日期范围
|
||||
trx_date_start_id: int = 20240101
|
||||
trx_date_end_id: int = 20241231
|
||||
```
|
||||
|
||||
**Step 3: 验证服务能正常启动**
|
||||
|
||||
```bash
|
||||
python main.py
|
||||
```
|
||||
|
||||
预期:服务启动成功,无报错信息。
|
||||
|
||||
---
|
||||
|
||||
## Task 2: 更新 upload_file 方法初始化所有字段
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py`
|
||||
|
||||
**Step 1: 读取 upload_file 方法**
|
||||
|
||||
查看当前的 `upload_file` 方法实现。
|
||||
|
||||
**Step 2: 根据文件名推断银行名称**
|
||||
|
||||
在 `upload_file` 方法中添加银行名称推断逻辑:
|
||||
|
||||
```python
|
||||
def _infer_bank_name(self, filename: str) -> tuple:
|
||||
"""根据文件名推断银行名称和模板名称"""
|
||||
if "支付宝" in filename or "alipay" in filename.lower():
|
||||
return "ALIPAY", "ALIPAY_T220708"
|
||||
elif "绍兴银行" in filename or "BSX" in filename:
|
||||
return "BSX", "BSX_T240925"
|
||||
else:
|
||||
return "ZJRCU", "ZJRCU_T251114"
|
||||
|
||||
async def upload_file(self, group_id: int, file: UploadFile, background_tasks: BackgroundTasks) -> dict:
|
||||
"""上传文件并初始化所有字段"""
|
||||
# 生成新的 log_id
|
||||
self.current_log_id += 1
|
||||
log_id = self.current_log_id
|
||||
|
||||
# 推断银行信息
|
||||
bank_name, template_name = self._infer_bank_name(file.filename)
|
||||
|
||||
# 生成合理的交易日期范围
|
||||
import random
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
end_date = datetime.now()
|
||||
start_date = end_date - timedelta(days=random.randint(90, 365))
|
||||
trx_date_start_id = int(start_date.strftime("%Y%m%d"))
|
||||
trx_date_end_id = int(end_date.strftime("%Y%m%d"))
|
||||
|
||||
# 生成随机账号和主体
|
||||
account_no = f"{random.randint(10000000000, 99999999999)}"
|
||||
enterprise_names = ["测试主体"] if random.random() > 0.3 else [""]
|
||||
|
||||
# 创建完整的文件记录
|
||||
file_record = FileRecord(
|
||||
log_id=log_id,
|
||||
group_id=group_id,
|
||||
file_name=file.filename,
|
||||
download_file_name=file.filename,
|
||||
bank_name=bank_name,
|
||||
real_bank_name=bank_name,
|
||||
template_name=template_name,
|
||||
account_no_list=[account_no],
|
||||
enterprise_name_list=enterprise_names,
|
||||
le_id=10000 + random.randint(0, 9999),
|
||||
login_le_id=10000 + random.randint(0, 9999),
|
||||
file_size=random.randint(10000, 100000),
|
||||
total_records=random.randint(100, 300),
|
||||
trx_date_start_id=trx_date_start_id,
|
||||
trx_date_end_id=trx_date_end_id,
|
||||
parsing=True,
|
||||
status=-5
|
||||
)
|
||||
|
||||
# 存储记录
|
||||
self.file_records[log_id] = file_record
|
||||
|
||||
# 添加后台任务(延迟解析)
|
||||
background_tasks.add_task(self._delayed_parse, log_id)
|
||||
|
||||
# 构建响应
|
||||
return self._build_upload_response(file_record)
|
||||
```
|
||||
|
||||
**Step 3: 实现 _build_upload_response 方法**
|
||||
|
||||
```python
|
||||
def _build_upload_response(self, file_record: FileRecord) -> dict:
|
||||
"""构建上传接口的完整响应"""
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"accountsOfLog": {
|
||||
str(file_record.log_id): [
|
||||
{
|
||||
"bank": file_record.bank_name,
|
||||
"accountName": file_record.enterprise_name_list[0] if file_record.enterprise_name_list else "",
|
||||
"accountNo": file_record.account_no_list[0] if file_record.account_no_list else "",
|
||||
"currency": "CNY"
|
||||
}
|
||||
]
|
||||
},
|
||||
"uploadLogList": [
|
||||
{
|
||||
"accountNoList": file_record.account_no_list,
|
||||
"bankName": file_record.bank_name,
|
||||
"dataTypeInfo": file_record.data_type_info,
|
||||
"downloadFileName": file_record.download_file_name,
|
||||
"enterpriseNameList": file_record.enterprise_name_list,
|
||||
"filePackageId": file_record.file_package_id,
|
||||
"fileSize": file_record.file_size,
|
||||
"fileUploadBy": file_record.file_upload_by,
|
||||
"fileUploadByUserName": file_record.file_upload_by_user_name,
|
||||
"fileUploadTime": file_record.file_upload_time,
|
||||
"leId": file_record.le_id,
|
||||
"logId": file_record.log_id,
|
||||
"logMeta": file_record.log_meta,
|
||||
"logType": file_record.log_type,
|
||||
"loginLeId": file_record.login_le_id,
|
||||
"lostHeader": file_record.lost_header,
|
||||
"realBankName": file_record.real_bank_name,
|
||||
"rows": file_record.rows,
|
||||
"source": file_record.source,
|
||||
"status": file_record.status,
|
||||
"templateName": file_record.template_name,
|
||||
"totalRecords": file_record.total_records,
|
||||
"trxDateEndId": file_record.trx_date_end_id,
|
||||
"trxDateStartId": file_record.trx_date_start_id,
|
||||
"uploadFileName": file_record.file_name,
|
||||
"uploadStatusDesc": file_record.upload_status_desc
|
||||
}
|
||||
],
|
||||
"uploadStatus": 1
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
```
|
||||
|
||||
**Step 4: 验证上传接口返回完整字段**
|
||||
|
||||
重启服务并调用上传接口,检查响应是否包含所有字段。
|
||||
|
||||
---
|
||||
|
||||
## Task 3: 添加 get_upload_status 方法
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py`
|
||||
|
||||
**Step 1: 实现 get_upload_status 方法**
|
||||
|
||||
在 `FileService` 类中添加新方法:
|
||||
|
||||
```python
|
||||
def get_upload_status(self, group_id: int, log_id: int = None) -> dict:
|
||||
"""获取文件上传状态(接口5)"""
|
||||
logs = []
|
||||
|
||||
if log_id:
|
||||
# 返回特定文件的状态
|
||||
if log_id in self.file_records:
|
||||
record = self.file_records[log_id]
|
||||
if record.group_id == group_id:
|
||||
logs.append(self._build_log_detail(record))
|
||||
else:
|
||||
# 返回该项目的所有文件状态
|
||||
for record in self.file_records.values():
|
||||
if record.group_id == group_id:
|
||||
logs.append(self._build_log_detail(record))
|
||||
|
||||
# 构建响应
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": logs,
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
|
||||
def _build_log_detail(self, record: FileRecord) -> dict:
|
||||
"""构建日志详情对象"""
|
||||
return {
|
||||
"accountNoList": record.account_no_list,
|
||||
"bankName": record.bank_name,
|
||||
"dataTypeInfo": record.data_type_info,
|
||||
"downloadFileName": record.download_file_name,
|
||||
"enterpriseNameList": record.enterprise_name_list,
|
||||
"fileSize": record.file_size,
|
||||
"fileUploadBy": record.file_upload_by,
|
||||
"fileUploadByUserName": record.file_upload_by_user_name,
|
||||
"fileUploadTime": record.file_upload_time,
|
||||
"isSplit": record.is_split,
|
||||
"leId": record.le_id,
|
||||
"logId": record.log_id,
|
||||
"logMeta": record.log_meta,
|
||||
"logType": record.log_type,
|
||||
"loginLeId": record.login_le_id,
|
||||
"lostHeader": record.lost_header,
|
||||
"realBankName": record.real_bank_name,
|
||||
"rows": record.rows,
|
||||
"source": record.source,
|
||||
"status": record.status,
|
||||
"templateName": record.template_name,
|
||||
"totalRecords": record.total_records,
|
||||
"trxDateEndId": record.trx_date_end_id,
|
||||
"trxDateStartId": record.trx_date_start_id,
|
||||
"uploadFileName": record.file_name,
|
||||
"uploadStatusDesc": record.upload_status_desc
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证方法能正确查询文件记录**
|
||||
|
||||
在代码中确保 `file_records` 字典正确初始化和管理。
|
||||
|
||||
---
|
||||
|
||||
## Task 4: 在 API 路由中添加新接口
|
||||
|
||||
**Files:**
|
||||
- Modify: `routers/api.py`
|
||||
|
||||
**Step 1: 读取现有 api.py 文件**
|
||||
|
||||
查看当前的路由定义。
|
||||
|
||||
**Step 2: 添加 GET 接口路由**
|
||||
|
||||
在接口5的位置(check_parse_status 和 delete_files 之间)添加:
|
||||
|
||||
```python
|
||||
# ==================== 接口5:获取文件上传状态 ====================
|
||||
@router.get("/watson/api/project/bs/upload")
|
||||
async def get_upload_status(
|
||||
groupId: int = Form(..., description="项目id"),
|
||||
logId: Optional[int] = Form(None, description="文件id"),
|
||||
):
|
||||
"""获取单个文件上传后的状态
|
||||
|
||||
如果不提供 logId,返回该项目的所有文件状态
|
||||
"""
|
||||
return file_service.get_upload_status(groupId, logId)
|
||||
```
|
||||
|
||||
**Step 3: 确认导入了 Optional**
|
||||
|
||||
在文件顶部确认:
|
||||
|
||||
```python
|
||||
from typing import List, Optional
|
||||
```
|
||||
|
||||
**Step 4: 验证新接口出现在 Swagger 文档中**
|
||||
|
||||
重启服务,访问 http://localhost:8000/docs,确认能看到新的 GET 接口。
|
||||
|
||||
---
|
||||
|
||||
## Task 5: 更新 check_parse_status 响应字段
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py`
|
||||
|
||||
**Step 1: 修改 check_parse_status 方法**
|
||||
|
||||
确保返回的 pendingList 包含所有字段:
|
||||
|
||||
```python
|
||||
def check_parse_status(self, group_id: int, inprogress_list: str) -> dict:
|
||||
"""检查文件解析状态"""
|
||||
log_ids = [int(id.strip()) for id in inprogress_list.split(",")]
|
||||
|
||||
pending_list = []
|
||||
all_parsing_complete = True
|
||||
|
||||
for log_id in log_ids:
|
||||
if log_id in self.file_records:
|
||||
record = self.file_records[log_id]
|
||||
if record.parsing:
|
||||
all_parsing_complete = False
|
||||
|
||||
pending_list.append(self._build_log_detail(record))
|
||||
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"parsing": not all_parsing_complete,
|
||||
"pendingList": pending_list
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证解析状态接口返回完整字段**
|
||||
|
||||
调用接口4,检查响应中的 pendingList 是否包含所有字段。
|
||||
|
||||
---
|
||||
|
||||
## Task 6: 更新 delete_files 方法响应格式
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py`
|
||||
|
||||
**Step 1: 修改 delete_files 方法**
|
||||
|
||||
确保响应的 code 字段为 "200 OK":
|
||||
|
||||
```python
|
||||
def delete_files(self, group_id: int, log_ids: List[int], user_id: int) -> dict:
|
||||
"""批量删除文件"""
|
||||
deleted_count = 0
|
||||
for log_id in log_ids:
|
||||
if log_id in self.file_records:
|
||||
del self.file_records[log_id]
|
||||
deleted_count += 1
|
||||
|
||||
return {
|
||||
"code": "200 OK", # 注意:这里是 "200 OK" 不是 "200"
|
||||
"data": {
|
||||
"message": "delete.files.success"
|
||||
},
|
||||
"message": "delete.files.success",
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证删除接口响应格式正确**
|
||||
|
||||
调用删除接口,检查响应的 code 字段是否为 "200 OK"。
|
||||
|
||||
---
|
||||
|
||||
## Task 7: 更新 token.json 响应模板
|
||||
|
||||
**Files:**
|
||||
- Modify: `config/responses/token.json`
|
||||
|
||||
**Step 1: 确认 analysisType 为 Integer**
|
||||
|
||||
确保 token.json 中的 analysisType 字段类型正确:
|
||||
|
||||
```json
|
||||
{
|
||||
"success_response": {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.mock_token_{project_id}",
|
||||
"projectId": "{project_id}",
|
||||
"projectNo": "{project_no}",
|
||||
"entityName": "{entity_name}",
|
||||
"analysisType": 0
|
||||
},
|
||||
"message": "create.token.success",
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
确认 `analysisType` 的值是数字 0,不是字符串 "0"。
|
||||
|
||||
**Step 2: 验证接口1响应正确**
|
||||
|
||||
调用 getToken 接口,检查 analysisType 的类型。
|
||||
|
||||
---
|
||||
|
||||
## Task 8: 创建 upload_status.json 响应模板
|
||||
|
||||
**Files:**
|
||||
- Create: `config/responses/upload_status.json`
|
||||
|
||||
**Step 1: 创建新的响应模板文件**
|
||||
|
||||
创建文件并添加内容(虽然实际响应在代码中构建,但保留模板作为参考):
|
||||
|
||||
```json
|
||||
{
|
||||
"success_response": {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": [
|
||||
{
|
||||
"accountNoList": ["18785967364"],
|
||||
"bankName": "ALIPAY",
|
||||
"dataTypeInfo": ["CSV", ","],
|
||||
"downloadFileName": "支付宝.csv",
|
||||
"enterpriseNameList": ["曾孝成"],
|
||||
"fileSize": 16322,
|
||||
"fileUploadBy": 448,
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
"fileUploadTime": "2025-03-13 08:45:32",
|
||||
"isSplit": 0,
|
||||
"leId": 10741,
|
||||
"logId": 13994,
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
"logType": "bankstatement",
|
||||
"loginLeId": 10741,
|
||||
"lostHeader": [],
|
||||
"realBankName": "ALIPAY",
|
||||
"rows": 0,
|
||||
"source": "http",
|
||||
"status": -5,
|
||||
"templateName": "ALIPAY_T220708",
|
||||
"totalRecords": 127,
|
||||
"trxDateEndId": 20231231,
|
||||
"trxDateStartId": 20230102,
|
||||
"uploadFileName": "支付宝.pdf",
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
}
|
||||
],
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 9: 更新 bank_statement.json 响应模板
|
||||
|
||||
**Files:**
|
||||
- Modify: `config/responses/bank_statement.json`
|
||||
|
||||
**Step 1: 补充流水记录的所有字段**
|
||||
|
||||
确保 bank_statement.json 包含所有50+个字段:
|
||||
|
||||
```json
|
||||
{
|
||||
"success_response": {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"bankStatementList": [
|
||||
{
|
||||
"accountId": 0,
|
||||
"accountMaskNo": "101015251071645",
|
||||
"accountingDate": "2024-02-01",
|
||||
"accountingDateId": 20240201,
|
||||
"archivingFlag": 0,
|
||||
"attachments": 0,
|
||||
"balanceAmount": 4814.82,
|
||||
"bank": "ZJRCU",
|
||||
"bankComments": "",
|
||||
"bankStatementId": 12847662,
|
||||
"bankTrxNumber": "1a10458dd5c3366d7272285812d434fc",
|
||||
"batchId": 19135,
|
||||
"cashType": "1",
|
||||
"commentsNum": 0,
|
||||
"crAmount": 0,
|
||||
"cretNo": "230902199012261247",
|
||||
"currency": "CNY",
|
||||
"customerAccountMaskNo": "597671502",
|
||||
"customerBank": "",
|
||||
"customerId": -1,
|
||||
"customerName": "小店",
|
||||
"customerReference": "",
|
||||
"downPaymentFlag": 0,
|
||||
"drAmount": 245.8,
|
||||
"exceptionType": "",
|
||||
"groupId": 16238,
|
||||
"internalFlag": 0,
|
||||
"leId": 16308,
|
||||
"leName": "张传伟",
|
||||
"overrideBsId": 0,
|
||||
"paymentMethod": "",
|
||||
"sourceCatalogId": 0,
|
||||
"split": 0,
|
||||
"subBankstatementId": 0,
|
||||
"toDoFlag": 0,
|
||||
"transAmount": 245.8,
|
||||
"transFlag": "P",
|
||||
"transTypeId": 0,
|
||||
"transformAmount": 0,
|
||||
"transformCrAmount": 0,
|
||||
"transformDrAmount": 0,
|
||||
"transfromBalanceAmount": 0,
|
||||
"trxBalance": 0,
|
||||
"trxDate": "2024-02-01 10:33:44",
|
||||
"userMemo": "财付通消费_小店"
|
||||
}
|
||||
],
|
||||
"totalCount": 131
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证流水查询接口返回所有字段**
|
||||
|
||||
调用 getBSByLogId 接口,检查响应是否包含所有字段。
|
||||
|
||||
---
|
||||
|
||||
## Task 10: 添加 40100 错误码
|
||||
|
||||
**Files:**
|
||||
- Modify: `utils/error_simulator.py`
|
||||
|
||||
**Step 1: 在 ERROR_CODES 字典中添加新错误码**
|
||||
|
||||
```python
|
||||
ERROR_CODES = {
|
||||
"40100": {"code": "40100", "message": "未知异常"},
|
||||
"40101": {"code": "40101", "message": "appId错误"},
|
||||
"40102": {"code": "40102", "message": "appSecretCode错误"},
|
||||
"40104": {"code": "40104", "message": "可使用项目次数为0,无法创建项目"},
|
||||
"40105": {"code": "40105", "message": "只读模式下无法新建项目"},
|
||||
"40106": {"code": "40106", "message": "错误的分析类型,不在规定的取值范围内"},
|
||||
"40107": {"code": "40107", "message": "当前系统不支持的分析类型"},
|
||||
"40108": {"code": "40108", "message": "当前用户所属行社无权限"},
|
||||
"501014": {"code": "501014", "message": "无行内流水文件"},
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证错误码能正确触发**
|
||||
|
||||
调用任意接口,在参数中包含 `error_40100`,检查是否返回对应错误。
|
||||
|
||||
---
|
||||
|
||||
## Task 11: 更新 CLAUDE.md 文档
|
||||
|
||||
**Files:**
|
||||
- Modify: `CLAUDE.md`
|
||||
|
||||
**Step 1: 更新接口列表说明**
|
||||
|
||||
在 "API 接口说明" 部分更新为:
|
||||
|
||||
```markdown
|
||||
## API 接口说明
|
||||
|
||||
7个核心接口:
|
||||
|
||||
1. `/account/common/getToken` (POST) - 创建项目并获取 Token
|
||||
2. `/watson/api/project/remoteUploadSplitFile` (POST) - 上传流水文件(multipart/form-data)
|
||||
3. `/watson/api/project/getJZFileOrZjrcuFile` (POST) - 拉取行内流水
|
||||
4. `/watson/api/project/upload/getpendings` (POST) - 检查文件解析状态
|
||||
5. `/watson/api/project/bs/upload` (GET) - 获取单个文件上传后的状态
|
||||
6. `/watson/api/project/batchDeleteUploadFile` (POST) - 批量删除文件
|
||||
7. `/watson/api/project/getBSByLogId` (POST) - 获取银行流水(分页)
|
||||
|
||||
详细接口文档请访问 Swagger UI (`/docs`) 或查看 `assets/兰溪-流水分析对接3.md`。
|
||||
```
|
||||
|
||||
**Step 2: 更新注意事项**
|
||||
|
||||
添加关于响应字段完整性的说明:
|
||||
|
||||
```markdown
|
||||
## 注意事项
|
||||
|
||||
- **数据持久化**: 所有数据存储在内存中,服务重启后数据丢失
|
||||
- **响应字段完整性**: 所有接口响应字段完全对齐接口文档示例
|
||||
- **并发安全**: 当前实现未考虑多线程安全,生产环境需要加锁
|
||||
- **文件存储**: 上传的文件不实际保存,仅模拟元数据
|
||||
- **错误标记**: 错误触发通过字符串匹配实现,确保测试数据唯一性
|
||||
- **后台任务**: FastAPI BackgroundTasks 在同一进程内执行,不会阻塞响应
|
||||
- **请求头处理**: X-Xencio-Client-Id 请求头不验证,接受任意值
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 12: 最终验证
|
||||
|
||||
**Files:**
|
||||
- All modified files
|
||||
|
||||
**Step 1: 启动服务**
|
||||
|
||||
```bash
|
||||
python main.py
|
||||
```
|
||||
|
||||
预期:服务正常启动,无报错。
|
||||
|
||||
**Step 2: 访问 Swagger 文档**
|
||||
|
||||
访问 http://localhost:8000/docs
|
||||
|
||||
预期:能看到所有7个接口,包括新增的 GET 接口。
|
||||
|
||||
**Step 3: 测试所有7个接口**
|
||||
|
||||
使用 Swagger UI 或 curl 测试每个接口,确保:
|
||||
1. 接口1:返回包含 analysisType (Integer) 的响应
|
||||
2. 接口2:返回包含 accountsOfLog 和完整 uploadLogList 的响应
|
||||
3. 接口3:返回 logId 数组
|
||||
4. 接口4:返回包含完整字段的 pendingList
|
||||
5. 接口5:返回包含完整字段的 logs 数组
|
||||
6. 接口6:返回 code 为 "200 OK" 的响应
|
||||
7. 接口7:返回包含所有50+字段的 bankStatementList
|
||||
|
||||
**Step 4: 测试错误码**
|
||||
|
||||
调用接口1,使用参数 `projectNo: "test_error_40100"`
|
||||
|
||||
预期:返回 40100 错误。
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria
|
||||
|
||||
- [x] FileRecord 包含所有必需字段
|
||||
- [x] upload_file 方法正确初始化所有字段
|
||||
- [x] get_upload_status 方法正确实现
|
||||
- [x] 新接口出现在 /docs 中
|
||||
- [x] 所有响应字段完全对齐文档示例
|
||||
- [x] 40100 错误码能正确触发
|
||||
- [x] 服务启动无报错
|
||||
- [x] 所有7个接口都能正常调用
|
||||
|
||||
---
|
||||
|
||||
## Notes
|
||||
|
||||
- 所有代码修改都保持向后兼容
|
||||
- 无需数据库迁移(使用内存存储)
|
||||
- 错误处理机制保持不变
|
||||
- 请求头 X-Xencio-Client-Id 不验证
|
||||
@@ -0,0 +1,373 @@
|
||||
# 获取单个文件上传状态接口优化设计
|
||||
|
||||
## 文档信息
|
||||
|
||||
- **创建日期**: 2026-03-12
|
||||
- **设计者**: Claude Code
|
||||
- **状态**: 待实施
|
||||
|
||||
## 1. 需求背景
|
||||
|
||||
### 1.1 接口信息
|
||||
|
||||
- **接口路径**: `/watson/api/project/bs/upload` (GET)
|
||||
- **接口名称**: 获取单个文件上传后的状态
|
||||
- **项目背景**: 流水分析 Mock 服务器
|
||||
|
||||
### 1.2 当前问题
|
||||
|
||||
当前实现存在以下问题:
|
||||
|
||||
1. **依赖实际上传记录**: 接口依赖 `self.file_records`(上传时存储的记录),如果没有上传过文件,logs 返回空数组
|
||||
2. **不符合 Mock 服务器定位**: Mock 服务器应该独立工作,前端测试时不应依赖其他接口
|
||||
3. **字段值不正确**: `logMeta` 字段中的 `balanceAmount` 值为布尔值 `true`,应该为字符串 `"-1"`
|
||||
|
||||
### 1.3 期望行为
|
||||
|
||||
根据接口文档(`assets/兰溪-流水分析对接3.md` 第374-516行):
|
||||
|
||||
1. **带 logId 参数**: 根据 logId 生成固定的文件记录数据(相同 logId 返回相同数据)
|
||||
2. **不带 logId 参数**: 返回空的 logs 数组
|
||||
3. **固定成功状态**: status=-5, uploadStatusDesc="data.wait.confirm.newaccount"
|
||||
4. **独立性**: 不依赖实际上传的文件记录,接口独立工作
|
||||
|
||||
## 2. 解决方案
|
||||
|
||||
### 2.1 设计原则
|
||||
|
||||
1. **确定性随机**: 使用 `random.seed(log_id)` 确保相同 logId 生成相同数据
|
||||
2. **完全独立**: 不依赖 `self.file_records`,在 `get_upload_status()` 中直接生成数据
|
||||
3. **文档对齐**: 严格遵循接口文档示例的字段和格式
|
||||
4. **简单高效**: 代码简洁,易于维护和测试
|
||||
|
||||
### 2.2 核心设计
|
||||
|
||||
#### 2.2.1 数据生成策略
|
||||
|
||||
**基于 logId 的确定性随机生成**
|
||||
|
||||
```python
|
||||
def get_upload_status(self, group_id: int, log_id: int = None) -> dict:
|
||||
"""
|
||||
获取文件上传状态
|
||||
|
||||
Args:
|
||||
group_id: 项目ID
|
||||
log_id: 文件ID(可选)
|
||||
|
||||
Returns:
|
||||
上传状态响应字典
|
||||
"""
|
||||
logs = []
|
||||
|
||||
if log_id:
|
||||
# 使用 logId 作为随机种子,确保相同 logId 返回相同数据
|
||||
random.seed(log_id)
|
||||
|
||||
# 生成确定性的文件记录
|
||||
record = self._generate_deterministic_record(log_id, group_id)
|
||||
logs.append(record)
|
||||
|
||||
# 返回响应
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": logs,
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
```
|
||||
|
||||
#### 2.2.2 字段生成规则
|
||||
|
||||
根据文档示例(`assets/兰溪-流水分析对接3.md` 第431-499行),logs 数组中的每个对象包含 26 个字段:
|
||||
|
||||
| 字段名 | 生成规则 | 示例值 |
|--------|----------|--------|
| accountNoList | 11位随机数字 | ["18785967364"] |
| bankName | 从3种银行中随机选择 | "ALIPAY" |
| dataTypeInfo | 固定值 | ["CSV", ","] |
| downloadFileName | 基于 logId 生成 | "测试文件_13994.csv" |
| enterpriseNameList | 70%概率有主体,30%为空 | ["测试主体"] 或 [""] |
| fileSize | 随机范围 10000-100000 | 16322 |
| fileUploadBy | 固定值 | 448 |
| fileUploadByUserName | 固定值 | "admin@support.com" |
| fileUploadTime | 当前时间 | "2025-03-13 08:45:32" |
| isSplit | 固定值 | 0 |
| leId | 10000 + 随机数 | 10741 |
| logId | 参数传入 | 13994 |
| logMeta | **修复为字符串 "-1"** | "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}" |
| logType | 固定值 | "bankstatement" |
| loginLeId | 10000 + 随机数 | 10741 |
| lostHeader | 固定空数组 | [] |
| realBankName | 与 bankName 一致 | "ALIPAY" |
| rows | 固定值 | 0 |
| source | 固定值 | "http" |
| status | 固定成功值 | -5 |
| templateName | 根据银行选择对应模板 | "ALIPAY_T220708" |
| totalRecords | 随机范围 100-300 | 127 |
| trxDateEndId | 当前日期 | 20231231 |
| trxDateStartId | 当前日期 - 随机90-365天 | 20230102 |
| uploadFileName | 基于 logId 生成 | "测试文件_13994.pdf" |
| uploadStatusDesc | 固定成功描述 | "data.wait.confirm.newaccount" |
|
||||
|
||||
#### 2.2.3 银行类型映射
|
||||
|
||||
| bankName | templateName | realBankName |
|----------|--------------|--------------|
| "ALIPAY" | "ALIPAY_T220708" | "ALIPAY" |
| "BSX" | "BSX_T240925" | "BSX" |
| "ZJRCU" | "ZJRCU_T251114" | "ZJRCU" |
|
||||
|
||||
### 2.3 关键修复点
|
||||
|
||||
#### 修复1: logMeta 字段
|
||||
|
||||
**当前实现**(`services/file_service.py:47`):
|
||||
```python
|
||||
log_meta: str = "{\"lostHeader\":[],\"balanceAmount\":true}" # ❌ 错误
|
||||
```
|
||||
|
||||
**修复后**:
|
||||
```python
|
||||
log_meta: str = "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}" # ✅ 正确
|
||||
```
|
||||
|
||||
#### 修复2: 独立数据生成
|
||||
|
||||
**当前实现**: 依赖 `self.file_records`
|
||||
|
||||
**修复后**: 在 `get_upload_status()` 中独立生成数据,不依赖上传记录
|
||||
|
||||
## 3. 技术设计
|
||||
|
||||
### 3.1 修改文件清单
|
||||
|
||||
| 文件 | 修改内容 |
|------|----------|
| `services/file_service.py` | 1. 修复 FileRecord.log_meta 默认值<br>2. 重构 get_upload_status() 方法<br>3. 新增 _generate_deterministic_record() 方法 |
|
||||
|
||||
### 3.2 核心代码实现
|
||||
|
||||
#### 3.2.1 新增方法: _generate_deterministic_record()
|
||||
|
||||
```python
|
||||
def _generate_deterministic_record(self, log_id: int, group_id: int) -> dict:
|
||||
"""
|
||||
基于 logId 生成确定性的文件记录
|
||||
|
||||
Args:
|
||||
log_id: 文件ID(用作随机种子)
|
||||
group_id: 项目ID
|
||||
|
||||
Returns:
|
||||
文件记录字典(26个字段)
|
||||
"""
|
||||
# 银行类型选项
|
||||
bank_options = [
|
||||
("ALIPAY", "ALIPAY_T220708"),
|
||||
("BSX", "BSX_T240925"),
|
||||
("ZJRCU", "ZJRCU_T251114")
|
||||
]
|
||||
|
||||
bank_name, template_name = random.choice(bank_options)
|
||||
|
||||
# 生成交易日期范围
|
||||
end_date = datetime.now()
|
||||
start_date = end_date - timedelta(days=random.randint(90, 365))
|
||||
|
||||
# 生成账号和主体
|
||||
account_no = f"{random.randint(10000000000, 99999999999)}"
|
||||
enterprise_names = ["测试主体"] if random.random() > 0.3 else [""]
|
||||
|
||||
return {
|
||||
"accountNoList": [account_no],
|
||||
"bankName": bank_name,
|
||||
"dataTypeInfo": ["CSV", ","],
|
||||
"downloadFileName": f"测试文件_{log_id}.csv",
|
||||
"enterpriseNameList": enterprise_names,
|
||||
"fileSize": random.randint(10000, 100000),
|
||||
"fileUploadBy": 448,
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
"fileUploadTime": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"isSplit": 0,
|
||||
"leId": 10000 + random.randint(0, 9999),
|
||||
"logId": log_id,
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
"logType": "bankstatement",
|
||||
"loginLeId": 10000 + random.randint(0, 9999),
|
||||
"lostHeader": [],
|
||||
"realBankName": bank_name,
|
||||
"rows": 0,
|
||||
"source": "http",
|
||||
"status": -5,
|
||||
"templateName": template_name,
|
||||
"totalRecords": random.randint(100, 300),
|
||||
"trxDateEndId": int(end_date.strftime("%Y%m%d")),
|
||||
"trxDateStartId": int(start_date.strftime("%Y%m%d")),
|
||||
"uploadFileName": f"测试文件_{log_id}.pdf",
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
}
|
||||
```
|
||||
|
||||
#### 3.2.2 重构方法: get_upload_status()
|
||||
|
||||
```python
|
||||
def get_upload_status(self, group_id: int, log_id: int = None) -> dict:
|
||||
"""
|
||||
获取文件上传状态(基于 logId 生成确定性数据)
|
||||
|
||||
Args:
|
||||
group_id: 项目ID
|
||||
log_id: 文件ID(可选)
|
||||
|
||||
Returns:
|
||||
上传状态响应字典
|
||||
"""
|
||||
logs = []
|
||||
|
||||
if log_id:
|
||||
# 使用 logId 作为随机种子,确保相同 logId 返回相同数据
|
||||
random.seed(log_id)
|
||||
|
||||
# 生成确定性的文件记录
|
||||
record = self._generate_deterministic_record(log_id, group_id)
|
||||
logs.append(record)
|
||||
|
||||
# 返回响应
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": logs,
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
```
|
||||
|
||||
### 3.3 测试设计
|
||||
|
||||
#### 3.3.1 测试场景
|
||||
|
||||
1. **带 logId 查询**: 验证返回非空 logs 数组
|
||||
2. **不带 logId 查询**: 验证返回空 logs 数组
|
||||
3. **确定性测试**: 相同 logId 多次调用返回相同数据
|
||||
4. **字段完整性**: 验证返回的 26 个字段都存在
|
||||
5. **字段值正确性**: 验证 status=-5, logMeta 格式正确
|
||||
6. **银行类型随机性**: 验证不同 logId 生成不同银行类型
|
||||
|
||||
#### 3.3.2 测试用例示例
|
||||
|
||||
```python
|
||||
def test_get_upload_status_with_log_id():
|
||||
"""测试带 logId 参数查询"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "200"
|
||||
assert len(data["data"]["logs"]) == 1
|
||||
assert data["data"]["logs"][0]["logId"] == 13994
|
||||
assert data["data"]["logs"][0]["status"] == -5
|
||||
assert data["data"]["logs"][0]["logMeta"] == "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}"
|
||||
|
||||
def test_get_upload_status_without_log_id():
|
||||
"""测试不带 logId 参数查询"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["code"] == "200"
|
||||
assert len(data["data"]["logs"]) == 0
|
||||
|
||||
def test_deterministic_data():
|
||||
"""测试相同 logId 返回相同数据"""
|
||||
response1 = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
response2 = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
|
||||
log1 = response1.json()["data"]["logs"][0]
|
||||
log2 = response2.json()["data"]["logs"][0]
|
||||
|
||||
# 验证关键字段相同(除了 fileUploadTime)
|
||||
assert log1["logId"] == log2["logId"]
|
||||
assert log1["bankName"] == log2["bankName"]
|
||||
assert log1["accountNoList"] == log2["accountNoList"]
|
||||
assert log1["enterpriseNameList"] == log2["enterpriseNameList"]
|
||||
```
|
||||
|
||||
## 4. 实施要点
|
||||
|
||||
### 4.1 实施步骤
|
||||
|
||||
1. **修复 FileRecord 类**:修改 `log_meta` 默认值为正确的字符串格式
|
||||
2. **重构 get_upload_status() 方法**:移除对 `self.file_records` 的依赖
|
||||
3. **新增 _generate_deterministic_record() 方法**:实现确定性数据生成
|
||||
4. **更新单元测试**:添加新的测试用例验证功能
|
||||
5. **运行测试验证**:确保所有测试通过
|
||||
|
||||
### 4.2 注意事项
|
||||
|
||||
1. **随机种子**: 必须在生成数据前调用 `random.seed(log_id)`
|
||||
2. **时间字段**: `fileUploadTime` 使用当前时间,每次调用会不同
|
||||
3. **兼容性**: 不影响其他接口(上传、解析状态检查等)
|
||||
4. **性能**: 无需优化,当前方案已足够高效
|
||||
|
||||
### 4.3 风险评估
|
||||
|
||||
| 风险 | 影响 | 缓解措施 |
|------|------|----------|
| 与上传接口数据不一致 | 低 | Mock 服务器允许独立数据源 |
| 随机种子冲突 | 极低 | logId 范围足够大(10000+) |
| 字段缺失 | 中 | 严格按文档生成 26 个字段 |
|
||||
|
||||
## 5. 验收标准
|
||||
|
||||
### 5.1 功能验收
|
||||
|
||||
- [ ] 带 logId 参数查询返回非空 logs 数组
|
||||
- [ ] 不带 logId 参数查询返回空 logs 数组
|
||||
- [ ] 相同 logId 多次查询返回相同的核心字段值
|
||||
- [ ] 返回数据包含完整的 26 个字段
|
||||
- [ ] status 字段值为 -5
|
||||
- [ ] logMeta 字段中 balanceAmount 为字符串 "-1"
|
||||
|
||||
### 5.2 质量验收
|
||||
|
||||
- [ ] 所有单元测试通过
|
||||
- [ ] 代码符合项目编码规范
|
||||
- [ ] 无语法错误和运行时错误
|
||||
- [ ] API 文档(Swagger UI)正确展示接口
|
||||
|
||||
### 5.3 文档验收
|
||||
|
||||
- [ ] CLAUDE.md 更新(如有必要)
|
||||
- [ ] 代码注释完整清晰
|
||||
- [ ] 测试用例覆盖所有场景
|
||||
|
||||
## 6. 后续优化建议
|
||||
|
||||
### 6.1 可选增强
|
||||
|
||||
1. **缓存机制**: 如需提高性能,可基于 logId 缓存生成结果
|
||||
2. **更多银行类型**: 扩展银行类型和模板选项
|
||||
3. **异常场景**: 支持通过特殊 logId 触发错误响应
|
||||
|
||||
### 6.2 不建议的优化
|
||||
|
||||
1. **关联上传记录**: 会增加复杂度,违背 Mock 服务器独立原则
|
||||
2. **预生成数据池**: 过度设计,当前场景不需要
|
||||
|
||||
## 7. 参考资料
|
||||
|
||||
- 接口文档: `assets/兰溪-流水分析对接3.md` 第374-516行
|
||||
- 当前实现: `services/file_service.py` 第265-300行
|
||||
- FileRecord 模型: `services/file_service.py` 第12-59行
|
||||
@@ -0,0 +1,468 @@
|
||||
# 获取单个文件上传状态接口优化实施计划
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** 优化 `/watson/api/project/bs/upload` 接口,实现基于 logId 的确定性数据生成,不依赖上传记录。
|
||||
|
||||
**Architecture:** 使用 `random.seed(log_id)` 确保相同 logId 生成相同数据,完全独立于文件上传记录,符合 Mock 服务器定位。
|
||||
|
||||
**Tech Stack:** FastAPI, Python random/datetime, pytest
|
||||
|
||||
---
|
||||
|
||||
## Task 1: 修复 FileRecord 类的 log_meta 默认值
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py:47`
|
||||
|
||||
**Step 1: 修改 log_meta 默认值**
|
||||
|
||||
在 `services/file_service.py` 第 47 行,将:
|
||||
|
||||
```python
|
||||
log_meta: str = "{\"lostHeader\":[],\"balanceAmount\":true}"
|
||||
```
|
||||
|
||||
改为:
|
||||
|
||||
```python
|
||||
log_meta: str = "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}"
|
||||
```
|
||||
|
||||
**Step 2: 验证修改**
|
||||
|
||||
运行: `python -c "from services.file_service import FileRecord; r = FileRecord(log_id=1, group_id=1, file_name='test.csv'); print(r.log_meta)"`
|
||||
|
||||
预期输出:
|
||||
```
|
||||
{"lostHeader":[],"balanceAmount":"-1"}
|
||||
```
|
||||
|
||||
**Step 3: 提交修复**
|
||||
|
||||
```bash
|
||||
git add services/file_service.py
|
||||
git commit -m "fix: 修复 FileRecord.log_meta 中 balanceAmount 值为字符串 '-1'"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: 编写测试 - 带 logId 查询返回数据
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/test_api.py`
|
||||
|
||||
**Step 1: 编写测试用例**
|
||||
|
||||
在 `tests/test_api.py` 文件末尾添加:
|
||||
|
||||
```python
|
||||
def test_get_upload_status_with_log_id():
|
||||
"""测试带 logId 参数查询返回非空 logs"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# 验证基本响应结构
|
||||
assert data["code"] == "200"
|
||||
assert data["status"] == "200"
|
||||
assert data["successResponse"] is True
|
||||
|
||||
# 验证 logs 不为空
|
||||
assert len(data["data"]["logs"]) == 1
|
||||
|
||||
# 验证返回的 logId 正确
|
||||
log = data["data"]["logs"][0]
|
||||
assert log["logId"] == 13994
|
||||
|
||||
# 验证固定成功状态
|
||||
assert log["status"] == -5
|
||||
assert log["uploadStatusDesc"] == "data.wait.confirm.newaccount"
|
||||
|
||||
# 验证 logMeta 格式正确
|
||||
assert log["logMeta"] == "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}"
|
||||
```
|
||||
|
||||
**Step 2: 运行测试验证失败**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_get_upload_status_with_log_id -v`
|
||||
|
||||
预期: FAIL(因为还未实现)
|
||||
|
||||
**Step 3: 提交测试**
|
||||
|
||||
```bash
|
||||
git add tests/test_api.py
|
||||
git commit -m "test: 添加带 logId 查询的测试用例"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: 编写测试 - 不带 logId 查询返回空数组
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/test_api.py`
|
||||
|
||||
**Step 1: 编写测试用例**
|
||||
|
||||
在 `tests/test_api.py` 文件末尾添加:
|
||||
|
||||
```python
|
||||
def test_get_upload_status_without_log_id():
|
||||
"""测试不带 logId 参数查询返回空 logs 数组"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# 验证基本响应结构
|
||||
assert data["code"] == "200"
|
||||
assert data["status"] == "200"
|
||||
assert data["successResponse"] is True
|
||||
|
||||
# 验证 logs 为空
|
||||
assert len(data["data"]["logs"]) == 0
|
||||
|
||||
# 验证其他字段存在
|
||||
assert data["data"]["status"] == ""
|
||||
assert data["data"]["accountId"] == 8954
|
||||
assert data["data"]["currency"] == "CNY"
|
||||
```
|
||||
|
||||
**Step 2: 运行测试验证失败**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_get_upload_status_without_log_id -v`
|
||||
|
||||
预期: FAIL(因为还未实现)
|
||||
|
||||
**Step 3: 提交测试**
|
||||
|
||||
```bash
|
||||
git add tests/test_api.py
|
||||
git commit -m "test: 添加不带 logId 查询的测试用例"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: 编写测试 - 确定性数据生成
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/test_api.py`
|
||||
|
||||
**Step 1: 编写测试用例**
|
||||
|
||||
在 `tests/test_api.py` 文件末尾添加:
|
||||
|
||||
```python
|
||||
def test_deterministic_data_generation():
|
||||
"""测试相同 logId 多次查询返回相同的核心字段值"""
|
||||
# 第一次查询
|
||||
response1 = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
log1 = response1.json()["data"]["logs"][0]
|
||||
|
||||
# 第二次查询
|
||||
response2 = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
log2 = response2.json()["data"]["logs"][0]
|
||||
|
||||
# 验证关键字段相同
|
||||
assert log1["logId"] == log2["logId"]
|
||||
assert log1["bankName"] == log2["bankName"]
|
||||
assert log1["accountNoList"] == log2["accountNoList"]
|
||||
assert log1["enterpriseNameList"] == log2["enterpriseNameList"]
|
||||
assert log1["status"] == log2["status"]
|
||||
assert log1["logMeta"] == log2["logMeta"]
|
||||
assert log1["templateName"] == log2["templateName"]
|
||||
assert log1["trxDateStartId"] == log2["trxDateStartId"]
|
||||
assert log1["trxDateEndId"] == log2["trxDateEndId"]
|
||||
|
||||
def test_field_completeness():
|
||||
"""测试返回数据包含完整的 26 个字段"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
log = response.json()["data"]["logs"][0]
|
||||
|
||||
# 验证所有必需字段存在
|
||||
required_fields = [
|
||||
"accountNoList", "bankName", "dataTypeInfo", "downloadFileName",
|
||||
"enterpriseNameList", "fileSize", "fileUploadBy", "fileUploadByUserName",
|
||||
"fileUploadTime", "isSplit", "leId", "logId", "logMeta", "logType",
|
||||
"loginLeId", "lostHeader", "realBankName", "rows", "source", "status",
|
||||
"templateName", "totalRecords", "trxDateEndId", "trxDateStartId",
|
||||
"uploadFileName", "uploadStatusDesc"
|
||||
]
|
||||
|
||||
for field in required_fields:
|
||||
assert field in log, f"缺少字段: {field}"
|
||||
```
|
||||
|
||||
**Step 2: 运行测试验证失败**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_deterministic_data_generation tests/test_api.py::test_field_completeness -v`
|
||||
|
||||
预期: FAIL(因为还未实现)
|
||||
|
||||
**Step 3: 提交测试**
|
||||
|
||||
```bash
|
||||
git add tests/test_api.py
|
||||
git commit -m "test: 添加确定性和字段完整性测试用例"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: 实现 _generate_deterministic_record() 方法
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py`
|
||||
|
||||
**Step 1: 在 FileService 类中添加新方法**
|
||||
|
||||
在 `services/file_service.py` 的 `FileService` 类中,在 `_delayed_parse` 方法之后(约第 200 行)添加:
|
||||
|
||||
```python
|
||||
def _generate_deterministic_record(self, log_id: int, group_id: int) -> dict:
|
||||
"""
|
||||
基于 logId 生成确定性的文件记录
|
||||
|
||||
Args:
|
||||
log_id: 文件ID(用作随机种子)
|
||||
group_id: 项目ID
|
||||
|
||||
Returns:
|
||||
文件记录字典(26个字段)
|
||||
"""
|
||||
# 银行类型选项
|
||||
bank_options = [
|
||||
("ALIPAY", "ALIPAY_T220708"),
|
||||
("BSX", "BSX_T240925"),
|
||||
("ZJRCU", "ZJRCU_T251114")
|
||||
]
|
||||
|
||||
bank_name, template_name = random.choice(bank_options)
|
||||
|
||||
# 生成交易日期范围
|
||||
end_date = datetime.now()
|
||||
start_date = end_date - timedelta(days=random.randint(90, 365))
|
||||
|
||||
# 生成账号和主体
|
||||
account_no = f"{random.randint(10000000000, 99999999999)}"
|
||||
enterprise_names = ["测试主体"] if random.random() > 0.3 else [""]
|
||||
|
||||
return {
|
||||
"accountNoList": [account_no],
|
||||
"bankName": bank_name,
|
||||
"dataTypeInfo": ["CSV", ","],
|
||||
"downloadFileName": f"测试文件_{log_id}.csv",
|
||||
"enterpriseNameList": enterprise_names,
|
||||
"fileSize": random.randint(10000, 100000),
|
||||
"fileUploadBy": 448,
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
"fileUploadTime": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"isSplit": 0,
|
||||
"leId": 10000 + random.randint(0, 9999),
|
||||
"logId": log_id,
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
"logType": "bankstatement",
|
||||
"loginLeId": 10000 + random.randint(0, 9999),
|
||||
"lostHeader": [],
|
||||
"realBankName": bank_name,
|
||||
"rows": 0,
|
||||
"source": "http",
|
||||
"status": -5,
|
||||
"templateName": template_name,
|
||||
"totalRecords": random.randint(100, 300),
|
||||
"trxDateEndId": int(end_date.strftime("%Y%m%d")),
|
||||
"trxDateStartId": int(start_date.strftime("%Y%m%d")),
|
||||
"uploadFileName": f"测试文件_{log_id}.pdf",
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证语法正确**
|
||||
|
||||
运行: `python -m py_compile services/file_service.py`
|
||||
|
||||
预期: 无输出(表示语法正确)
|
||||
|
||||
**Step 3: 提交代码**
|
||||
|
||||
```bash
|
||||
git add services/file_service.py
|
||||
git commit -m "feat: 添加 _generate_deterministic_record 方法"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 6: 重构 get_upload_status() 方法
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/file_service.py:265-300`
|
||||
|
||||
**Step 1: 替换整个 get_upload_status() 方法**
|
||||
|
||||
在 `services/file_service.py` 中,找到 `get_upload_status` 方法(约第 265-300 行),完全替换为:
|
||||
|
||||
```python
|
||||
def get_upload_status(self, group_id: int, log_id: int = None) -> dict:
|
||||
"""
|
||||
获取文件上传状态(基于 logId 生成确定性数据)
|
||||
|
||||
Args:
|
||||
group_id: 项目ID
|
||||
log_id: 文件ID(可选)
|
||||
|
||||
Returns:
|
||||
上传状态响应字典
|
||||
"""
|
||||
logs = []
|
||||
|
||||
if log_id:
|
||||
# 使用 logId 作为随机种子,确保相同 logId 返回相同数据
|
||||
random.seed(log_id)
|
||||
|
||||
# 生成确定性的文件记录
|
||||
record = self._generate_deterministic_record(log_id, group_id)
|
||||
logs.append(record)
|
||||
|
||||
# 返回响应
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"logs": logs,
|
||||
"status": "",
|
||||
"accountId": 8954,
|
||||
"currency": "CNY"
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: 验证语法正确**
|
||||
|
||||
运行: `python -m py_compile services/file_service.py`
|
||||
|
||||
预期: 无输出(表示语法正确)
|
||||
|
||||
**Step 3: 提交重构**
|
||||
|
||||
```bash
|
||||
git add services/file_service.py
|
||||
git commit -m "refactor: 重构 get_upload_status 方法实现独立数据生成"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 7: 运行所有测试验证功能
|
||||
|
||||
**Files:**
|
||||
- Test: `tests/test_api.py`
|
||||
|
||||
**Step 1: 运行新增的测试用例**
|
||||
|
||||
运行: `pytest tests/test_api.py::test_get_upload_status_with_log_id tests/test_api.py::test_get_upload_status_without_log_id tests/test_api.py::test_deterministic_data_generation tests/test_api.py::test_field_completeness -v`
|
||||
|
||||
预期: 所有测试 PASS
|
||||
|
||||
**Step 2: 运行完整的测试套件**
|
||||
|
||||
运行: `pytest tests/ -v`
|
||||
|
||||
预期: 所有测试 PASS(确保没有破坏其他功能)
|
||||
|
||||
**Step 3: 手动测试接口**
|
||||
|
||||
运行: `python main.py`(在后台启动服务器)
|
||||
|
||||
在另一个终端运行:
|
||||
```bash
|
||||
curl "http://localhost:8000/watson/api/project/bs/upload?groupId=1000&logId=13994"
|
||||
```
|
||||
|
||||
预期: 返回包含 logId=13994 的 JSON 数据
|
||||
|
||||
**Step 4: 提交验证记录**
|
||||
|
||||
```bash
|
||||
git add tests/
|
||||
git commit -m "test: 验证所有测试通过"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 8: 更新文档并提交
|
||||
|
||||
**Files:**
|
||||
- Modify: `CLAUDE.md`(可选)
|
||||
|
||||
**Step 1: 检查是否需要更新 CLAUDE.md**
|
||||
|
||||
查看项目根目录的 `CLAUDE.md` 文件,确认是否需要添加关于接口独立性的说明。如果需要,在适当位置添加:
|
||||
|
||||
```markdown
|
||||
### 接口说明
|
||||
|
||||
**获取单个文件上传状态接口 (`/watson/api/project/bs/upload`)**:
|
||||
- 此接口完全独立工作,不依赖文件上传记录
|
||||
- 基于 logId 参数生成确定性的随机数据
|
||||
- 相同 logId 每次查询返回相同的核心字段值
|
||||
```
|
||||
|
||||
**Step 2: 提交文档更新(如果有)**
|
||||
|
||||
```bash
|
||||
git add CLAUDE.md
|
||||
git commit -m "docs: 更新接口独立性说明"
|
||||
```
|
||||
|
||||
**Step 3: 最终提交**
|
||||
|
||||
确保所有修改已提交:
|
||||
|
||||
```bash
|
||||
git status
|
||||
```
|
||||
|
||||
预期: 工作目录干净
|
||||
|
||||
---
|
||||
|
||||
## 验收清单
|
||||
|
||||
实施完成后,确认以下验收标准:
|
||||
|
||||
### 功能验收
|
||||
- [x] 带 logId 参数查询返回非空 logs 数组
|
||||
- [x] 不带 logId 参数查询返回空 logs 数组
|
||||
- [x] 相同 logId 多次查询返回相同的核心字段值
|
||||
- [x] 返回数据包含完整的 26 个字段
|
||||
- [x] status 字段值为 -5
|
||||
- [x] logMeta 字段中 balanceAmount 为字符串 "-1"
|
||||
|
||||
### 质量验收
|
||||
- [x] 所有单元测试通过
|
||||
- [x] 代码符合项目编码规范
|
||||
- [x] 无语法错误和运行时错误
|
||||
- [x] API 文档(Swagger UI)正确展示接口
|
||||
|
||||
### 文档验收
|
||||
- [x] 代码注释完整清晰
|
||||
- [x] 测试用例覆盖所有场景
|
||||
|
||||
---
|
||||
|
||||
## 实施说明
|
||||
|
||||
1. **TDD 流程**: 严格遵循"先写测试 → 运行失败 → 写代码 → 运行通过 → 提交"的流程
|
||||
2. **频繁提交**: 每个小的步骤都有独立的提交,便于回滚和追踪
|
||||
3. **独立性**: 此修改不影响其他接口(上传、解析状态检查等)
|
||||
4. **确定性**: 使用 `random.seed(log_id)` 确保相同 logId 生成相同数据
|
||||
5. **简单高效**: 代码简洁,无过度设计,符合 YAGNI 原则
|
||||
|
||||
---
|
||||
|
||||
## 参考资料
|
||||
|
||||
- 设计文档: `docs/plans/2026-03-12-upload-status-api-design.md`
|
||||
- 接口文档: `assets/兰溪-流水分析对接3.md` 第374-516行
|
||||
- 当前实现: `services/file_service.py` 第265-300行
|
||||
@@ -129,6 +129,8 @@ class BankStatementItem(BaseModel):
|
||||
cashType: str = Field("1", description="现金类型")
|
||||
commentsNum: int = Field(0, description="评论数")
|
||||
crAmount: float = Field(0, description="贷方金额")
|
||||
createDate: str = Field(..., description="创建日期")
|
||||
createdBy: str = Field("902001", description="创建人")
|
||||
cretNo: str = Field(..., description="证件号")
|
||||
currency: str = Field("CNY", description="币种")
|
||||
customerAccountMaskNo: str = Field(..., description="客户账号")
|
||||
@@ -158,6 +160,7 @@ class BankStatementItem(BaseModel):
|
||||
transfromBalanceAmount: int = Field(0, description="转换余额")
|
||||
trxBalance: int = Field(0, description="交易余额")
|
||||
trxDate: str = Field(..., description="交易日期")
|
||||
uploadSequnceNumber: int = Field(..., description="上传序列号")
|
||||
userMemo: str = Field(..., description="用户备注")
|
||||
|
||||
|
||||
|
||||
@@ -5,4 +5,4 @@ pydantic-settings==2.1.0
|
||||
python-multipart==0.0.6
|
||||
pytest>=7.0.0
|
||||
pytest-cov>=4.0.0
|
||||
httpx==0.27.2
|
||||
httpx>=0.25.0
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from fastapi import APIRouter, BackgroundTasks, UploadFile, File, Form
|
||||
from fastapi import APIRouter, BackgroundTasks, UploadFile, File, Form, Query
|
||||
from services.token_service import TokenService
|
||||
from services.file_service import FileService
|
||||
from services.statement_service import StatementService
|
||||
@@ -70,13 +70,13 @@ async def get_token(
|
||||
async def upload_file(
|
||||
background_tasks: BackgroundTasks,
|
||||
groupId: int = Form(..., description="项目ID"),
|
||||
file: UploadFile = File(..., description="流水文件"),
|
||||
files: UploadFile = File(..., description="流水文件"),
|
||||
):
|
||||
"""上传流水文件
|
||||
|
||||
文件将立即返回,并在后台延迟4秒完成解析
|
||||
"""
|
||||
return await file_service.upload_file(groupId, file, background_tasks)
|
||||
return await file_service.upload_file(groupId, files, background_tasks)
|
||||
|
||||
|
||||
# ==================== 接口3:拉取行内流水 ====================
|
||||
@@ -127,7 +127,20 @@ async def check_parse_status(
|
||||
return file_service.check_parse_status(groupId, inprogressList)
|
||||
|
||||
|
||||
# ==================== 接口5:删除文件 ====================
|
||||
# ==================== 接口5:获取文件上传状态 ====================
|
||||
@router.get("/watson/api/project/bs/upload")
|
||||
async def get_upload_status(
|
||||
groupId: int = Query(..., description="项目id"),
|
||||
logId: Optional[int] = Query(None, description="文件id"),
|
||||
):
|
||||
"""获取单个文件上传后的状态
|
||||
|
||||
如果不提供 logId,返回该项目的所有文件状态
|
||||
"""
|
||||
return file_service.get_upload_status(groupId, logId)
|
||||
|
||||
|
||||
# ==================== 接口6:删除文件 ====================
|
||||
@router.post("/watson/api/project/batchDeleteUploadFile")
|
||||
async def delete_files(
|
||||
groupId: int = Form(..., description="项目id"),
|
||||
|
||||
@@ -2,18 +2,78 @@ from fastapi import BackgroundTasks, UploadFile
|
||||
from utils.response_builder import ResponseBuilder
|
||||
from config.settings import settings
|
||||
from typing import Dict, List, Union
|
||||
from dataclasses import dataclass, field
|
||||
import time
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
import random
|
||||
import uuid
|
||||
|
||||
|
||||
@dataclass
class FileRecord:
    """Upload-log record kept by the mock file service (extended schema)."""

    # --- original fields ---
    log_id: int
    group_id: int
    file_name: str
    status: int = -5  # -5 means "parsed successfully" in the upstream API
    upload_status_desc: str = "data.wait.confirm.newaccount"
    parsing: bool = True  # True while the simulated parse is still pending

    # --- account / enterprise info ---
    account_no_list: List[str] = field(default_factory=list)
    enterprise_name_list: List[str] = field(default_factory=list)

    # --- bank / template info ---
    bank_name: str = "ZJRCU"
    real_bank_name: str = "ZJRCU"
    template_name: str = "ZJRCU_T251114"
    data_type_info: List[str] = field(default_factory=lambda: ["CSV", ","])

    # --- file metadata ---
    file_size: int = 50000
    download_file_name: str = ""
    # 32-char lowercase hex id (uuid4 without dashes).
    file_package_id: str = field(default_factory=lambda: uuid.uuid4().hex)

    # --- uploader info ---
    file_upload_by: int = 448
    file_upload_by_user_name: str = "admin@support.com"
    file_upload_time: str = field(
        default_factory=lambda: datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    )

    # --- legal-entity / log info ---
    le_id: int = 10000
    login_le_id: int = 10000
    log_type: str = "bankstatement"
    log_meta: str = "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}"
    lost_header: List[str] = field(default_factory=list)

    # --- record statistics ---
    rows: int = 0
    source: str = "http"
    total_records: int = 150
    is_split: int = 0

    # --- transaction date range (YYYYMMDD integers) ---
    trx_date_start_id: int = 20240101
    trx_date_end_id: int = 20241231
|
||||
|
||||
|
||||
class FileService:
|
||||
"""文件上传和解析服务"""
|
||||
|
||||
def __init__(self):
|
||||
self.file_records = {} # logId -> record
|
||||
self.parsing_status = {} # logId -> is_parsing
|
||||
self.file_records: Dict[int, FileRecord] = {} # logId -> FileRecord
|
||||
self.log_counter = settings.INITIAL_LOG_ID
|
||||
|
||||
def _infer_bank_name(self, filename: str) -> tuple:
|
||||
"""根据文件名推断银行名称和模板名称"""
|
||||
if "支付宝" in filename or "alipay" in filename.lower():
|
||||
return "ALIPAY", "ALIPAY_T220708"
|
||||
elif "绍兴银行" in filename or "BSX" in filename:
|
||||
return "BSX", "BSX_T240925"
|
||||
else:
|
||||
return "ZJRCU", "ZJRCU_T251114"
|
||||
|
||||
async def upload_file(
|
||||
self, group_id: int, file: UploadFile, background_tasks: BackgroundTasks
|
||||
) -> Dict:
|
||||
@@ -31,51 +91,199 @@ class FileService:
|
||||
self.log_counter += 1
|
||||
log_id = self.log_counter
|
||||
|
||||
# 获取当前时间
|
||||
upload_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
# 推断银行信息
|
||||
bank_name, template_name = self._infer_bank_name(file.filename)
|
||||
|
||||
# 立即存储文件记录(初始状态:解析中)
|
||||
self.file_records[log_id] = {
|
||||
"logId": log_id,
|
||||
"groupId": group_id,
|
||||
"status": -5,
|
||||
"uploadStatusDesc": "parsing",
|
||||
"uploadFileName": file.filename,
|
||||
"fileSize": 0, # 简化处理
|
||||
"bankName": "MOCK",
|
||||
"uploadTime": upload_time,
|
||||
}
|
||||
# 生成合理的交易日期范围
|
||||
end_date = datetime.now()
|
||||
start_date = end_date - timedelta(days=random.randint(90, 365))
|
||||
trx_date_start_id = int(start_date.strftime("%Y%m%d"))
|
||||
trx_date_end_id = int(end_date.strftime("%Y%m%d"))
|
||||
|
||||
# 标记为解析中
|
||||
self.parsing_status[log_id] = True
|
||||
# 生成随机账号和主体
|
||||
account_no = f"{random.randint(10000000000, 99999999999)}"
|
||||
enterprise_names = ["测试主体"] if random.random() > 0.3 else [""]
|
||||
|
||||
# 启动后台任务,延迟解析
|
||||
background_tasks.add_task(
|
||||
self._simulate_parsing, log_id, settings.PARSE_DELAY_SECONDS
|
||||
# 创建完整的文件记录
|
||||
file_record = FileRecord(
|
||||
log_id=log_id,
|
||||
group_id=group_id,
|
||||
file_name=file.filename,
|
||||
download_file_name=file.filename,
|
||||
bank_name=bank_name,
|
||||
real_bank_name=bank_name,
|
||||
template_name=template_name,
|
||||
account_no_list=[account_no],
|
||||
enterprise_name_list=enterprise_names,
|
||||
le_id=10000 + random.randint(0, 9999),
|
||||
login_le_id=10000 + random.randint(0, 9999),
|
||||
file_size=random.randint(10000, 100000),
|
||||
total_records=random.randint(100, 300),
|
||||
trx_date_start_id=trx_date_start_id,
|
||||
trx_date_end_id=trx_date_end_id,
|
||||
parsing=True,
|
||||
status=-5
|
||||
)
|
||||
|
||||
# 存储记录
|
||||
self.file_records[log_id] = file_record
|
||||
|
||||
# 添加后台任务(延迟解析)
|
||||
background_tasks.add_task(self._delayed_parse, log_id)
|
||||
|
||||
# 构建响应
|
||||
response = ResponseBuilder.build_success_response(
|
||||
"upload", log_id=log_id, upload_time=upload_time
|
||||
)
|
||||
return self._build_upload_response(file_record)
|
||||
|
||||
return response
|
||||
def _build_upload_response(self, file_record: FileRecord) -> dict:
|
||||
"""构建上传接口的完整响应"""
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {
|
||||
"accountsOfLog": {
|
||||
str(file_record.log_id): [
|
||||
{
|
||||
"bank": file_record.bank_name,
|
||||
"accountName": file_record.enterprise_name_list[0] if file_record.enterprise_name_list else "",
|
||||
"accountNo": file_record.account_no_list[0] if file_record.account_no_list else "",
|
||||
"currency": "CNY"
|
||||
}
|
||||
]
|
||||
},
|
||||
"uploadLogList": [
|
||||
{
|
||||
"accountNoList": file_record.account_no_list,
|
||||
"bankName": file_record.bank_name,
|
||||
"dataTypeInfo": file_record.data_type_info,
|
||||
"downloadFileName": file_record.download_file_name,
|
||||
"enterpriseNameList": file_record.enterprise_name_list,
|
||||
"filePackageId": file_record.file_package_id,
|
||||
"fileSize": file_record.file_size,
|
||||
"fileUploadBy": file_record.file_upload_by,
|
||||
"fileUploadByUserName": file_record.file_upload_by_user_name,
|
||||
"fileUploadTime": file_record.file_upload_time,
|
||||
"leId": file_record.le_id,
|
||||
"logId": file_record.log_id,
|
||||
"logMeta": file_record.log_meta,
|
||||
"logType": file_record.log_type,
|
||||
"loginLeId": file_record.login_le_id,
|
||||
"lostHeader": file_record.lost_header,
|
||||
"realBankName": file_record.real_bank_name,
|
||||
"rows": file_record.rows,
|
||||
"source": file_record.source,
|
||||
"status": file_record.status,
|
||||
"templateName": file_record.template_name,
|
||||
"totalRecords": file_record.total_records,
|
||||
"trxDateEndId": file_record.trx_date_end_id,
|
||||
"trxDateStartId": file_record.trx_date_start_id,
|
||||
"uploadFileName": file_record.file_name,
|
||||
"uploadStatusDesc": file_record.upload_status_desc
|
||||
}
|
||||
],
|
||||
"uploadStatus": 1
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True
|
||||
}
|
||||
|
||||
def _simulate_parsing(self, log_id: int, delay_seconds: int):
|
||||
def _delayed_parse(self, log_id: int):
|
||||
"""后台任务:模拟文件解析过程
|
||||
|
||||
Args:
|
||||
log_id: 日志ID
|
||||
delay_seconds: 延迟秒数
|
||||
"""
|
||||
time.sleep(delay_seconds)
|
||||
time.sleep(settings.PARSE_DELAY_SECONDS)
|
||||
|
||||
# 解析完成,更新状态
|
||||
if log_id in self.file_records:
|
||||
self.file_records[log_id]["uploadStatusDesc"] = (
|
||||
"data.wait.confirm.newaccount"
|
||||
)
|
||||
self.parsing_status[log_id] = False
|
||||
self.file_records[log_id].parsing = False
|
||||
|
||||
def _generate_deterministic_record(self, log_id: int, group_id: int) -> dict:
|
||||
"""
|
||||
基于 logId 生成确定性的文件记录
|
||||
|
||||
Args:
|
||||
log_id: 文件ID(用作随机种子)
|
||||
group_id: 项目ID
|
||||
|
||||
Returns:
|
||||
文件记录字典(26个字段)
|
||||
"""
|
||||
# 银行类型选项
|
||||
bank_options = [
|
||||
("ALIPAY", "ALIPAY_T220708"),
|
||||
("BSX", "BSX_T240925"),
|
||||
("ZJRCU", "ZJRCU_T251114")
|
||||
]
|
||||
|
||||
bank_name, template_name = random.choice(bank_options)
|
||||
|
||||
# 生成交易日期范围
|
||||
end_date = datetime.now()
|
||||
start_date = end_date - timedelta(days=random.randint(90, 365))
|
||||
|
||||
# 生成账号和主体
|
||||
account_no = f"{random.randint(10000000000, 99999999999)}"
|
||||
enterprise_names = ["测试主体"] if random.random() > 0.3 else [""]
|
||||
|
||||
return {
|
||||
"accountNoList": [account_no],
|
||||
"bankName": bank_name,
|
||||
"dataTypeInfo": ["CSV", ","],
|
||||
"downloadFileName": f"测试文件_{log_id}.csv",
|
||||
"enterpriseNameList": enterprise_names,
|
||||
"fileSize": random.randint(10000, 100000),
|
||||
"fileUploadBy": 448,
|
||||
"fileUploadByUserName": "admin@support.com",
|
||||
"fileUploadTime": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"isSplit": 0,
|
||||
"leId": 10000 + random.randint(0, 9999),
|
||||
"logId": log_id,
|
||||
"logMeta": "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}",
|
||||
"logType": "bankstatement",
|
||||
"loginLeId": 10000 + random.randint(0, 9999),
|
||||
"lostHeader": [],
|
||||
"realBankName": bank_name,
|
||||
"rows": 0,
|
||||
"source": "http",
|
||||
"status": -5,
|
||||
"templateName": template_name,
|
||||
"totalRecords": random.randint(100, 300),
|
||||
"trxDateEndId": int(end_date.strftime("%Y%m%d")),
|
||||
"trxDateStartId": int(start_date.strftime("%Y%m%d")),
|
||||
"uploadFileName": f"测试文件_{log_id}.pdf",
|
||||
"uploadStatusDesc": "data.wait.confirm.newaccount"
|
||||
}
|
||||
|
||||
def _build_log_detail(self, record: FileRecord) -> dict:
|
||||
"""构建日志详情对象"""
|
||||
return {
|
||||
"accountNoList": record.account_no_list,
|
||||
"bankName": record.bank_name,
|
||||
"dataTypeInfo": record.data_type_info,
|
||||
"downloadFileName": record.download_file_name,
|
||||
"enterpriseNameList": record.enterprise_name_list,
|
||||
"fileSize": record.file_size,
|
||||
"fileUploadBy": record.file_upload_by,
|
||||
"fileUploadByUserName": record.file_upload_by_user_name,
|
||||
"fileUploadTime": record.file_upload_time,
|
||||
"isSplit": record.is_split,
|
||||
"leId": record.le_id,
|
||||
"logId": record.log_id,
|
||||
"logMeta": record.log_meta,
|
||||
"logType": record.log_type,
|
||||
"loginLeId": record.login_le_id,
|
||||
"lostHeader": record.lost_header,
|
||||
"realBankName": record.real_bank_name,
|
||||
"rows": record.rows,
|
||||
"source": record.source,
|
||||
"status": record.status,
|
||||
"templateName": record.template_name,
|
||||
"totalRecords": record.total_records,
|
||||
"trxDateEndId": record.trx_date_end_id,
|
||||
"trxDateStartId": record.trx_date_start_id,
|
||||
"uploadFileName": record.file_name,
|
||||
"uploadStatusDesc": record.upload_status_desc
|
||||
}
|
||||
|
||||
def check_parse_status(self, group_id: int, inprogress_list: str) -> Dict:
|
||||
"""检查文件解析状态
|
||||
@@ -90,23 +298,59 @@ class FileService:
|
||||
# 解析logId列表
|
||||
log_ids = [int(x.strip()) for x in inprogress_list.split(",") if x.strip()]
|
||||
|
||||
# 检查是否还在解析中
|
||||
is_parsing = any(
|
||||
self.parsing_status.get(log_id, False) for log_id in log_ids
|
||||
)
|
||||
pending_list = []
|
||||
all_parsing_complete = True
|
||||
|
||||
# 获取待处理列表
|
||||
pending_list = [
|
||||
self.file_records[log_id]
|
||||
for log_id in log_ids
|
||||
if log_id in self.file_records
|
||||
]
|
||||
for log_id in log_ids:
|
||||
if log_id in self.file_records:
|
||||
record = self.file_records[log_id]
|
||||
if record.parsing:
|
||||
all_parsing_complete = False
|
||||
|
||||
pending_list.append(self._build_log_detail(record))
|
||||
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {"parsing": is_parsing, "pendingList": pending_list},
|
||||
"data": {
|
||||
"parsing": not all_parsing_complete,
|
||||
"pendingList": pending_list
|
||||
},
|
||||
"status": "200",
|
||||
"successResponse": True,
|
||||
"successResponse": True
|
||||
}
|
||||
|
||||
def get_upload_status(self, group_id: int, log_id: int = None) -> dict:
    """Report upload status for a single file (deterministic per ``log_id``).

    Args:
        group_id: project id.
        log_id: optional file id; when given, the module RNG is seeded
            with it so the same id always yields the same record. When
            absent, ``logs`` is empty.

    Returns:
        API-shaped response dict with the (possibly empty) ``logs`` list.
    """
    log_entries = []
    if log_id:
        # Seed with the file id: identical log_id -> identical record.
        random.seed(log_id)
        log_entries = [self._generate_deterministic_record(log_id, group_id)]

    payload = {
        "logs": log_entries,
        "status": "",
        "accountId": 8954,
        "currency": "CNY",
    }
    return {
        "code": "200",
        "data": payload,
        "status": "200",
        "successResponse": True,
    }
|
||||
|
||||
def delete_files(self, group_id: int, log_ids: List[int], user_id: int) -> Dict:
|
||||
@@ -121,30 +365,38 @@ class FileService:
|
||||
删除响应字典
|
||||
"""
|
||||
# 删除文件记录
|
||||
deleted_count = 0
|
||||
for log_id in log_ids:
|
||||
self.file_records.pop(log_id, None)
|
||||
self.parsing_status.pop(log_id, None)
|
||||
if log_id in self.file_records:
|
||||
del self.file_records[log_id]
|
||||
deleted_count += 1
|
||||
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {"message": "delete.files.success"},
|
||||
"code": "200 OK", # 注意:这里是 "200 OK" 不是 "200"
|
||||
"data": {
|
||||
"message": "delete.files.success"
|
||||
},
|
||||
"message": "delete.files.success",
|
||||
"status": "200",
|
||||
"successResponse": True,
|
||||
"successResponse": True
|
||||
}
|
||||
|
||||
def fetch_inner_flow(self, request: Union[Dict, object]) -> Dict:
|
||||
"""拉取行内流水(模拟无数据场景)
|
||||
"""拉取行内流水(返回随机logId)
|
||||
|
||||
Args:
|
||||
request: 拉取流水请求(可以是字典或对象)
|
||||
request: 拉取流水请求(保留参数以符合接口规范,当前Mock实现不使用)
|
||||
|
||||
Returns:
|
||||
流水响应字典
|
||||
流水响应字典,包含随机生成的logId数组
|
||||
"""
|
||||
# 模拟无行内流水文件场景
|
||||
# 随机生成一个logId(范围:10000-99999)
|
||||
log_id = random.randint(10000, 99999)
|
||||
|
||||
# 返回成功的响应,包含logId数组
|
||||
return {
|
||||
"code": "200",
|
||||
"data": {"code": "501014", "message": "无行内流水文件"},
|
||||
"data": [log_id],
|
||||
"status": "200",
|
||||
"successResponse": True,
|
||||
}
|
||||
|
||||
@@ -1,10 +1,147 @@
|
||||
from utils.response_builder import ResponseBuilder
|
||||
from typing import Dict, Union
|
||||
from typing import Dict, Union, List
|
||||
import random
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
import logging
|
||||
|
||||
# 配置日志
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StatementService:
|
||||
"""流水数据服务"""
|
||||
|
||||
def __init__(self):
|
||||
# 缓存:logId -> (statements_list, total_count)
|
||||
self._cache: Dict[int, tuple] = {}
|
||||
# 配置日志级别为 INFO
|
||||
logger.info(f"StatementService initialized with empty cache")
|
||||
|
||||
def _generate_random_statement(self, index: int, group_id: int, log_id: int) -> Dict:
|
||||
"""生成单条随机流水记录
|
||||
|
||||
Args:
|
||||
index: 流水序号
|
||||
group_id: 项目ID
|
||||
log_id: 文件ID
|
||||
|
||||
Returns:
|
||||
单条流水记录字典
|
||||
"""
|
||||
# 随机生成交易日期(最近1年内)
|
||||
days_ago = random.randint(0, 365)
|
||||
trx_datetime = datetime.now() - timedelta(days=days_ago)
|
||||
trx_date = trx_datetime.strftime("%Y-%m-%d %H:%M:%S")
|
||||
accounting_date = trx_datetime.strftime("%Y-%m-%d")
|
||||
accounting_date_id = int(trx_datetime.strftime("%Y%m%d"))
|
||||
|
||||
# 生成创建日期(格式:YYYY-MM-DD HH:MM:SS)
|
||||
create_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
# 随机生成交易金额
|
||||
trans_amount = round(random.uniform(10, 10000), 2)
|
||||
|
||||
# 随机决定是收入还是支出
|
||||
if random.random() > 0.5:
|
||||
# 支出
|
||||
dr_amount = trans_amount
|
||||
cr_amount = 0
|
||||
trans_flag = "P"
|
||||
else:
|
||||
# 收入
|
||||
cr_amount = trans_amount
|
||||
dr_amount = 0
|
||||
trans_flag = "R"
|
||||
|
||||
# 随机余额
|
||||
balance_amount = round(random.uniform(1000, 50000), 2)
|
||||
|
||||
# 随机客户信息
|
||||
customers = ["小店", "支付宝", "微信支付", "财付通", "美团", "京东", "淘宝", "银行转账"]
|
||||
customer_name = random.choice(customers)
|
||||
customer_account = str(random.randint(100000000, 999999999))
|
||||
|
||||
# 随机交易描述
|
||||
memos = [
|
||||
f"消费_{customer_name}",
|
||||
f"转账_{customer_name}",
|
||||
f"收款_{customer_name}",
|
||||
f"支付_{customer_name}",
|
||||
f"退款_{customer_name}",
|
||||
]
|
||||
user_memo = random.choice(memos)
|
||||
|
||||
return {
|
||||
"accountId": 0,
|
||||
"accountMaskNo": f"{random.randint(100000000000000, 999999999999999)}",
|
||||
"accountingDate": accounting_date,
|
||||
"accountingDateId": accounting_date_id,
|
||||
"archivingFlag": 0,
|
||||
"attachments": 0,
|
||||
"balanceAmount": balance_amount,
|
||||
"bank": "ZJRCU",
|
||||
"bankComments": "",
|
||||
"bankStatementId": 12847662 + index,
|
||||
"bankTrxNumber": uuid.uuid4().hex,
|
||||
"batchId": log_id,
|
||||
"cashType": "1",
|
||||
"commentsNum": 0,
|
||||
"crAmount": cr_amount,
|
||||
"createDate": create_date,
|
||||
"createdBy": "902001",
|
||||
"cretNo": "230902199012261247",
|
||||
"currency": "CNY",
|
||||
"customerAccountMaskNo": customer_account,
|
||||
"customerBank": "",
|
||||
"customerId": -1,
|
||||
"customerName": customer_name,
|
||||
"customerReference": "",
|
||||
"downPaymentFlag": 0,
|
||||
"drAmount": dr_amount,
|
||||
"exceptionType": "",
|
||||
"groupId": group_id,
|
||||
"internalFlag": 0,
|
||||
"leId": 16308,
|
||||
"leName": "张传伟",
|
||||
"overrideBsId": 0,
|
||||
"paymentMethod": "",
|
||||
"sourceCatalogId": 0,
|
||||
"split": 0,
|
||||
"subBankstatementId": 0,
|
||||
"toDoFlag": 0,
|
||||
"transAmount": trans_amount,
|
||||
"transFlag": trans_flag,
|
||||
"transTypeId": 0,
|
||||
"transformAmount": 0,
|
||||
"transformCrAmount": 0,
|
||||
"transformDrAmount": 0,
|
||||
"transfromBalanceAmount": 0,
|
||||
"trxBalance": 0,
|
||||
"trxDate": trx_date,
|
||||
"uploadSequnceNumber": index + 1,
|
||||
"userMemo": user_memo
|
||||
}
|
||||
|
||||
|
||||
|
||||
def _generate_statements(self, group_id: int, log_id: int, count: int) -> List[Dict]:
    """Produce ``count`` random statement rows for one file.

    Args:
        group_id: project id forwarded to each row.
        log_id: file id forwarded to each row.
        count: number of rows to generate.

    Returns:
        list of statement dicts, indexed 0..count-1.
    """
    return [
        self._generate_random_statement(i, group_id, log_id)
        for i in range(count)
    ]
|
||||
|
||||
def get_bank_statement(self, request: Union[Dict, object]) -> Dict:
|
||||
"""获取银行流水列表
|
||||
|
||||
@@ -16,21 +153,32 @@ class StatementService:
|
||||
"""
|
||||
# 支持 dict 或对象
|
||||
if isinstance(request, dict):
|
||||
group_id = request.get("groupId", 1000)
|
||||
log_id = request.get("logId", 10000)
|
||||
page_now = request.get("pageNow", 1)
|
||||
page_size = request.get("pageSize", 10)
|
||||
else:
|
||||
group_id = request.groupId
|
||||
log_id = request.logId
|
||||
page_now = request.pageNow
|
||||
page_size = request.pageSize
|
||||
|
||||
# 加载模板
|
||||
template = ResponseBuilder.load_template("bank_statement")
|
||||
statements = template["success_response"]["data"]["bankStatementList"]
|
||||
total_count = len(statements)
|
||||
# 检查缓存中是否已有该logId的数据
|
||||
if log_id not in self._cache:
|
||||
# 随机生成总条数(1200-1500之间)
|
||||
total_count = random.randint(1200, 1500)
|
||||
# 生成所有流水记录
|
||||
all_statements = self._generate_statements(group_id, log_id, total_count)
|
||||
# 存入缓存
|
||||
self._cache[log_id] = (all_statements, total_count)
|
||||
|
||||
# 从缓存获取数据
|
||||
all_statements, total_count = self._cache[log_id]
|
||||
|
||||
# 模拟分页
|
||||
start = (page_now - 1) * page_size
|
||||
end = start + page_size
|
||||
page_data = statements[start:end]
|
||||
page_data = all_statements[start:end]
|
||||
|
||||
return {
|
||||
"code": "200",
|
||||
|
||||
@@ -32,3 +32,17 @@ def sample_token_request():
|
||||
"orgCode": "902000",
|
||||
"departmentCode": "902000",
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_inner_flow_request():
|
||||
"""示例拉取行内流水请求"""
|
||||
return {
|
||||
"groupId": 1001,
|
||||
"customerNo": "test_customer_001",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
|
||||
@@ -48,3 +48,129 @@ def test_get_token_error_40101(client):
|
||||
data = response.json()
|
||||
assert data["code"] == "40101"
|
||||
assert data["successResponse"] == False
|
||||
|
||||
|
||||
def test_fetch_inner_flow_success(client, sample_inner_flow_request):
|
||||
"""测试拉取行内流水 - 成功场景"""
|
||||
response = client.post(
|
||||
"/watson/api/project/getJZFileOrZjrcuFile",
|
||||
data=sample_inner_flow_request
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "200"
|
||||
assert data["successResponse"] == True
|
||||
assert isinstance(data["data"], list)
|
||||
assert len(data["data"]) == 1
|
||||
assert isinstance(data["data"][0], int)
|
||||
assert 10000 <= data["data"][0] <= 99999
|
||||
|
||||
|
||||
def test_fetch_inner_flow_error_501014(client):
|
||||
"""测试拉取行内流水 - 错误场景 501014"""
|
||||
request_data = {
|
||||
"groupId": 1001,
|
||||
"customerNo": "test_error_501014",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
response = client.post(
|
||||
"/watson/api/project/getJZFileOrZjrcuFile",
|
||||
data=request_data
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["code"] == "501014"
|
||||
assert data["successResponse"] == False
|
||||
|
||||
|
||||
def test_get_upload_status_with_log_id(client):
|
||||
"""测试带 logId 参数查询返回非空 logs"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# 验证基本响应结构
|
||||
assert data["code"] == "200"
|
||||
assert data["status"] == "200"
|
||||
assert data["successResponse"] is True
|
||||
|
||||
# 验证 logs 不为空
|
||||
assert len(data["data"]["logs"]) == 1
|
||||
|
||||
# 验证返回的 logId 正确
|
||||
log = data["data"]["logs"][0]
|
||||
assert log["logId"] == 13994
|
||||
|
||||
# 验证固定成功状态
|
||||
assert log["status"] == -5
|
||||
assert log["uploadStatusDesc"] == "data.wait.confirm.newaccount"
|
||||
|
||||
# 验证 logMeta 格式正确
|
||||
assert log["logMeta"] == "{\"lostHeader\":[],\"balanceAmount\":\"-1\"}"
|
||||
|
||||
|
||||
def test_get_upload_status_without_log_id(client):
|
||||
"""测试不带 logId 参数查询返回空 logs 数组"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# 验证基本响应结构
|
||||
assert data["code"] == "200"
|
||||
assert data["status"] == "200"
|
||||
assert data["successResponse"] is True
|
||||
|
||||
# 验证 logs 为空
|
||||
assert len(data["data"]["logs"]) == 0
|
||||
|
||||
# 验证其他字段存在
|
||||
assert data["data"]["status"] == ""
|
||||
assert data["data"]["accountId"] == 8954
|
||||
assert data["data"]["currency"] == "CNY"
|
||||
|
||||
|
||||
def test_deterministic_data_generation(client):
|
||||
"""测试相同 logId 多次查询返回相同的核心字段值"""
|
||||
# 第一次查询
|
||||
response1 = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
log1 = response1.json()["data"]["logs"][0]
|
||||
|
||||
# 第二次查询
|
||||
response2 = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
log2 = response2.json()["data"]["logs"][0]
|
||||
|
||||
# 验证关键字段相同
|
||||
assert log1["logId"] == log2["logId"]
|
||||
assert log1["bankName"] == log2["bankName"]
|
||||
assert log1["accountNoList"] == log2["accountNoList"]
|
||||
assert log1["enterpriseNameList"] == log2["enterpriseNameList"]
|
||||
assert log1["status"] == log2["status"]
|
||||
assert log1["logMeta"] == log2["logMeta"]
|
||||
assert log1["templateName"] == log2["templateName"]
|
||||
assert log1["trxDateStartId"] == log2["trxDateStartId"]
|
||||
assert log1["trxDateEndId"] == log2["trxDateEndId"]
|
||||
|
||||
|
||||
def test_field_completeness(client):
|
||||
"""测试返回数据包含完整的 26 个字段"""
|
||||
response = client.get("/watson/api/project/bs/upload?groupId=1000&logId=13994")
|
||||
log = response.json()["data"]["logs"][0]
|
||||
|
||||
# 验证所有必需字段存在
|
||||
required_fields = [
|
||||
"accountNoList", "bankName", "dataTypeInfo", "downloadFileName",
|
||||
"enterpriseNameList", "fileSize", "fileUploadBy", "fileUploadByUserName",
|
||||
"fileUploadTime", "isSplit", "leId", "logId", "logMeta", "logType",
|
||||
"loginLeId", "lostHeader", "realBankName", "rows", "source", "status",
|
||||
"templateName", "totalRecords", "trxDateEndId", "trxDateStartId",
|
||||
"uploadFileName", "uploadStatusDesc"
|
||||
]
|
||||
|
||||
for field in required_fields:
|
||||
assert field in log, f"缺少字段: {field}"
|
||||
|
||||
@@ -7,6 +7,7 @@ class ErrorSimulator:
|
||||
|
||||
# 错误码映射表
|
||||
ERROR_CODES = {
|
||||
"40100": {"code": "40100", "message": "未知异常"},
|
||||
"40101": {"code": "40101", "message": "appId错误"},
|
||||
"40102": {"code": "40102", "message": "appSecretCode错误"},
|
||||
"40104": {"code": "40104", "message": "可使用项目次数为0,无法创建项目"},
|
||||
|
||||
109
lsfx-mock-server/verify_implementation.py
Normal file
109
lsfx-mock-server/verify_implementation.py
Normal file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""验证所有7个接口是否正常工作"""
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# 添加项目根目录到 Python 路径
|
||||
sys.path.insert(0, str(Path(__file__).parent))
|
||||
|
||||
def test_interfaces():
|
||||
"""测试所有接口"""
|
||||
from services.token_service import TokenService
|
||||
from services.file_service import FileService
|
||||
from services.statement_service import StatementService
|
||||
from utils.error_simulator import ErrorSimulator
|
||||
|
||||
print("=" * 60)
|
||||
print("Interface Alignment Verification Test")
|
||||
print("=" * 60)
|
||||
|
||||
# 1. 验证 TokenService
|
||||
print("\n[1/6] TokenService initialization...")
|
||||
token_svc = TokenService()
|
||||
print(" [OK] TokenService initialized")
|
||||
|
||||
# 2. 验证 FileService
|
||||
print("\n[2/6] FileService initialization...")
|
||||
file_svc = FileService()
|
||||
print(" [OK] FileService initialized")
|
||||
|
||||
# 3. 验证 StatementService
|
||||
print("\n[3/6] StatementService initialization...")
|
||||
stmt_svc = StatementService()
|
||||
print(" [OK] StatementService initialized")
|
||||
|
||||
# 4. 验证错误码
|
||||
print("\n[4/6] Error codes verification...")
|
||||
assert "40100" in ErrorSimulator.ERROR_CODES, "Error code 40100 not found"
|
||||
assert ErrorSimulator.ERROR_CODES["40100"]["message"] == "未知异常", "Error message incorrect"
|
||||
print(" [OK] Error code 40100 added")
|
||||
|
||||
# 5. 验证响应模板文件
|
||||
print("\n[5/6] Response template files verification...")
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
responses_dir = Path("config/responses")
|
||||
|
||||
# 检查 token.json
|
||||
with open(responses_dir / "token.json", encoding='utf-8') as f:
|
||||
token_data = json.load(f)
|
||||
assert isinstance(token_data["success_response"]["data"]["analysisType"], int), "analysisType should be integer"
|
||||
print(" [OK] token.json format correct (analysisType is integer)")
|
||||
|
||||
# 检查 upload_status.json
|
||||
assert (responses_dir / "upload_status.json").exists(), "upload_status.json not found"
|
||||
print(" [OK] upload_status.json created")
|
||||
|
||||
# 检查 bank_statement.json
|
||||
with open(responses_dir / "bank_statement.json", encoding='utf-8') as f:
|
||||
stmt_data = json.load(f)
|
||||
assert len(stmt_data["success_response"]["data"]["bankStatementList"]) > 0, "bankStatementList is empty"
|
||||
print(" [OK] bank_statement.json format correct")
|
||||
|
||||
# 6. 验证 FileRecord 字段
|
||||
print("\n[6/6] FileRecord fields verification...")
|
||||
from services.file_service import FileRecord
|
||||
|
||||
record = FileRecord(
|
||||
log_id=10001,
|
||||
group_id=1000,
|
||||
file_name="test.csv"
|
||||
)
|
||||
|
||||
# 检查所有必需字段是否存在
|
||||
required_fields = [
|
||||
'account_no_list', 'enterprise_name_list', 'bank_name', 'real_bank_name',
|
||||
'template_name', 'data_type_info', 'file_size', 'download_file_name',
|
||||
'file_package_id', 'file_upload_by', 'file_upload_by_user_name',
|
||||
'file_upload_time', 'le_id', 'login_le_id', 'log_type', 'log_meta',
|
||||
'lost_header', 'rows', 'source', 'total_records', 'is_split',
|
||||
'trx_date_start_id', 'trx_date_end_id'
|
||||
]
|
||||
|
||||
for field in required_fields:
|
||||
assert hasattr(record, field), f"FileRecord missing field: {field}"
|
||||
|
||||
print(" [OK] FileRecord contains all {} required fields".format(len(required_fields)))
|
||||
|
||||
print("\n" + "=" * 60)
|
||||
print("[SUCCESS] All verifications passed!")
|
||||
print("=" * 60)
|
||||
|
||||
print("\nInterface List:")
|
||||
print("1. POST /account/common/getToken")
|
||||
print("2. POST /watson/api/project/remoteUploadSplitFile")
|
||||
print("3. POST /watson/api/project/getJZFileOrZjrcuFile")
|
||||
print("4. POST /watson/api/project/upload/getpendings")
|
||||
print("5. GET /watson/api/project/bs/upload [NEW]")
|
||||
print("6. POST /watson/api/project/batchDeleteUploadFile")
|
||||
print("7. POST /watson/api/project/getBSByLogId")
|
||||
|
||||
print("\nNext Steps:")
|
||||
print("- Run: python main.py")
|
||||
print("- Visit: http://localhost:8000/docs")
|
||||
print("- Test all 7 interfaces")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_interfaces()
|
||||
@@ -111,7 +111,8 @@ spring:
|
||||
lsfx:
|
||||
api:
|
||||
# Mock Server(本地测试)
|
||||
base-url: http://localhost:8000
|
||||
# base-url: http://localhost:8000
|
||||
base-url: http://116.62.17.81:62320
|
||||
# 测试环境
|
||||
# base-url: http://158.234.196.5:82/c4c3
|
||||
# 生产环境
|
||||
|
||||
@@ -106,7 +106,7 @@ spring:
|
||||
lsfx:
|
||||
api:
|
||||
# Mock Server(本地测试)
|
||||
base-url: http://localhost:8000
|
||||
base-url: http://116.62.17.81:62320
|
||||
# 测试环境
|
||||
# base-url: http://158.234.196.5:82/c4c3
|
||||
# 生产环境
|
||||
|
||||
35
ruoyi-ui/src/api/ccdiBaseStaffAsset.js
Normal file
35
ruoyi-ui/src/api/ccdiBaseStaffAsset.js
Normal file
@@ -0,0 +1,35 @@
|
||||
import request from '@/utils/request'
|
||||
|
||||
// 下载员工资产导入模板
|
||||
export function importBaseStaffAssetTemplate() {
|
||||
return request({
|
||||
url: '/ccdi/baseStaff/asset/importTemplate',
|
||||
method: 'post'
|
||||
})
|
||||
}
|
||||
|
||||
// 导入员工资产数据
|
||||
export function importBaseStaffAssetData(data) {
|
||||
return request({
|
||||
url: '/ccdi/baseStaff/asset/importData',
|
||||
method: 'post',
|
||||
data: data
|
||||
})
|
||||
}
|
||||
|
||||
// 查询员工资产导入状态
|
||||
export function getBaseStaffAssetImportStatus(taskId) {
|
||||
return request({
|
||||
url: '/ccdi/baseStaff/asset/importStatus/' + taskId,
|
||||
method: 'get'
|
||||
})
|
||||
}
|
||||
|
||||
// 查询员工资产导入失败记录
|
||||
export function getBaseStaffAssetImportFailures(taskId, pageNum, pageSize) {
|
||||
return request({
|
||||
url: '/ccdi/baseStaff/asset/importFailures/' + taskId,
|
||||
method: 'get',
|
||||
params: { pageNum, pageSize }
|
||||
})
|
||||
}
|
||||
@@ -417,7 +417,7 @@
|
||||
<el-link type="primary" :underline="false" style="font-size: 12px; vertical-align: baseline;" @click="importAssetTemplate">下载员工资产模板</el-link>
|
||||
</div>
|
||||
<div class="el-upload__tip" slot="tip">
|
||||
<span>仅允许导入"xls"或"xlsx"格式文件,系统将根据 personId/person_id 自动识别归属员工。</span>
|
||||
<span>仅支持导入员工本人资产数据,文件需为"xls"或"xlsx"格式,系统将根据 personId/person_id 自动识别归属员工。</span>
|
||||
</div>
|
||||
</el-upload>
|
||||
<div slot="footer" class="dialog-footer">
|
||||
@@ -520,9 +520,9 @@ import {
|
||||
updateBaseStaff
|
||||
} from "@/api/ccdiBaseStaff";
|
||||
import {
|
||||
getAssetImportFailures,
|
||||
getAssetImportStatus
|
||||
} from "@/api/ccdiAssetInfo";
|
||||
getBaseStaffAssetImportFailures,
|
||||
getBaseStaffAssetImportStatus
|
||||
} from "@/api/ccdiBaseStaffAsset";
|
||||
import {deptTreeSelect} from "@/api/system/user";
|
||||
import {getToken} from "@/utils/auth";
|
||||
import Treeselect from "@riophae/vue-treeselect";
|
||||
@@ -653,7 +653,7 @@ export default {
|
||||
title: "",
|
||||
isUploading: false,
|
||||
headers: { Authorization: "Bearer " + getToken() },
|
||||
url: process.env.VUE_APP_BASE_API + "/ccdi/assetInfo/importData"
|
||||
url: process.env.VUE_APP_BASE_API + "/ccdi/baseStaff/asset/importData"
|
||||
},
|
||||
assetFailureDialogVisible: false,
|
||||
assetFailureList: [],
|
||||
@@ -1182,7 +1182,7 @@ export default {
|
||||
this.download('ccdi/baseStaff/importTemplate', {}, `员工信息模板_${new Date().getTime()}.xlsx`)
|
||||
},
|
||||
importAssetTemplate() {
|
||||
this.download('ccdi/assetInfo/importTemplate', {}, `员工资产信息模板_${new Date().getTime()}.xlsx`)
|
||||
this.download('ccdi/baseStaff/asset/importTemplate', {}, `员工资产信息模板_${new Date().getTime()}.xlsx`)
|
||||
},
|
||||
// 文件上传中处理
|
||||
handleFileUploadProgress(event, file, fileList) {
|
||||
@@ -1336,7 +1336,7 @@ export default {
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await getAssetImportStatus(taskId);
|
||||
const response = await getBaseStaffAssetImportStatus(taskId);
|
||||
|
||||
if (response.data && response.data.status !== 'PROCESSING') {
|
||||
clearInterval(this.assetPollingTimer);
|
||||
@@ -1466,7 +1466,7 @@ export default {
|
||||
},
|
||||
getAssetFailureList() {
|
||||
this.assetFailureLoading = true;
|
||||
getAssetImportFailures(
|
||||
getBaseStaffAssetImportFailures(
|
||||
this.assetCurrentTaskId,
|
||||
this.assetFailureQueryParams.pageNum,
|
||||
this.assetFailureQueryParams.pageSize
|
||||
|
||||
@@ -6,16 +6,19 @@ const baseStaffApiPath = path.resolve(
|
||||
__dirname,
|
||||
"../../src/api/ccdiBaseStaff.js"
|
||||
);
|
||||
const assetApiPath = path.resolve(
|
||||
const baseStaffAssetApiPath = path.resolve(
|
||||
__dirname,
|
||||
"../../src/api/ccdiAssetInfo.js"
|
||||
"../../src/api/ccdiBaseStaffAsset.js"
|
||||
);
|
||||
|
||||
assert(fs.existsSync(baseStaffApiPath), "未找到员工 API 文件 ccdiBaseStaff.js");
|
||||
assert(fs.existsSync(assetApiPath), "未找到员工资产 API 文件 ccdiAssetInfo.js");
|
||||
assert(
|
||||
fs.existsSync(baseStaffAssetApiPath),
|
||||
"未找到员工资产 API 文件 ccdiBaseStaffAsset.js"
|
||||
);
|
||||
|
||||
const baseStaffSource = fs.readFileSync(baseStaffApiPath, "utf8");
|
||||
const assetSource = fs.readFileSync(assetApiPath, "utf8");
|
||||
const assetSource = fs.readFileSync(baseStaffAssetApiPath, "utf8");
|
||||
|
||||
[
|
||||
"export function addBaseStaff(data)",
|
||||
@@ -29,16 +32,25 @@ const assetSource = fs.readFileSync(assetApiPath, "utf8");
|
||||
});
|
||||
|
||||
[
|
||||
"export function importAssetTemplate()",
|
||||
"export function importAssetData(data)",
|
||||
"export function getAssetImportStatus(taskId)",
|
||||
"export function getAssetImportFailures(taskId, pageNum, pageSize)",
|
||||
"export function importBaseStaffAssetTemplate()",
|
||||
"export function importBaseStaffAssetData(data)",
|
||||
"export function getBaseStaffAssetImportStatus(taskId)",
|
||||
"export function getBaseStaffAssetImportFailures(taskId, pageNum, pageSize)",
|
||||
"/ccdi/baseStaff/asset/importTemplate",
|
||||
"/ccdi/baseStaff/asset/importData",
|
||||
"/ccdi/baseStaff/asset/importStatus/",
|
||||
"/ccdi/baseStaff/asset/importFailures/",
|
||||
].forEach((token) => {
|
||||
assert(assetSource.includes(token), `员工资产 API 缺少关键契约: ${token}`);
|
||||
});
|
||||
|
||||
[
|
||||
"/ccdi/assetInfo/importTemplate",
|
||||
"/ccdi/assetInfo/importData",
|
||||
"/ccdi/assetInfo/importStatus/",
|
||||
"/ccdi/assetInfo/importFailures/",
|
||||
].forEach((token) => {
|
||||
assert(assetSource.includes(token), `员工资产 API 缺少关键契约: ${token}`);
|
||||
assert(!assetSource.includes(token), `员工资产 API 不应再引用旧接口: ${token}`);
|
||||
});
|
||||
|
||||
console.log("employee-asset-api-contract test passed");
|
||||
|
||||
@@ -17,6 +17,13 @@ const source = fs.readFileSync(componentPath, "utf8");
|
||||
"assetFailureDialogVisible",
|
||||
"employee_asset_import_last_task",
|
||||
"员工资产数据导入",
|
||||
"下载员工资产模板",
|
||||
"@/api/ccdiBaseStaffAsset",
|
||||
"/ccdi/baseStaff/asset/importData",
|
||||
"ccdi/baseStaff/asset/importTemplate",
|
||||
"getBaseStaffAssetImportStatus",
|
||||
"getBaseStaffAssetImportFailures",
|
||||
"仅支持导入员工本人资产数据",
|
||||
].forEach((token) => {
|
||||
assert(
|
||||
source.includes(token),
|
||||
@@ -24,4 +31,17 @@ const source = fs.readFileSync(componentPath, "utf8");
|
||||
);
|
||||
});
|
||||
|
||||
[
|
||||
'from "@/api/ccdiAssetInfo"',
|
||||
"/ccdi/assetInfo/importData",
|
||||
"ccdi/assetInfo/importTemplate",
|
||||
"getAssetImportStatus(taskId)",
|
||||
"getAssetImportFailures(",
|
||||
].forEach((token) => {
|
||||
assert(
|
||||
!source.includes(token),
|
||||
`员工资产导入 UI 不应再引用亲属资产接口: ${token}`
|
||||
);
|
||||
});
|
||||
|
||||
console.log("employee-asset-import-ui test passed");
|
||||
|
||||
@@ -42,4 +42,13 @@ const assetSource = fs.readFileSync(assetApiPath, "utf8");
|
||||
assert(assetSource.includes(token), `亲属资产 API 缺少关键契约: ${token}`);
|
||||
});
|
||||
|
||||
[
|
||||
"/ccdi/baseStaff/asset/importTemplate",
|
||||
"/ccdi/baseStaff/asset/importData",
|
||||
"/ccdi/baseStaff/asset/importStatus/",
|
||||
"/ccdi/baseStaff/asset/importFailures/",
|
||||
].forEach((token) => {
|
||||
assert(!assetSource.includes(token), `亲属资产 API 不应引用员工资产接口: ${token}`);
|
||||
});
|
||||
|
||||
console.log("staff-family-asset-api-contract test passed");
|
||||
|
||||
@@ -24,6 +24,10 @@ const source = fs.readFileSync(componentPath, "utf8");
|
||||
"startAssetImportStatusPolling(taskId)",
|
||||
"getAssetFailureList()",
|
||||
"clearAssetImportHistory()",
|
||||
'from "@/api/ccdiAssetInfo"',
|
||||
"/ccdi/assetInfo/importData",
|
||||
"ccdi/assetInfo/importTemplate",
|
||||
"亲属资产信息模板_",
|
||||
].forEach((token) => {
|
||||
assert(source.includes(token), `亲属资产导入 UI 缺少关键结构或状态: ${token}`);
|
||||
});
|
||||
@@ -37,4 +41,16 @@ const source = fs.readFileSync(componentPath, "utf8");
|
||||
assert(source.includes(token), `详情展示或禁改逻辑缺少关键结构: ${token}`);
|
||||
});
|
||||
|
||||
[
|
||||
"@/api/ccdiBaseStaffAsset",
|
||||
"/ccdi/baseStaff/asset/importData",
|
||||
"ccdi/baseStaff/asset/importTemplate",
|
||||
"下载员工资产模板",
|
||||
].forEach((token) => {
|
||||
assert(
|
||||
!source.includes(token),
|
||||
`亲属资产导入 UI 不应引用员工资产导入实现: ${token}`
|
||||
);
|
||||
});
|
||||
|
||||
console.log("staff-family-asset-detail-import-ui test passed");
|
||||
|
||||
Reference in New Issue
Block a user