Compare commits
46 Commits
dev-ui
...
c278d11390
| Author | SHA1 | Date | |
|---|---|---|---|
| c278d11390 | |||
| e0629f22e5 | |||
| 03ecbbd204 | |||
| eabd38fa58 | |||
| 03a4acb63a | |||
| 3286795f98 | |||
| 4c6ca52e7e | |||
| cc1a4538af | |||
| 5aaf6c83be | |||
| cb3265e796 | |||
| 8798aa9230 | |||
| 2fdf5f1546 | |||
| a32be65bf1 | |||
| 51810a325e | |||
| 6b24e02ba9 | |||
| d831edcaa4 | |||
| af63607069 | |||
| 0abc84c571 | |||
| 7dafabf7cb | |||
| 4dca2b2b63 | |||
| 001597d5e8 | |||
| 4b5ac7388c | |||
| 1e0813a84c | |||
| c8d45416cf | |||
| 09119a2365 | |||
| 5de46eabc5 | |||
| bcb2e39099 | |||
| 09b4cfe3c4 | |||
| c5a00f26ad | |||
| d4dc66a514 | |||
| 2877e26fa5 | |||
| 1a19dcbc13 | |||
| f981dc9906 | |||
| f0e2595a2b | |||
| 37e0c231a7 | |||
| 1397f12057 | |||
| 46e476e35b | |||
| bfac1f10d2 | |||
| d01362cc72 | |||
| 2aee9ff76e | |||
| 5b91cee935 | |||
| a3f49dc176 | |||
| 127a59bf78 | |||
| 988c2d3572 | |||
| f4a72a6110 | |||
| 3741ef5fe4 |
10
.gitignore
vendored
10
.gitignore
vendored
@@ -79,4 +79,12 @@ output/
|
||||
|
||||
logs/
|
||||
|
||||
.DS_Store
|
||||
.DS_Store
|
||||
|
||||
ruoyi-ui/vue.config.js
|
||||
|
||||
*/src/test/
|
||||
|
||||
.pytest_cache/
|
||||
|
||||
tests/
|
||||
17
.mcp.json
17
.mcp.json
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "node",
|
||||
"args": [
|
||||
"C:/Users/wkc/.codex/mcp-tools/mysql-server/node_modules/@fhuang/mcp-mysql-server/build/index.js"
|
||||
],
|
||||
"env": {
|
||||
"MYSQL_DATABASE": "ccdi",
|
||||
"MYSQL_HOST": "116.62.17.81",
|
||||
"MYSQL_PASSWORD": "Kfcx@1234",
|
||||
"MYSQL_PORT": "3306",
|
||||
"MYSQL_USER": "root"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
24
.opencode
24
.opencode
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"$schema": "https://opencode.ai/config.json",
|
||||
"plugin": [
|
||||
"oh-my-opencode@latest"
|
||||
],
|
||||
"agent": {
|
||||
"Sisyphus-Junior": {
|
||||
"mode": "subagent",
|
||||
"model": "glm/glm-5"
|
||||
},
|
||||
"oracle": {
|
||||
"mode": "subagent",
|
||||
"model": "gmn/gpt-5.3-codex"
|
||||
},
|
||||
"Metis (Plan Consultant)": {
|
||||
"mode": "subagent",
|
||||
"model": "gmn/gpt-5.3-codex"
|
||||
},
|
||||
"Momus (Plan Critic)": {
|
||||
"mode": "subagent",
|
||||
"model": "gmn/gpt-5.3-codex"
|
||||
}
|
||||
}
|
||||
}
|
||||
BIN
assets/异常账户.xlsx
Normal file
BIN
assets/异常账户.xlsx
Normal file
Binary file not shown.
@@ -57,6 +57,12 @@
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.ruoyi</groupId>
|
||||
<artifactId>ccdi-lsfx</artifactId>
|
||||
<version>3.9.1</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
|
||||
@@ -0,0 +1,146 @@
|
||||
package com.ruoyi.info.collection.controller;
|
||||
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.common.annotation.Log;
|
||||
import com.ruoyi.common.core.controller.BaseController;
|
||||
import com.ruoyi.common.core.domain.AjaxResult;
|
||||
import com.ruoyi.common.core.page.PageDomain;
|
||||
import com.ruoyi.common.core.page.TableDataInfo;
|
||||
import com.ruoyi.common.core.page.TableSupport;
|
||||
import com.ruoyi.common.enums.BusinessType;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoAddDTO;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoEditDTO;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoQueryDTO;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiEnterpriseBaseInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.CcdiEnterpriseBaseInfoVO;
|
||||
import com.ruoyi.info.collection.domain.vo.EnterpriseBaseInfoImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportResultVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
import com.ruoyi.info.collection.service.ICcdiEnterpriseBaseInfoImportService;
|
||||
import com.ruoyi.info.collection.service.ICcdiEnterpriseBaseInfoService;
|
||||
import com.ruoyi.info.collection.utils.EasyExcelUtil;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import jakarta.annotation.Resource;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.PutMapping;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 实体库管理 Controller
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-17
|
||||
*/
|
||||
@Tag(name = "实体库管理")
|
||||
@RestController
|
||||
@RequestMapping("/ccdi/enterpriseBaseInfo")
|
||||
public class CcdiEnterpriseBaseInfoController extends BaseController {
|
||||
|
||||
@Resource
|
||||
private ICcdiEnterpriseBaseInfoService enterpriseBaseInfoService;
|
||||
|
||||
@Resource
|
||||
private ICcdiEnterpriseBaseInfoImportService enterpriseBaseInfoImportService;
|
||||
|
||||
@Operation(summary = "查询实体库列表")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:list')")
|
||||
@GetMapping("/list")
|
||||
public TableDataInfo list(CcdiEnterpriseBaseInfoQueryDTO queryDTO) {
|
||||
PageDomain pageDomain = TableSupport.buildPageRequest();
|
||||
Page<CcdiEnterpriseBaseInfoVO> page = new Page<>(pageDomain.getPageNum(), pageDomain.getPageSize());
|
||||
Page<CcdiEnterpriseBaseInfoVO> result = enterpriseBaseInfoService.selectEnterpriseBaseInfoPage(page, queryDTO);
|
||||
return getDataTable(result.getRecords(), result.getTotal());
|
||||
}
|
||||
|
||||
@Operation(summary = "获取实体库详细信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:query')")
|
||||
@GetMapping("/{socialCreditCode}")
|
||||
public AjaxResult getInfo(@PathVariable String socialCreditCode) {
|
||||
return success(enterpriseBaseInfoService.selectEnterpriseBaseInfoById(socialCreditCode));
|
||||
}
|
||||
|
||||
@Operation(summary = "新增实体库信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:add')")
|
||||
@Log(title = "实体库管理", businessType = BusinessType.INSERT)
|
||||
@PostMapping
|
||||
public AjaxResult add(@Validated @RequestBody CcdiEnterpriseBaseInfoAddDTO addDTO) {
|
||||
return toAjax(enterpriseBaseInfoService.insertEnterpriseBaseInfo(addDTO));
|
||||
}
|
||||
|
||||
@Operation(summary = "修改实体库信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:edit')")
|
||||
@Log(title = "实体库管理", businessType = BusinessType.UPDATE)
|
||||
@PutMapping
|
||||
public AjaxResult edit(@Validated @RequestBody CcdiEnterpriseBaseInfoEditDTO editDTO) {
|
||||
return toAjax(enterpriseBaseInfoService.updateEnterpriseBaseInfo(editDTO));
|
||||
}
|
||||
|
||||
@Operation(summary = "删除实体库信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:remove')")
|
||||
@Log(title = "实体库管理", businessType = BusinessType.DELETE)
|
||||
@DeleteMapping("/{socialCreditCodes}")
|
||||
public AjaxResult remove(@PathVariable String[] socialCreditCodes) {
|
||||
return toAjax(enterpriseBaseInfoService.deleteEnterpriseBaseInfoByIds(socialCreditCodes));
|
||||
}
|
||||
|
||||
@Operation(summary = "下载导入模板")
|
||||
@PostMapping("/importTemplate")
|
||||
public void importTemplate(HttpServletResponse response) {
|
||||
EasyExcelUtil.importTemplateWithDictDropdown(response, CcdiEnterpriseBaseInfoExcel.class, "实体库管理");
|
||||
}
|
||||
|
||||
@Operation(summary = "导入实体库信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:import')")
|
||||
@Log(title = "实体库管理", businessType = BusinessType.IMPORT)
|
||||
@PostMapping("/importData")
|
||||
public AjaxResult importData(MultipartFile file) throws Exception {
|
||||
List<CcdiEnterpriseBaseInfoExcel> list = EasyExcelUtil.importExcel(file.getInputStream(), CcdiEnterpriseBaseInfoExcel.class);
|
||||
if (list == null || list.isEmpty()) {
|
||||
return error("至少需要一条数据");
|
||||
}
|
||||
|
||||
String taskId = enterpriseBaseInfoService.importEnterpriseBaseInfo(list);
|
||||
ImportResultVO result = new ImportResultVO();
|
||||
result.setTaskId(taskId);
|
||||
result.setStatus("PROCESSING");
|
||||
result.setMessage("导入任务已提交,正在后台处理");
|
||||
return AjaxResult.success("导入任务已提交,正在后台处理", result);
|
||||
}
|
||||
|
||||
@Operation(summary = "查询导入状态")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:import')")
|
||||
@GetMapping("/importStatus/{taskId}")
|
||||
public AjaxResult getImportStatus(@PathVariable String taskId) {
|
||||
ImportStatusVO status = enterpriseBaseInfoImportService.getImportStatus(taskId);
|
||||
return success(status);
|
||||
}
|
||||
|
||||
@Operation(summary = "查询导入失败记录")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:enterpriseBaseInfo:import')")
|
||||
@GetMapping("/importFailures/{taskId}")
|
||||
public TableDataInfo getImportFailures(@PathVariable String taskId,
|
||||
@RequestParam(defaultValue = "1") Integer pageNum,
|
||||
@RequestParam(defaultValue = "10") Integer pageSize) {
|
||||
List<EnterpriseBaseInfoImportFailureVO> failures = enterpriseBaseInfoImportService.getImportFailures(taskId);
|
||||
int fromIndex = (pageNum - 1) * pageSize;
|
||||
if (fromIndex >= failures.size()) {
|
||||
return getDataTable(new ArrayList<>(), failures.size());
|
||||
}
|
||||
int toIndex = Math.min(fromIndex + pageSize, failures.size());
|
||||
return getDataTable(failures.subList(fromIndex, toIndex), failures.size());
|
||||
}
|
||||
}
|
||||
@@ -138,4 +138,30 @@ public class CcdiEnumController {
|
||||
}
|
||||
return AjaxResult.success(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取实体风险等级选项
|
||||
*/
|
||||
@Operation(summary = "获取实体风险等级选项")
|
||||
@GetMapping("/enterpriseRiskLevel")
|
||||
public AjaxResult getEnterpriseRiskLevelOptions() {
|
||||
List<EnumOptionVO> options = new ArrayList<>();
|
||||
for (EnterpriseRiskLevel level : EnterpriseRiskLevel.values()) {
|
||||
options.add(new EnumOptionVO(level.getCode(), level.getDesc()));
|
||||
}
|
||||
return AjaxResult.success(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取企业来源选项
|
||||
*/
|
||||
@Operation(summary = "获取企业来源选项")
|
||||
@GetMapping("/enterpriseSource")
|
||||
public AjaxResult getEnterpriseSourceOptions() {
|
||||
List<EnumOptionVO> options = new ArrayList<>();
|
||||
for (EnterpriseSource source : EnterpriseSource.values()) {
|
||||
options.add(new EnumOptionVO(source.getCode(), source.getDesc()));
|
||||
}
|
||||
return AjaxResult.success(options);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
@@ -56,6 +57,42 @@ public class CcdiAccountInfo implements Serializable {
|
||||
/** 币种 */
|
||||
private String currency;
|
||||
|
||||
/** 是否实控账户:0-否 1-是 */
|
||||
@TableField("is_self_account")
|
||||
private Integer isActualControl;
|
||||
|
||||
/** 月均交易笔数 */
|
||||
@TableField("monthly_avg_trans_count")
|
||||
private Integer avgMonthTxnCount;
|
||||
|
||||
/** 月均交易金额 */
|
||||
@TableField("monthly_avg_trans_amount")
|
||||
private BigDecimal avgMonthTxnAmount;
|
||||
|
||||
/** 交易频率等级 */
|
||||
@TableField("trans_freq_type")
|
||||
private String txnFrequencyLevel;
|
||||
|
||||
/** 借方单笔最高额 */
|
||||
@TableField("dr_max_single_amount")
|
||||
private BigDecimal debitSingleMaxAmount;
|
||||
|
||||
/** 贷方单笔最高额 */
|
||||
@TableField("cr_max_single_amount")
|
||||
private BigDecimal creditSingleMaxAmount;
|
||||
|
||||
/** 借方日累计最高额 */
|
||||
@TableField("dr_max_daily_amount")
|
||||
private BigDecimal debitDailyMaxAmount;
|
||||
|
||||
/** 贷方日累计最高额 */
|
||||
@TableField("cr_max_daily_amount")
|
||||
private BigDecimal creditDailyMaxAmount;
|
||||
|
||||
/** 风险等级 */
|
||||
@TableField("trans_risk_level")
|
||||
private String txnRiskLevel;
|
||||
|
||||
/** 状态:1-正常 2-已销户 */
|
||||
private Integer status;
|
||||
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
package com.ruoyi.info.collection.domain;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.FieldFill;
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* 账户分析结果对象 ccdi_account_result
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-13
|
||||
*/
|
||||
@Data
|
||||
@TableName("ccdi_account_result")
|
||||
public class CcdiAccountResult implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/** 主键ID */
|
||||
@TableId(value = "result_id", type = IdType.AUTO)
|
||||
private Long resultId;
|
||||
|
||||
/** 账户号码 */
|
||||
private String accountNo;
|
||||
|
||||
/** 是否实控账户:0-否 1-是 */
|
||||
@TableField("is_self_account")
|
||||
private Integer isActualControl;
|
||||
|
||||
/** 月均交易笔数 */
|
||||
@TableField("monthly_avg_trans_count")
|
||||
private Integer avgMonthTxnCount;
|
||||
|
||||
/** 月均交易金额 */
|
||||
@TableField("monthly_avg_trans_amount")
|
||||
private BigDecimal avgMonthTxnAmount;
|
||||
|
||||
/** 交易频率等级 */
|
||||
@TableField("trans_freq_type")
|
||||
private String txnFrequencyLevel;
|
||||
|
||||
/** 借方单笔最高额 */
|
||||
@TableField("dr_max_single_amount")
|
||||
private BigDecimal debitSingleMaxAmount;
|
||||
|
||||
/** 贷方单笔最高额 */
|
||||
@TableField("cr_max_single_amount")
|
||||
private BigDecimal creditSingleMaxAmount;
|
||||
|
||||
/** 借方日累计最高额 */
|
||||
@TableField("dr_max_daily_amount")
|
||||
private BigDecimal debitDailyMaxAmount;
|
||||
|
||||
/** 贷方日累计最高额 */
|
||||
@TableField("cr_max_daily_amount")
|
||||
private BigDecimal creditDailyMaxAmount;
|
||||
|
||||
/** 风险等级 */
|
||||
@TableField("trans_risk_level")
|
||||
private String txnRiskLevel;
|
||||
|
||||
/** 创建者 */
|
||||
@TableField(fill = FieldFill.INSERT)
|
||||
private String createBy;
|
||||
|
||||
/** 创建时间 */
|
||||
@TableField(fill = FieldFill.INSERT)
|
||||
private Date createTime;
|
||||
|
||||
/** 更新者 */
|
||||
@TableField(fill = FieldFill.INSERT_UPDATE)
|
||||
private String updateBy;
|
||||
|
||||
/** 更新时间 */
|
||||
@TableField(fill = FieldFill.INSERT_UPDATE)
|
||||
private Date updateTime;
|
||||
}
|
||||
@@ -43,6 +43,10 @@ public class CcdiBaseStaff implements Serializable {
|
||||
/** 入职时间 */
|
||||
private Date hireDate;
|
||||
|
||||
/** 是否党员:0-否 1-是 */
|
||||
@TableField("is_party_member")
|
||||
private Integer partyMember;
|
||||
|
||||
/** 状态 */
|
||||
private String status;
|
||||
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
package com.ruoyi.info.collection.domain;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.FieldFill;
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* 中介关联机构关系对象 ccdi_intermediary_enterprise_relation
|
||||
*/
|
||||
@Data
|
||||
@TableName("ccdi_intermediary_enterprise_relation")
|
||||
public class CcdiIntermediaryEnterpriseRelation implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@TableId(type = IdType.AUTO)
|
||||
private Long id;
|
||||
|
||||
private String intermediaryBizId;
|
||||
|
||||
private String socialCreditCode;
|
||||
|
||||
private String relationPersonPost;
|
||||
|
||||
private String remark;
|
||||
|
||||
@TableField(fill = FieldFill.INSERT)
|
||||
private String createdBy;
|
||||
|
||||
@TableField(fill = FieldFill.INSERT)
|
||||
private Date createTime;
|
||||
|
||||
@TableField(fill = FieldFill.INSERT_UPDATE)
|
||||
private String updatedBy;
|
||||
|
||||
@TableField(fill = FieldFill.INSERT_UPDATE)
|
||||
private Date updateTime;
|
||||
}
|
||||
@@ -53,6 +53,10 @@ public class CcdiBaseStaffAddDTO implements Serializable {
|
||||
/** 入职时间 */
|
||||
private Date hireDate;
|
||||
|
||||
/** 是否党员:0-否 1-是 */
|
||||
@NotNull(message = "是否党员不能为空")
|
||||
private Integer partyMember;
|
||||
|
||||
/** 状态 */
|
||||
@NotBlank(message = "状态不能为空")
|
||||
private String status;
|
||||
|
||||
@@ -52,6 +52,10 @@ public class CcdiBaseStaffEditDTO implements Serializable {
|
||||
/** 入职时间 */
|
||||
private Date hireDate;
|
||||
|
||||
/** 是否党员:0-否 1-是 */
|
||||
@NotNull(message = "是否党员不能为空")
|
||||
private Integer partyMember;
|
||||
|
||||
/** 状态 */
|
||||
private String status;
|
||||
|
||||
|
||||
@@ -0,0 +1,107 @@
|
||||
package com.ruoyi.info.collection.domain.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* 实体库管理新增 DTO
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-17
|
||||
*/
|
||||
@Data
|
||||
@Schema(description = "实体库管理新增DTO")
|
||||
public class CcdiEnterpriseBaseInfoAddDTO implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Schema(description = "统一社会信用代码")
|
||||
@NotBlank(message = "统一社会信用代码不能为空")
|
||||
@Pattern(regexp = "^[0-9A-HJ-NPQRTUWXY]{2}\\d{6}[0-9A-HJ-NPQRTUWXY]{10}$", message = "统一社会信用代码格式不正确")
|
||||
private String socialCreditCode;
|
||||
|
||||
@Schema(description = "企业名称")
|
||||
@NotBlank(message = "企业名称不能为空")
|
||||
@Size(max = 200, message = "企业名称长度不能超过200个字符")
|
||||
private String enterpriseName;
|
||||
|
||||
@Schema(description = "企业类型")
|
||||
@Size(max = 50, message = "企业类型长度不能超过50个字符")
|
||||
private String enterpriseType;
|
||||
|
||||
@Schema(description = "企业性质")
|
||||
@Size(max = 50, message = "企业性质长度不能超过50个字符")
|
||||
private String enterpriseNature;
|
||||
|
||||
@Schema(description = "行业分类")
|
||||
@Size(max = 100, message = "行业分类长度不能超过100个字符")
|
||||
private String industryClass;
|
||||
|
||||
@Schema(description = "所属行业")
|
||||
@Size(max = 100, message = "所属行业长度不能超过100个字符")
|
||||
private String industryName;
|
||||
|
||||
@Schema(description = "成立日期")
|
||||
private Date establishDate;
|
||||
|
||||
@Schema(description = "注册地址")
|
||||
@Size(max = 500, message = "注册地址长度不能超过500个字符")
|
||||
private String registerAddress;
|
||||
|
||||
@Schema(description = "法定代表人")
|
||||
@Size(max = 100, message = "法定代表人长度不能超过100个字符")
|
||||
private String legalRepresentative;
|
||||
|
||||
@Schema(description = "法定代表人证件类型")
|
||||
@Size(max = 50, message = "法定代表人证件类型长度不能超过50个字符")
|
||||
private String legalCertType;
|
||||
|
||||
@Schema(description = "法定代表人证件号码")
|
||||
@Size(max = 50, message = "法定代表人证件号码长度不能超过50个字符")
|
||||
private String legalCertNo;
|
||||
|
||||
@Schema(description = "股东1")
|
||||
@Size(max = 100, message = "股东1长度不能超过100个字符")
|
||||
private String shareholder1;
|
||||
|
||||
@Schema(description = "股东2")
|
||||
@Size(max = 100, message = "股东2长度不能超过100个字符")
|
||||
private String shareholder2;
|
||||
|
||||
@Schema(description = "股东3")
|
||||
@Size(max = 100, message = "股东3长度不能超过100个字符")
|
||||
private String shareholder3;
|
||||
|
||||
@Schema(description = "股东4")
|
||||
@Size(max = 100, message = "股东4长度不能超过100个字符")
|
||||
private String shareholder4;
|
||||
|
||||
@Schema(description = "股东5")
|
||||
@Size(max = 100, message = "股东5长度不能超过100个字符")
|
||||
private String shareholder5;
|
||||
|
||||
@Schema(description = "经营状态")
|
||||
@NotBlank(message = "经营状态不能为空")
|
||||
@Size(max = 50, message = "经营状态长度不能超过50个字符")
|
||||
private String status;
|
||||
|
||||
@Schema(description = "风险等级")
|
||||
@NotBlank(message = "风险等级不能为空")
|
||||
private String riskLevel;
|
||||
|
||||
@Schema(description = "企业来源")
|
||||
@NotBlank(message = "企业来源不能为空")
|
||||
private String entSource;
|
||||
|
||||
@Schema(description = "数据来源")
|
||||
@NotBlank(message = "数据来源不能为空")
|
||||
private String dataSource;
|
||||
}
|
||||
@@ -0,0 +1,107 @@
|
||||
package com.ruoyi.info.collection.domain.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* 实体库管理编辑 DTO
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-17
|
||||
*/
|
||||
@Data
|
||||
@Schema(description = "实体库管理编辑DTO")
|
||||
public class CcdiEnterpriseBaseInfoEditDTO implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Schema(description = "统一社会信用代码")
|
||||
@NotBlank(message = "统一社会信用代码不能为空")
|
||||
@Pattern(regexp = "^[0-9A-HJ-NPQRTUWXY]{2}\\d{6}[0-9A-HJ-NPQRTUWXY]{10}$", message = "统一社会信用代码格式不正确")
|
||||
private String socialCreditCode;
|
||||
|
||||
@Schema(description = "企业名称")
|
||||
@NotBlank(message = "企业名称不能为空")
|
||||
@Size(max = 200, message = "企业名称长度不能超过200个字符")
|
||||
private String enterpriseName;
|
||||
|
||||
@Schema(description = "企业类型")
|
||||
@Size(max = 50, message = "企业类型长度不能超过50个字符")
|
||||
private String enterpriseType;
|
||||
|
||||
@Schema(description = "企业性质")
|
||||
@Size(max = 50, message = "企业性质长度不能超过50个字符")
|
||||
private String enterpriseNature;
|
||||
|
||||
@Schema(description = "行业分类")
|
||||
@Size(max = 100, message = "行业分类长度不能超过100个字符")
|
||||
private String industryClass;
|
||||
|
||||
@Schema(description = "所属行业")
|
||||
@Size(max = 100, message = "所属行业长度不能超过100个字符")
|
||||
private String industryName;
|
||||
|
||||
@Schema(description = "成立日期")
|
||||
private Date establishDate;
|
||||
|
||||
@Schema(description = "注册地址")
|
||||
@Size(max = 500, message = "注册地址长度不能超过500个字符")
|
||||
private String registerAddress;
|
||||
|
||||
@Schema(description = "法定代表人")
|
||||
@Size(max = 100, message = "法定代表人长度不能超过100个字符")
|
||||
private String legalRepresentative;
|
||||
|
||||
@Schema(description = "法定代表人证件类型")
|
||||
@Size(max = 50, message = "法定代表人证件类型长度不能超过50个字符")
|
||||
private String legalCertType;
|
||||
|
||||
@Schema(description = "法定代表人证件号码")
|
||||
@Size(max = 50, message = "法定代表人证件号码长度不能超过50个字符")
|
||||
private String legalCertNo;
|
||||
|
||||
@Schema(description = "股东1")
|
||||
@Size(max = 100, message = "股东1长度不能超过100个字符")
|
||||
private String shareholder1;
|
||||
|
||||
@Schema(description = "股东2")
|
||||
@Size(max = 100, message = "股东2长度不能超过100个字符")
|
||||
private String shareholder2;
|
||||
|
||||
@Schema(description = "股东3")
|
||||
@Size(max = 100, message = "股东3长度不能超过100个字符")
|
||||
private String shareholder3;
|
||||
|
||||
@Schema(description = "股东4")
|
||||
@Size(max = 100, message = "股东4长度不能超过100个字符")
|
||||
private String shareholder4;
|
||||
|
||||
@Schema(description = "股东5")
|
||||
@Size(max = 100, message = "股东5长度不能超过100个字符")
|
||||
private String shareholder5;
|
||||
|
||||
@Schema(description = "经营状态")
|
||||
@NotBlank(message = "经营状态不能为空")
|
||||
@Size(max = 50, message = "经营状态长度不能超过50个字符")
|
||||
private String status;
|
||||
|
||||
@Schema(description = "风险等级")
|
||||
@NotBlank(message = "风险等级不能为空")
|
||||
private String riskLevel;
|
||||
|
||||
@Schema(description = "企业来源")
|
||||
@NotBlank(message = "企业来源不能为空")
|
||||
private String entSource;
|
||||
|
||||
@Schema(description = "数据来源")
|
||||
@NotBlank(message = "数据来源不能为空")
|
||||
private String dataSource;
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
package com.ruoyi.info.collection.domain.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* 实体库管理查询 DTO
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-17
|
||||
*/
|
||||
@Data
|
||||
@Schema(description = "实体库管理查询DTO")
|
||||
public class CcdiEnterpriseBaseInfoQueryDTO implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Schema(description = "企业名称")
|
||||
private String enterpriseName;
|
||||
|
||||
@Schema(description = "统一社会信用代码")
|
||||
private String socialCreditCode;
|
||||
|
||||
@Schema(description = "企业类型")
|
||||
private String enterpriseType;
|
||||
|
||||
@Schema(description = "企业性质")
|
||||
private String enterpriseNature;
|
||||
|
||||
@Schema(description = "行业分类")
|
||||
private String industryClass;
|
||||
|
||||
@Schema(description = "经营状态")
|
||||
private String status;
|
||||
|
||||
@Schema(description = "风险等级")
|
||||
private String riskLevel;
|
||||
|
||||
@Schema(description = "企业来源")
|
||||
private String entSource;
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
package com.ruoyi.info.collection.domain.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* 中介关联机构新增DTO
|
||||
*/
|
||||
@Data
|
||||
@Schema(description = "中介关联机构新增DTO")
|
||||
public class CcdiIntermediaryEnterpriseRelationAddDTO implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Schema(description = "统一社会信用代码")
|
||||
@NotBlank(message = "统一社会信用代码不能为空")
|
||||
@Size(max = 18, message = "统一社会信用代码长度不能超过18个字符")
|
||||
private String socialCreditCode;
|
||||
|
||||
@Schema(description = "关联角色/职务")
|
||||
@Size(max = 100, message = "关联角色/职务长度不能超过100个字符")
|
||||
private String relationPersonPost;
|
||||
|
||||
@Schema(description = "备注")
|
||||
@Size(max = 500, message = "备注长度不能超过500个字符")
|
||||
private String remark;
|
||||
}
|
||||
@@ -0,0 +1,38 @@
|
||||
package com.ruoyi.info.collection.domain.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* 中介关联机构编辑DTO
|
||||
*/
|
||||
@Data
|
||||
@Schema(description = "中介关联机构编辑DTO")
|
||||
public class CcdiIntermediaryEnterpriseRelationEditDTO implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Schema(description = "主键ID")
|
||||
@NotNull(message = "主键ID不能为空")
|
||||
private Long id;
|
||||
|
||||
@Schema(description = "统一社会信用代码")
|
||||
@NotBlank(message = "统一社会信用代码不能为空")
|
||||
@Size(max = 18, message = "统一社会信用代码长度不能超过18个字符")
|
||||
private String socialCreditCode;
|
||||
|
||||
@Schema(description = "关联角色/职务")
|
||||
@Size(max = 100, message = "关联角色/职务长度不能超过100个字符")
|
||||
private String relationPersonPost;
|
||||
|
||||
@Schema(description = "备注")
|
||||
@Size(max = 500, message = "备注长度不能超过500个字符")
|
||||
private String remark;
|
||||
}
|
||||
@@ -0,0 +1,72 @@
|
||||
package com.ruoyi.info.collection.domain.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
|
||||
import java.io.Serial;
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* 中介亲属新增DTO
|
||||
*/
|
||||
@Data
|
||||
@Schema(description = "中介亲属新增DTO")
|
||||
public class CcdiIntermediaryRelativeAddDTO implements Serializable {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Schema(description = "姓名")
|
||||
@NotBlank(message = "姓名不能为空")
|
||||
@Size(max = 100, message = "姓名长度不能超过100个字符")
|
||||
private String name;
|
||||
|
||||
@Schema(description = "人员类型")
|
||||
private String personType;
|
||||
|
||||
@Schema(description = "亲属关系")
|
||||
@NotBlank(message = "亲属关系不能为空")
|
||||
@Size(max = 50, message = "亲属关系长度不能超过50个字符")
|
||||
private String personSubType;
|
||||
|
||||
@Schema(description = "性别")
|
||||
private String gender;
|
||||
|
||||
@Schema(description = "证件类型")
|
||||
private String idType;
|
||||
|
||||
@Schema(description = "证件号码")
|
||||
@NotBlank(message = "证件号码不能为空")
|
||||
@Size(max = 50, message = "证件号码长度不能超过50个字符")
|
||||
private String personId;
|
||||
|
||||
@Schema(description = "手机号码")
|
||||
@Size(max = 20, message = "手机号码长度不能超过20个字符")
|
||||
private String mobile;
|
||||
|
||||
@Schema(description = "微信号")
|
||||
@Size(max = 50, message = "微信号长度不能超过50个字符")
|
||||
private String wechatNo;
|
||||
|
||||
@Schema(description = "联系地址")
|
||||
@Size(max = 200, message = "联系地址长度不能超过200个字符")
|
||||
private String contactAddress;
|
||||
|
||||
@Schema(description = "所在公司")
|
||||
@Size(max = 200, message = "所在公司长度不能超过200个字符")
|
||||
private String company;
|
||||
|
||||
@Schema(description = "企业统一信用码")
|
||||
@Size(max = 50, message = "企业统一信用码长度不能超过50个字符")
|
||||
private String socialCreditCode;
|
||||
|
||||
@Schema(description = "职位")
|
||||
@Size(max = 100, message = "职位长度不能超过100个字符")
|
||||
private String position;
|
||||
|
||||
@Schema(description = "备注")
|
||||
@Size(max = 500, message = "备注长度不能超过500个字符")
|
||||
private String remark;
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package com.ruoyi.info.collection.domain.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import lombok.Data;

import java.io.Serial;
import java.io.Serializable;

/**
 * Edit DTO for an intermediary's relative (中介亲属编辑DTO).
 *
 * <p>Carries the editable fields submitted when updating an existing relative
 * record; {@code bizId} identifies the record to update. Validation limits
 * mirror the column sizes of the backing table — TODO confirm against schema.</p>
 */
@Data
@Schema(description = "中介亲属编辑DTO")
public class CcdiIntermediaryRelativeEditDTO implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /** Business key of the person record being edited; required. */
    @Schema(description = "人员ID")
    @NotBlank(message = "人员ID不能为空")
    private String bizId;

    /** Full name; required, at most 100 characters. */
    @Schema(description = "姓名")
    @NotBlank(message = "姓名不能为空")
    @Size(max = 100, message = "姓名长度不能超过100个字符")
    private String name;

    /** Person category code (presumably a dictionary value — verify against caller). */
    @Schema(description = "人员类型")
    private String personType;

    /** Kinship relation to the intermediary; required. */
    @Schema(description = "亲属关系")
    @NotBlank(message = "亲属关系不能为空")
    @Size(max = 50, message = "亲属关系长度不能超过50个字符")
    private String personSubType;

    /** Gender code; optional, not validated here. */
    @Schema(description = "性别")
    private String gender;

    /** Identity document type code; optional, not validated here. */
    @Schema(description = "证件类型")
    private String idType;

    /** Identity document number. */
    @Schema(description = "证件号码")
    @Size(max = 50, message = "证件号码长度不能超过50个字符")
    private String personId;

    /** Mobile phone number. */
    @Schema(description = "手机号码")
    @Size(max = 20, message = "手机号码长度不能超过20个字符")
    private String mobile;

    /** WeChat account. */
    @Schema(description = "微信号")
    @Size(max = 50, message = "微信号长度不能超过50个字符")
    private String wechatNo;

    /** Contact address. */
    @Schema(description = "联系地址")
    @Size(max = 200, message = "联系地址长度不能超过200个字符")
    private String contactAddress;

    /** Employer / company name. */
    @Schema(description = "所在公司")
    @Size(max = 200, message = "所在公司长度不能超过200个字符")
    private String company;

    /** Unified social credit code of the employer. */
    @Schema(description = "企业统一信用码")
    @Size(max = 50, message = "企业统一信用码长度不能超过50个字符")
    private String socialCreditCode;

    /** Job title. */
    @Schema(description = "职位")
    @Size(max = 100, message = "职位长度不能超过100个字符")
    private String position;

    /** Free-form remark. */
    @Schema(description = "备注")
    @Size(max = 500, message = "备注长度不能超过500个字符")
    private String remark;
}
|
||||
@@ -63,8 +63,15 @@ public class CcdiBaseStaffExcel implements Serializable {
|
||||
@ColumnWidth(15)
|
||||
private Date hireDate;
|
||||
|
||||
/** 是否党员 */
|
||||
@ExcelProperty(value = "是否党员", index = 7)
|
||||
@ColumnWidth(12)
|
||||
@DictDropdown(dictType = "ccdi_yes_no_flag")
|
||||
@Required
|
||||
private Integer partyMember;
|
||||
|
||||
/** 状态 */
|
||||
@ExcelProperty(value = "状态", index = 7)
|
||||
@ExcelProperty(value = "状态", index = 8)
|
||||
@ColumnWidth(10)
|
||||
@DictDropdown(dictType = "ccdi_employee_status")
|
||||
@Required
|
||||
|
||||
@@ -0,0 +1,106 @@
|
||||
package com.ruoyi.info.collection.domain.excel;

import com.alibaba.excel.annotation.ExcelProperty;
import com.alibaba.excel.annotation.write.style.ColumnWidth;
import com.ruoyi.common.annotation.DictDropdown;
import lombok.Data;

import java.io.Serial;
import java.io.Serializable;
import java.util.Date;

/**
 * 实体库管理 Excel 导入模板对象
 *
 * <p>Row model for the enterprise base-info Excel import/export template.
 * Column positions are fixed by {@code index}; headers ending in {@code *}
 * mark columns that are required on import. Fields are documented per entry,
 * consistent with the sibling {@code CcdiBaseStaffExcel} template.</p>
 *
 * @author ruoyi
 * @date 2026-04-17
 */
@Data
public class CcdiEnterpriseBaseInfoExcel implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /** 统一社会信用代码 — unified social credit code, required, unique business key. */
    @ExcelProperty(value = "统一社会信用代码*", index = 0)
    @ColumnWidth(24)
    private String socialCreditCode;

    /** 企业名称 — enterprise name, required. */
    @ExcelProperty(value = "企业名称*", index = 1)
    @ColumnWidth(30)
    private String enterpriseName;

    /** 企业类型 — dictionary-backed dropdown (ccdi_entity_type). */
    @ExcelProperty(value = "企业类型", index = 2)
    @ColumnWidth(18)
    @DictDropdown(dictType = "ccdi_entity_type")
    private String enterpriseType;

    /** 企业性质 — dictionary-backed dropdown (ccdi_enterprise_nature). */
    @ExcelProperty(value = "企业性质", index = 3)
    @ColumnWidth(18)
    @DictDropdown(dictType = "ccdi_enterprise_nature")
    private String enterpriseNature;

    /** 行业分类 — industry classification. */
    @ExcelProperty(value = "行业分类", index = 4)
    @ColumnWidth(18)
    private String industryClass;

    /** 所属行业 — industry name. */
    @ExcelProperty(value = "所属行业", index = 5)
    @ColumnWidth(18)
    private String industryName;

    /** 成立日期 — establishment date. */
    @ExcelProperty(value = "成立日期", index = 6)
    @ColumnWidth(16)
    private Date establishDate;

    /** 注册地址 — registered address. */
    @ExcelProperty(value = "注册地址", index = 7)
    @ColumnWidth(36)
    private String registerAddress;

    /** 法定代表人 — legal representative name. */
    @ExcelProperty(value = "法定代表人", index = 8)
    @ColumnWidth(18)
    private String legalRepresentative;

    /** 法定代表人证件类型 — dictionary-backed dropdown (ccdi_certificate_type). */
    @ExcelProperty(value = "法定代表人证件类型", index = 9)
    @ColumnWidth(22)
    @DictDropdown(dictType = "ccdi_certificate_type")
    private String legalCertType;

    /** 法定代表人证件号码 — legal representative certificate number. */
    @ExcelProperty(value = "法定代表人证件号码", index = 10)
    @ColumnWidth(24)
    private String legalCertNo;

    /** 股东1 — shareholder #1. */
    @ExcelProperty(value = "股东1", index = 11)
    @ColumnWidth(18)
    private String shareholder1;

    /** 股东2 — shareholder #2. */
    @ExcelProperty(value = "股东2", index = 12)
    @ColumnWidth(18)
    private String shareholder2;

    /** 股东3 — shareholder #3. */
    @ExcelProperty(value = "股东3", index = 13)
    @ColumnWidth(18)
    private String shareholder3;

    /** 股东4 — shareholder #4. */
    @ExcelProperty(value = "股东4", index = 14)
    @ColumnWidth(18)
    private String shareholder4;

    /** 股东5 — shareholder #5. */
    @ExcelProperty(value = "股东5", index = 15)
    @ColumnWidth(18)
    private String shareholder5;

    /** 经营状态 — operating status, required on import. */
    @ExcelProperty(value = "经营状态*", index = 16)
    @ColumnWidth(16)
    private String status;

    /** 风险等级 — risk level, required on import (see EnterpriseRiskLevel). */
    @ExcelProperty(value = "风险等级*", index = 17)
    @ColumnWidth(16)
    private String riskLevel;

    /** 企业来源 — enterprise source, required on import (see EnterpriseSource). */
    @ExcelProperty(value = "企业来源*", index = 18)
    @ColumnWidth(18)
    private String entSource;

    /** 数据来源 — data source, required on import. */
    @ExcelProperty(value = "数据来源*", index = 19)
    @ColumnWidth(18)
    private String dataSource;
}
|
||||
@@ -44,6 +44,9 @@ public class CcdiBaseStaffVO implements Serializable {
|
||||
/** 入职时间 */
|
||||
private Date hireDate;
|
||||
|
||||
/** 是否党员:0-否 1-是 */
|
||||
private Integer partyMember;
|
||||
|
||||
/** 状态 */
|
||||
private String status;
|
||||
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
package com.ruoyi.info.collection.domain.vo;

import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.io.Serial;
import java.io.Serializable;
import java.util.Date;

/**
 * 实体库管理 VO
 *
 * <p>Read model returned by the enterprise base-info queries. Keyed by the
 * unified social credit code.</p>
 *
 * @author ruoyi
 * @date 2026-04-17
 */
@Data
@Schema(description = "实体库管理VO")
public class CcdiEnterpriseBaseInfoVO implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /** Unified social credit code — business key of the record. */
    @Schema(description = "统一社会信用代码")
    private String socialCreditCode;

    @Schema(description = "企业名称")
    private String enterpriseName;

    @Schema(description = "企业类型")
    private String enterpriseType;

    @Schema(description = "企业性质")
    private String enterpriseNature;

    @Schema(description = "行业分类")
    private String industryClass;

    @Schema(description = "所属行业")
    private String industryName;

    @Schema(description = "成立日期")
    private Date establishDate;

    @Schema(description = "注册地址")
    private String registerAddress;

    @Schema(description = "法定代表人")
    private String legalRepresentative;

    @Schema(description = "法定代表人证件类型")
    private String legalCertType;

    @Schema(description = "法定代表人证件号码")
    private String legalCertNo;

    @Schema(description = "股东1")
    private String shareholder1;

    @Schema(description = "股东2")
    private String shareholder2;

    @Schema(description = "股东3")
    private String shareholder3;

    @Schema(description = "股东4")
    private String shareholder4;

    @Schema(description = "股东5")
    private String shareholder5;

    @Schema(description = "经营状态")
    private String status;

    @Schema(description = "风险等级")
    private String riskLevel;

    @Schema(description = "企业来源")
    private String entSource;

    @Schema(description = "数据来源")
    private String dataSource;

    /**
     * Creation timestamp. Consistency fix: the sibling VOs introduced in this
     * change (CcdiIntermediaryEnterpriseRelationVO, CcdiIntermediaryRelativeVO)
     * both serialize createTime with this pattern; without it this VO would
     * emit a raw timestamp/default format instead.
     */
    @Schema(description = "创建时间")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package com.ruoyi.info.collection.domain.vo;

import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.io.Serial;
import java.io.Serializable;
import java.util.Date;

/**
 * Intermediary / enterprise relation VO (中介关联机构VO).
 *
 * <p>Read model for one link between an intermediary person and an
 * enterprise; includes denormalized intermediary fields for display.</p>
 */
@Data
@Schema(description = "中介关联机构VO")
public class CcdiIntermediaryEnterpriseRelationVO implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /** Primary key of the relation row. */
    @Schema(description = "主键ID")
    private Long id;

    /** Business id of the owning intermediary. */
    @Schema(description = "所属中介ID")
    private String intermediaryBizId;

    /** Intermediary name (denormalized for display). */
    @Schema(description = "所属中介姓名")
    private String intermediaryName;

    /** Intermediary certificate number (denormalized for display). */
    @Schema(description = "所属中介证件号")
    private String intermediaryPersonId;

    /** Unified social credit code of the linked enterprise. */
    @Schema(description = "统一社会信用代码")
    private String socialCreditCode;

    /** Linked enterprise name. */
    @Schema(description = "机构名称")
    private String enterpriseName;

    /** Role or post the intermediary holds in the enterprise. */
    @Schema(description = "关联角色/职务")
    private String relationPersonPost;

    /** Free-form remark. */
    @Schema(description = "备注")
    private String remark;

    /** Creation timestamp, serialized as "yyyy-MM-dd HH:mm:ss". */
    @Schema(description = "创建时间")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;
}
|
||||
@@ -0,0 +1,69 @@
|
||||
package com.ruoyi.info.collection.domain.vo;

import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.io.Serial;
import java.io.Serializable;
import java.util.Date;

/**
 * Intermediary relative VO (中介亲属VO).
 *
 * <p>Read model for a relative of an intermediary; mirrors the fields of
 * {@code CcdiIntermediaryRelativeEditDTO} plus the owning-intermediary link
 * and the creation timestamp.</p>
 */
@Data
@Schema(description = "中介亲属VO")
public class CcdiIntermediaryRelativeVO implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /** Business key of the person record. */
    @Schema(description = "人员ID")
    private String bizId;

    /** Business id of the intermediary this relative belongs to. */
    @Schema(description = "所属中介ID")
    private String relatedNumId;

    /** Full name. */
    @Schema(description = "姓名")
    private String name;

    /** Person category code. */
    @Schema(description = "人员类型")
    private String personType;

    /** Kinship relation to the intermediary. */
    @Schema(description = "亲属关系")
    private String personSubType;

    /** Gender code. */
    @Schema(description = "性别")
    private String gender;

    /** Identity document type code. */
    @Schema(description = "证件类型")
    private String idType;

    /** Identity document number. */
    @Schema(description = "证件号码")
    private String personId;

    /** Mobile phone number. */
    @Schema(description = "手机号码")
    private String mobile;

    /** WeChat account. */
    @Schema(description = "微信号")
    private String wechatNo;

    /** Contact address. */
    @Schema(description = "联系地址")
    private String contactAddress;

    /** Employer / company name. */
    @Schema(description = "所在公司")
    private String company;

    /** Unified social credit code of the employer. */
    @Schema(description = "企业统一信用码")
    private String socialCreditCode;

    /** Job title. */
    @Schema(description = "职位")
    private String position;

    /** Free-form remark. */
    @Schema(description = "备注")
    private String remark;

    /** Creation timestamp, serialized as "yyyy-MM-dd HH:mm:ss". */
    @Schema(description = "创建时间")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;
}
|
||||
@@ -0,0 +1,51 @@
|
||||
package com.ruoyi.info.collection.domain.vo;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.io.Serial;
import java.io.Serializable;

/**
 * 实体库导入失败记录 VO
 *
 * <p>One failed row of an enterprise base-info Excel import, echoing the
 * offending values plus the validation error message.</p>
 *
 * <p>Fix: now implements {@link Serializable}. The import service stores a
 * list of these objects in Redis for 7 days; if the RedisTemplate value
 * serializer uses JDK serialization (the Spring default), non-Serializable
 * values are rejected at write time. Every sibling VO in this package is
 * already Serializable.</p>
 *
 * @author ruoyi
 * @date 2026-04-17
 */
@Data
@Schema(description = "实体库导入失败记录")
public class EnterpriseBaseInfoImportFailureVO implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    @Schema(description = "企业名称")
    private String enterpriseName;

    @Schema(description = "统一社会信用代码")
    private String socialCreditCode;

    @Schema(description = "企业类型")
    private String enterpriseType;

    @Schema(description = "企业性质")
    private String enterpriseNature;

    @Schema(description = "行业分类")
    private String industryClass;

    @Schema(description = "所属行业")
    private String industryName;

    @Schema(description = "法定代表人")
    private String legalRepresentative;

    @Schema(description = "经营状态")
    private String status;

    @Schema(description = "风险等级")
    private String riskLevel;

    @Schema(description = "企业来源")
    private String entSource;

    @Schema(description = "数据来源")
    private String dataSource;

    /** Why the row was rejected (exception message from validation). */
    @Schema(description = "错误信息")
    private String errorMessage;
}
|
||||
@@ -32,6 +32,9 @@ public class ImportFailureVO {
|
||||
@Schema(description = "年收入")
|
||||
private BigDecimal annualIncome;
|
||||
|
||||
@Schema(description = "是否党员:0-否 1-是")
|
||||
private Integer partyMember;
|
||||
|
||||
@Schema(description = "状态")
|
||||
private String status;
|
||||
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
package com.ruoyi.info.collection.enums;
|
||||
|
||||
/**
 * 实体风险等级枚举
 *
 * <p>Risk-level dictionary for enterprises: "1"=高风险, "2"=中风险, "3"=低风险.
 * All static lookups are null-tolerant: a null or unknown argument yields
 * {@code null} / {@code false} rather than throwing.</p>
 *
 * @author ruoyi
 */
public enum EnterpriseRiskLevel {

    HIGH("1", "高风险"),
    MEDIUM("2", "中风险"),
    LOW("3", "低风险");

    /** Dictionary code persisted in the database. */
    private final String code;
    /** Human-readable Chinese label. */
    private final String desc;

    EnterpriseRiskLevel(String code, String desc) {
        this.code = code;
        this.desc = desc;
    }

    public String getCode() {
        return code;
    }

    public String getDesc() {
        return desc;
    }

    /**
     * Looks up the label for a dictionary code.
     *
     * @param code dictionary code (may be null)
     * @return the matching label, or {@code null} when unknown
     */
    public static String getDescByCode(String code) {
        for (EnterpriseRiskLevel value : values()) {
            if (value.code.equals(code)) {
                return value.desc;
            }
        }
        return null;
    }

    /**
     * Reports whether the given code is a known risk level.
     * Delegates to {@link #getDescByCode} instead of duplicating the scan.
     *
     * @param code dictionary code (may be null)
     * @return true when the code matches one of the enum constants
     */
    public static boolean contains(String code) {
        return getDescByCode(code) != null;
    }

    /**
     * Normalizes user input to a dictionary code: accepts either the code
     * itself or the Chinese label (e.g. as typed in an Excel import cell).
     *
     * @param value code or label (may be null)
     * @return the canonical code, or {@code null} when nothing matches
     */
    public static String resolveCode(String value) {
        for (EnterpriseRiskLevel item : values()) {
            if (item.code.equals(value) || item.desc.equals(value)) {
                return item.code;
            }
        }
        return null;
    }
}
|
||||
@@ -0,0 +1,58 @@
|
||||
package com.ruoyi.info.collection.enums;
|
||||
|
||||
/**
 * 企业来源枚举
 *
 * <p>Source dictionary for enterprises (general / employee relation / credit
 * customer / intermediary / both). All static lookups are null-tolerant: a
 * null or unknown argument yields {@code null} / {@code false}.</p>
 *
 * @author ruoyi
 */
public enum EnterpriseSource {

    GENERAL("GENERAL", "一般企业"),
    EMP_RELATION("EMP_RELATION", "员工关系人"),
    CREDIT_CUSTOMER("CREDIT_CUSTOMER", "信贷客户"),
    INTERMEDIARY("INTERMEDIARY", "中介"),
    BOTH("BOTH", "兼有");

    /** Dictionary code persisted in the database. */
    private final String code;
    /** Human-readable Chinese label. */
    private final String desc;

    EnterpriseSource(String code, String desc) {
        this.code = code;
        this.desc = desc;
    }

    public String getCode() {
        return code;
    }

    public String getDesc() {
        return desc;
    }

    /**
     * Looks up the label for a dictionary code.
     *
     * @param code dictionary code (may be null)
     * @return the matching label, or {@code null} when unknown
     */
    public static String getDescByCode(String code) {
        for (EnterpriseSource value : values()) {
            if (value.code.equals(code)) {
                return value.desc;
            }
        }
        return null;
    }

    /**
     * Reports whether the given code is a known enterprise source.
     * Delegates to {@link #getDescByCode} instead of duplicating the scan.
     *
     * @param code dictionary code (may be null)
     * @return true when the code matches one of the enum constants
     */
    public static boolean contains(String code) {
        return getDescByCode(code) != null;
    }

    /**
     * Normalizes user input to a dictionary code: accepts either the code
     * itself or the Chinese label (e.g. as typed in an Excel import cell).
     *
     * @param value code or label (may be null)
     * @return the canonical code, or {@code null} when nothing matches
     */
    public static String resolveCode(String value) {
        for (EnterpriseSource item : values()) {
            if (item.code.equals(value) || item.desc.equals(value)) {
                return item.code;
            }
        }
        return null;
    }
}
|
||||
@@ -1,13 +0,0 @@
|
||||
package com.ruoyi.info.collection.mapper;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.ruoyi.info.collection.domain.CcdiAccountResult;
|
||||
|
||||
/**
|
||||
* 账户分析结果数据层
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-13
|
||||
*/
|
||||
public interface CcdiAccountResultMapper extends BaseMapper<CcdiAccountResult> {
|
||||
}
|
||||
@@ -1,7 +1,10 @@
|
||||
package com.ruoyi.info.collection.mapper;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.info.collection.domain.CcdiEnterpriseBaseInfo;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoQueryDTO;
|
||||
import com.ruoyi.info.collection.domain.vo.CcdiEnterpriseBaseInfoVO;
|
||||
import org.apache.ibatis.annotations.Mapper;
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
@@ -16,6 +19,16 @@ import java.util.List;
|
||||
@Mapper
|
||||
public interface CcdiEnterpriseBaseInfoMapper extends BaseMapper<CcdiEnterpriseBaseInfo> {
|
||||
|
||||
/**
|
||||
* 分页查询实体库列表
|
||||
*
|
||||
* @param page 分页参数
|
||||
* @param queryDTO 查询条件
|
||||
* @return 分页结果
|
||||
*/
|
||||
Page<CcdiEnterpriseBaseInfoVO> selectEnterpriseBaseInfoPage(Page<CcdiEnterpriseBaseInfoVO> page,
|
||||
@Param("queryDTO") CcdiEnterpriseBaseInfoQueryDTO queryDTO);
|
||||
|
||||
/**
|
||||
* 批量插入实体中介
|
||||
*
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.ruoyi.info.collection.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.ruoyi.info.collection.domain.CcdiIntermediaryEnterpriseRelation;
import com.ruoyi.info.collection.domain.vo.CcdiIntermediaryEnterpriseRelationVO;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * Intermediary / enterprise relation mapper (中介关联机构关系Mapper).
 *
 * <p>Inherits standard CRUD from {@link BaseMapper}; the methods below back
 * custom statements (presumably defined in the matching XML — confirm).</p>
 */
@Mapper
public interface CcdiIntermediaryEnterpriseRelationMapper extends BaseMapper<CcdiIntermediaryEnterpriseRelation> {

    /**
     * Lists all enterprise relations belonging to one intermediary.
     *
     * @param bizId business id of the intermediary
     * @return relation VOs for that intermediary (empty when none)
     */
    List<CcdiIntermediaryEnterpriseRelationVO> selectByIntermediaryBizId(@Param("bizId") String bizId);

    /**
     * Loads the detail view of a single relation row.
     *
     * @param id primary key of the relation
     * @return the detail VO, or {@code null} when the id is unknown
     */
    CcdiIntermediaryEnterpriseRelationVO selectDetailById(@Param("id") Long id);

    /**
     * Checks whether the intermediary is already linked to the enterprise,
     * to prevent duplicate relation rows.
     *
     * @param bizId            business id of the intermediary
     * @param socialCreditCode unified social credit code of the enterprise
     * @return true when a relation already exists
     */
    boolean existsByIntermediaryBizIdAndSocialCreditCode(@Param("bizId") String bizId,
                                                         @Param("socialCreditCode") String socialCreditCode);
}
|
||||
@@ -0,0 +1,29 @@
|
||||
package com.ruoyi.info.collection.service;

import com.ruoyi.info.collection.domain.CcdiEnterpriseBaseInfo;
import com.ruoyi.info.collection.domain.excel.CcdiEnterpriseBaseInfoExcel;
import com.ruoyi.info.collection.domain.vo.EnterpriseBaseInfoImportFailureVO;
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;

import java.util.List;
import java.util.Set;

/**
 * 实体库管理导入 Service 接口
 *
 * <p>Asynchronous Excel import for the enterprise base-info library: starts
 * the import, exposes task status, and reports the rows that failed.</p>
 *
 * @author ruoyi
 * @date 2026-04-17
 */
public interface ICcdiEnterpriseBaseInfoImportService {

    /**
     * Runs the import in the background.
     *
     * @param excelList parsed Excel rows to import
     * @param taskId    task id used later with {@link #getImportStatus} and
     *                  {@link #getImportFailures}
     * @param userName  operator name (presumably stamped on created rows — confirm)
     */
    void importEnterpriseBaseInfoAsync(List<CcdiEnterpriseBaseInfoExcel> excelList, String taskId, String userName);

    /**
     * Returns the current status of an import task.
     *
     * @param taskId id supplied when the import was started
     * @return status snapshot of the task
     */
    ImportStatusVO getImportStatus(String taskId);

    /**
     * Returns the rows of a task that failed validation or persistence.
     *
     * @param taskId id supplied when the import was started
     * @return failed rows with their error messages
     */
    List<EnterpriseBaseInfoImportFailureVO> getImportFailures(String taskId);

    /**
     * Validates one Excel row and converts it into a persistable entity;
     * implementations signal rejection by throwing.
     *
     * @param excel                the raw Excel row
     * @param existingCreditCodes  credit codes already present in the database
     * @param processedCreditCodes credit codes already accepted earlier in this batch
     * @param userName             operator name recorded with the entity
     * @return the entity built from the row
     */
    CcdiEnterpriseBaseInfo validateAndBuildEntity(CcdiEnterpriseBaseInfoExcel excel,
                                                  Set<String> existingCreditCodes,
                                                  Set<String> processedCreditCodes,
                                                  String userName);
}
|
||||
@@ -0,0 +1,34 @@
|
||||
package com.ruoyi.info.collection.service;

import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoAddDTO;
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoEditDTO;
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoQueryDTO;
import com.ruoyi.info.collection.domain.excel.CcdiEnterpriseBaseInfoExcel;
import com.ruoyi.info.collection.domain.vo.CcdiEnterpriseBaseInfoVO;

import java.util.List;

/**
 * 实体库管理 Service 接口
 *
 * <p>CRUD plus Excel export/import entry points for the enterprise base-info
 * library. Records are keyed by their unified social credit code.</p>
 *
 * @author ruoyi
 * @date 2026-04-17
 */
public interface ICcdiEnterpriseBaseInfoService {

    /**
     * Pages through the enterprise library with the given filters.
     *
     * @param page     page request (page number / size)
     * @param queryDTO filter conditions
     * @return one page of matching VOs
     */
    Page<CcdiEnterpriseBaseInfoVO> selectEnterpriseBaseInfoPage(Page<CcdiEnterpriseBaseInfoVO> page,
                                                                CcdiEnterpriseBaseInfoQueryDTO queryDTO);

    /**
     * Loads one enterprise by its unified social credit code.
     *
     * @param socialCreditCode business key of the record
     * @return the VO, or an implementation-defined miss result (confirm: null vs throw)
     */
    CcdiEnterpriseBaseInfoVO selectEnterpriseBaseInfoById(String socialCreditCode);

    /**
     * Creates a new enterprise record.
     *
     * @param addDTO fields of the new record
     * @return affected row count
     */
    int insertEnterpriseBaseInfo(CcdiEnterpriseBaseInfoAddDTO addDTO);

    /**
     * Updates an existing enterprise record.
     *
     * @param editDTO fields to update, keyed by credit code
     * @return affected row count
     */
    int updateEnterpriseBaseInfo(CcdiEnterpriseBaseInfoEditDTO editDTO);

    /**
     * Deletes the records with the given credit codes.
     *
     * @param socialCreditCodes business keys to delete
     * @return affected row count
     */
    int deleteEnterpriseBaseInfoByIds(String[] socialCreditCodes);

    /**
     * Collects the rows matching the filters, shaped for Excel export.
     *
     * @param queryDTO filter conditions
     * @return export row objects
     */
    List<CcdiEnterpriseBaseInfoExcel> selectEnterpriseBaseInfoListForExport(CcdiEnterpriseBaseInfoQueryDTO queryDTO);

    /**
     * Starts an Excel import of the given rows.
     *
     * @param excelList parsed Excel rows
     * @return a task id (presumably for the import-status endpoints — confirm)
     */
    String importEnterpriseBaseInfo(List<CcdiEnterpriseBaseInfoExcel> excelList);
}
|
||||
@@ -4,7 +4,6 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.common.utils.StringUtils;
|
||||
import com.ruoyi.info.collection.domain.CcdiAccountInfo;
|
||||
import com.ruoyi.info.collection.domain.CcdiAccountResult;
|
||||
import com.ruoyi.info.collection.domain.CcdiBaseStaff;
|
||||
import com.ruoyi.info.collection.domain.CcdiStaffFmyRelation;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiAccountInfoAddDTO;
|
||||
@@ -16,7 +15,6 @@ import com.ruoyi.info.collection.domain.vo.CcdiAccountInfoVO;
|
||||
import com.ruoyi.info.collection.domain.vo.CcdiAccountRelationOptionVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportResult;
|
||||
import com.ruoyi.info.collection.mapper.CcdiAccountInfoMapper;
|
||||
import com.ruoyi.info.collection.mapper.CcdiAccountResultMapper;
|
||||
import com.ruoyi.info.collection.mapper.CcdiBaseStaffMapper;
|
||||
import com.ruoyi.info.collection.mapper.CcdiStaffFmyRelationMapper;
|
||||
import com.ruoyi.info.collection.service.ICcdiAccountInfoService;
|
||||
@@ -56,9 +54,6 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
|
||||
@Resource
|
||||
private CcdiAccountInfoMapper accountInfoMapper;
|
||||
|
||||
@Resource
|
||||
private CcdiAccountResultMapper accountResultMapper;
|
||||
|
||||
@Resource
|
||||
private CcdiBaseStaffMapper baseStaffMapper;
|
||||
|
||||
@@ -87,9 +82,8 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
|
||||
|
||||
CcdiAccountInfo accountInfo = new CcdiAccountInfo();
|
||||
BeanUtils.copyProperties(addDTO, accountInfo);
|
||||
int result = accountInfoMapper.insert(accountInfo);
|
||||
syncAccountResult(accountInfo.getBankScope(), null, accountInfo.getAccountNo(), addDTO);
|
||||
return result;
|
||||
prepareAnalysisFields(accountInfo);
|
||||
return accountInfoMapper.insert(accountInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -110,26 +104,13 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
|
||||
|
||||
CcdiAccountInfo accountInfo = new CcdiAccountInfo();
|
||||
BeanUtils.copyProperties(editDTO, accountInfo);
|
||||
int result = accountInfoMapper.updateById(accountInfo);
|
||||
syncAccountResult(accountInfo.getBankScope(), existing, accountInfo.getAccountNo(), editDTO);
|
||||
return result;
|
||||
prepareAnalysisFields(accountInfo);
|
||||
return accountInfoMapper.updateById(accountInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public int deleteAccountInfoByIds(Long[] ids) {
|
||||
List<CcdiAccountInfo> accountList = accountInfoMapper.selectBatchIds(Arrays.asList(ids));
|
||||
if (!accountList.isEmpty()) {
|
||||
List<String> accountNos = accountList.stream()
|
||||
.map(CcdiAccountInfo::getAccountNo)
|
||||
.filter(StringUtils::isNotEmpty)
|
||||
.toList();
|
||||
if (!accountNos.isEmpty()) {
|
||||
LambdaQueryWrapper<CcdiAccountResult> resultWrapper = new LambdaQueryWrapper<>();
|
||||
resultWrapper.in(CcdiAccountResult::getAccountNo, accountNos);
|
||||
accountResultMapper.delete(resultWrapper);
|
||||
}
|
||||
}
|
||||
return accountInfoMapper.deleteBatchIds(Arrays.asList(ids));
|
||||
}
|
||||
|
||||
@@ -250,51 +231,38 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
|
||||
}
|
||||
}
|
||||
|
||||
private void syncAccountResult(String newBankScope, CcdiAccountInfo existing, String accountNo, Object dto) {
|
||||
String oldBankScope = existing == null ? null : existing.getBankScope();
|
||||
String oldAccountNo = existing == null ? null : existing.getAccountNo();
|
||||
|
||||
if (existing != null && "EXTERNAL".equals(oldBankScope)
|
||||
&& (!"EXTERNAL".equals(newBankScope) || !StringUtils.equals(oldAccountNo, accountNo))) {
|
||||
LambdaQueryWrapper<CcdiAccountResult> deleteWrapper = new LambdaQueryWrapper<>();
|
||||
deleteWrapper.eq(CcdiAccountResult::getAccountNo, oldAccountNo);
|
||||
accountResultMapper.delete(deleteWrapper);
|
||||
}
|
||||
|
||||
if (!"EXTERNAL".equals(newBankScope)) {
|
||||
private void prepareAnalysisFields(CcdiAccountInfo accountInfo) {
|
||||
if (!"EXTERNAL".equals(accountInfo.getBankScope())) {
|
||||
clearAnalysisFields(accountInfo);
|
||||
return;
|
||||
}
|
||||
if (accountInfo.getIsActualControl() == null) {
|
||||
accountInfo.setIsActualControl(1);
|
||||
}
|
||||
if (accountInfo.getAvgMonthTxnCount() == null) {
|
||||
accountInfo.setAvgMonthTxnCount(0);
|
||||
}
|
||||
if (accountInfo.getAvgMonthTxnAmount() == null) {
|
||||
accountInfo.setAvgMonthTxnAmount(BigDecimal.ZERO);
|
||||
}
|
||||
if (StringUtils.isEmpty(accountInfo.getTxnFrequencyLevel())) {
|
||||
accountInfo.setTxnFrequencyLevel("MEDIUM");
|
||||
}
|
||||
if (StringUtils.isEmpty(accountInfo.getTxnRiskLevel())) {
|
||||
accountInfo.setTxnRiskLevel("LOW");
|
||||
}
|
||||
}
|
||||
|
||||
LambdaQueryWrapper<CcdiAccountResult> wrapper = new LambdaQueryWrapper<>();
|
||||
wrapper.eq(CcdiAccountResult::getAccountNo, accountNo);
|
||||
CcdiAccountResult existingResult = accountResultMapper.selectOne(wrapper);
|
||||
|
||||
CcdiAccountResult accountResult = new CcdiAccountResult();
|
||||
BeanUtils.copyProperties(dto, accountResult);
|
||||
accountResult.setAccountNo(accountNo);
|
||||
if (accountResult.getIsActualControl() == null) {
|
||||
accountResult.setIsActualControl(1);
|
||||
}
|
||||
if (accountResult.getAvgMonthTxnCount() == null) {
|
||||
accountResult.setAvgMonthTxnCount(0);
|
||||
}
|
||||
if (accountResult.getAvgMonthTxnAmount() == null) {
|
||||
accountResult.setAvgMonthTxnAmount(BigDecimal.ZERO);
|
||||
}
|
||||
if (StringUtils.isEmpty(accountResult.getTxnFrequencyLevel())) {
|
||||
accountResult.setTxnFrequencyLevel("MEDIUM");
|
||||
}
|
||||
if (StringUtils.isEmpty(accountResult.getTxnRiskLevel())) {
|
||||
accountResult.setTxnRiskLevel("LOW");
|
||||
}
|
||||
|
||||
if (existingResult == null) {
|
||||
accountResultMapper.insert(accountResult);
|
||||
return;
|
||||
}
|
||||
|
||||
accountResult.setResultId(existingResult.getResultId());
|
||||
accountResultMapper.updateById(accountResult);
|
||||
private void clearAnalysisFields(CcdiAccountInfo accountInfo) {
|
||||
accountInfo.setIsActualControl(null);
|
||||
accountInfo.setAvgMonthTxnCount(null);
|
||||
accountInfo.setAvgMonthTxnAmount(null);
|
||||
accountInfo.setTxnFrequencyLevel(null);
|
||||
accountInfo.setDebitSingleMaxAmount(null);
|
||||
accountInfo.setCreditSingleMaxAmount(null);
|
||||
accountInfo.setDebitDailyMaxAmount(null);
|
||||
accountInfo.setCreditDailyMaxAmount(null);
|
||||
accountInfo.setTxnRiskLevel(null);
|
||||
}
|
||||
|
||||
private void validateAmount(BigDecimal amount, String fieldLabel) {
|
||||
|
||||
@@ -320,6 +320,9 @@ public class CcdiBaseStaffImportServiceImpl implements ICcdiBaseStaffImportServi
|
||||
if (StringUtils.isEmpty(addDTO.getPhone())) {
|
||||
throw new RuntimeException("电话不能为空");
|
||||
}
|
||||
if (addDTO.getPartyMember() == null) {
|
||||
throw new RuntimeException("是否党员不能为空");
|
||||
}
|
||||
if (StringUtils.isEmpty(addDTO.getStatus())) {
|
||||
throw new RuntimeException("状态不能为空");
|
||||
}
|
||||
@@ -357,6 +360,9 @@ public class CcdiBaseStaffImportServiceImpl implements ICcdiBaseStaffImportServi
|
||||
if (!"0".equals(addDTO.getStatus()) && !"1".equals(addDTO.getStatus())) {
|
||||
throw new RuntimeException("状态只能填写'在职'或'离职'");
|
||||
}
|
||||
if (addDTO.getPartyMember() != 0 && addDTO.getPartyMember() != 1) {
|
||||
throw new RuntimeException("是否党员只能填写'0'或'1'");
|
||||
}
|
||||
|
||||
validateAnnualIncome(addDTO.getAnnualIncome(), "年收入");
|
||||
}
|
||||
|
||||
@@ -112,7 +112,7 @@ public class CcdiBaseStaffServiceImpl implements ICcdiBaseStaffService {
|
||||
CcdiBaseStaff staff = baseStaffMapper.selectById(staffId);
|
||||
CcdiBaseStaffVO vo = convertToVO(staff);
|
||||
if (staff != null) {
|
||||
vo.setAssetInfoList(assetInfoService.selectByFamilyId(staff.getIdCard()).stream().map(asset -> {
|
||||
vo.setAssetInfoList(assetInfoService.selectByFamilyIdAndPersonId(staff.getIdCard(), staff.getIdCard()).stream().map(asset -> {
|
||||
CcdiAssetInfoVO assetInfoVO = new CcdiAssetInfoVO();
|
||||
BeanUtils.copyProperties(asset, assetInfoVO);
|
||||
return assetInfoVO;
|
||||
@@ -131,6 +131,7 @@ public class CcdiBaseStaffServiceImpl implements ICcdiBaseStaffService {
|
||||
@Transactional
|
||||
public int insertBaseStaff(CcdiBaseStaffAddDTO addDTO) {
|
||||
validateAnnualIncome(addDTO.getAnnualIncome(), "年收入");
|
||||
validatePartyMember(addDTO.getPartyMember(), "是否党员");
|
||||
// 检查员工ID唯一性
|
||||
if (baseStaffMapper.selectById(addDTO.getStaffId()) != null) {
|
||||
throw new RuntimeException("该员工ID已存在");
|
||||
@@ -161,6 +162,7 @@ public class CcdiBaseStaffServiceImpl implements ICcdiBaseStaffService {
|
||||
@Transactional
|
||||
public int updateBaseStaff(CcdiBaseStaffEditDTO editDTO) {
|
||||
validateAnnualIncome(editDTO.getAnnualIncome(), "年收入");
|
||||
validatePartyMember(editDTO.getPartyMember(), "是否党员");
|
||||
CcdiBaseStaff existing = baseStaffMapper.selectById(editDTO.getStaffId());
|
||||
if (existing == null) {
|
||||
throw new RuntimeException("员工不存在");
|
||||
@@ -291,4 +293,13 @@ public class CcdiBaseStaffServiceImpl implements ICcdiBaseStaffService {
|
||||
}
|
||||
}
|
||||
|
||||
private void validatePartyMember(Integer partyMember, String fieldLabel) {
|
||||
if (partyMember == null) {
|
||||
throw new RuntimeException(fieldLabel + "不能为空");
|
||||
}
|
||||
if (partyMember != 0 && partyMember != 1) {
|
||||
throw new RuntimeException(fieldLabel + "只能填写'0'或'1'");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,225 @@
|
||||
package com.ruoyi.info.collection.service.impl;
|
||||
|
||||
import com.alibaba.fastjson2.JSON;
|
||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||
import com.ruoyi.common.utils.StringUtils;
|
||||
import com.ruoyi.info.collection.domain.CcdiEnterpriseBaseInfo;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiEnterpriseBaseInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.EnterpriseBaseInfoImportFailureVO;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportResult;
|
||||
import com.ruoyi.info.collection.domain.vo.ImportStatusVO;
|
||||
import com.ruoyi.info.collection.enums.DataSource;
|
||||
import com.ruoyi.info.collection.enums.EnterpriseRiskLevel;
|
||||
import com.ruoyi.info.collection.enums.EnterpriseSource;
|
||||
import com.ruoyi.info.collection.mapper.CcdiEnterpriseBaseInfoMapper;
|
||||
import com.ruoyi.info.collection.service.ICcdiEnterpriseBaseInfoImportService;
|
||||
import jakarta.annotation.Resource;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.data.redis.core.RedisTemplate;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.scheduling.annotation.EnableAsync;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* 实体库管理导入 Service 实现
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-17
|
||||
*/
|
||||
@Service
|
||||
@EnableAsync
|
||||
public class CcdiEnterpriseBaseInfoImportServiceImpl implements ICcdiEnterpriseBaseInfoImportService {
|
||||
|
||||
@Resource
|
||||
private CcdiEnterpriseBaseInfoMapper enterpriseBaseInfoMapper;
|
||||
|
||||
@Resource
|
||||
private RedisTemplate<String, Object> redisTemplate;
|
||||
|
||||
@Override
|
||||
@Async
|
||||
public void importEnterpriseBaseInfoAsync(List<CcdiEnterpriseBaseInfoExcel> excelList, String taskId, String userName) {
|
||||
List<CcdiEnterpriseBaseInfo> successRecords = new ArrayList<>();
|
||||
List<EnterpriseBaseInfoImportFailureVO> failures = new ArrayList<>();
|
||||
Set<String> existingCreditCodes = getExistingCreditCodes(excelList);
|
||||
Set<String> processedCreditCodes = new HashSet<>();
|
||||
|
||||
for (CcdiEnterpriseBaseInfoExcel excel : excelList) {
|
||||
try {
|
||||
CcdiEnterpriseBaseInfo entity = validateAndBuildEntity(excel, existingCreditCodes, processedCreditCodes, userName);
|
||||
successRecords.add(entity);
|
||||
processedCreditCodes.add(entity.getSocialCreditCode());
|
||||
} catch (Exception e) {
|
||||
EnterpriseBaseInfoImportFailureVO failureVO = new EnterpriseBaseInfoImportFailureVO();
|
||||
BeanUtils.copyProperties(excel, failureVO);
|
||||
failureVO.setErrorMessage(e.getMessage());
|
||||
failures.add(failureVO);
|
||||
}
|
||||
}
|
||||
|
||||
if (!successRecords.isEmpty()) {
|
||||
saveBatch(successRecords, 500);
|
||||
}
|
||||
|
||||
if (!failures.isEmpty()) {
|
||||
redisTemplate.opsForValue().set(buildFailuresKey(taskId), failures, 7, TimeUnit.DAYS);
|
||||
}
|
||||
|
||||
ImportResult result = new ImportResult();
|
||||
result.setTotalCount(excelList.size());
|
||||
result.setSuccessCount(successRecords.size());
|
||||
result.setFailureCount(failures.size());
|
||||
updateImportStatus(taskId, failures.isEmpty() ? "SUCCESS" : "PARTIAL_SUCCESS", result);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ImportStatusVO getImportStatus(String taskId) {
|
||||
String key = buildStatusKey(taskId);
|
||||
Boolean exists = redisTemplate.hasKey(key);
|
||||
if (Boolean.FALSE.equals(exists)) {
|
||||
throw new RuntimeException("任务不存在或已过期");
|
||||
}
|
||||
|
||||
Map<Object, Object> statusMap = redisTemplate.opsForHash().entries(key);
|
||||
ImportStatusVO statusVO = new ImportStatusVO();
|
||||
statusVO.setTaskId((String) statusMap.get("taskId"));
|
||||
statusVO.setStatus((String) statusMap.get("status"));
|
||||
statusVO.setTotalCount((Integer) statusMap.get("totalCount"));
|
||||
statusVO.setSuccessCount((Integer) statusMap.get("successCount"));
|
||||
statusVO.setFailureCount((Integer) statusMap.get("failureCount"));
|
||||
statusVO.setProgress((Integer) statusMap.get("progress"));
|
||||
statusVO.setStartTime((Long) statusMap.get("startTime"));
|
||||
statusVO.setEndTime((Long) statusMap.get("endTime"));
|
||||
statusVO.setMessage((String) statusMap.get("message"));
|
||||
return statusVO;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<EnterpriseBaseInfoImportFailureVO> getImportFailures(String taskId) {
|
||||
Object failuresObj = redisTemplate.opsForValue().get(buildFailuresKey(taskId));
|
||||
if (failuresObj == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
return JSON.parseArray(JSON.toJSONString(failuresObj), EnterpriseBaseInfoImportFailureVO.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CcdiEnterpriseBaseInfo validateAndBuildEntity(CcdiEnterpriseBaseInfoExcel excel,
|
||||
Set<String> existingCreditCodes,
|
||||
Set<String> processedCreditCodes,
|
||||
String userName) {
|
||||
if (excel == null) {
|
||||
throw new RuntimeException("导入数据不能为空");
|
||||
}
|
||||
if (StringUtils.isEmpty(excel.getEnterpriseName())) {
|
||||
throw new RuntimeException("企业名称不能为空");
|
||||
}
|
||||
if (StringUtils.isEmpty(excel.getSocialCreditCode())) {
|
||||
throw new RuntimeException("统一社会信用代码不能为空");
|
||||
}
|
||||
if (!excel.getSocialCreditCode().matches("^[0-9A-HJ-NPQRTUWXY]{2}\\d{6}[0-9A-HJ-NPQRTUWXY]{10}$")) {
|
||||
throw new RuntimeException("统一社会信用代码格式不正确");
|
||||
}
|
||||
if (StringUtils.isEmpty(excel.getStatus())) {
|
||||
throw new RuntimeException("经营状态不能为空");
|
||||
}
|
||||
|
||||
String riskLevel = EnterpriseRiskLevel.resolveCode(StringUtils.trim(excel.getRiskLevel()));
|
||||
if (riskLevel == null) {
|
||||
throw new RuntimeException("风险等级不在允许范围内");
|
||||
}
|
||||
String entSource = EnterpriseSource.resolveCode(StringUtils.trim(excel.getEntSource()));
|
||||
if (entSource == null) {
|
||||
throw new RuntimeException("企业来源不在允许范围内");
|
||||
}
|
||||
String dataSource = resolveDataSourceCode(StringUtils.trim(excel.getDataSource()));
|
||||
if (dataSource == null) {
|
||||
throw new RuntimeException("数据来源不在允许范围内");
|
||||
}
|
||||
|
||||
if (existingCreditCodes.contains(excel.getSocialCreditCode())) {
|
||||
throw new RuntimeException(String.format("统一社会信用代码[%s]已存在,请勿重复导入", excel.getSocialCreditCode()));
|
||||
}
|
||||
if (processedCreditCodes.contains(excel.getSocialCreditCode())) {
|
||||
throw new RuntimeException(String.format("统一社会信用代码[%s]在导入文件中重复,已跳过此条记录", excel.getSocialCreditCode()));
|
||||
}
|
||||
|
||||
CcdiEnterpriseBaseInfo entity = new CcdiEnterpriseBaseInfo();
|
||||
BeanUtils.copyProperties(excel, entity);
|
||||
entity.setRiskLevel(riskLevel);
|
||||
entity.setEntSource(entSource);
|
||||
entity.setDataSource(dataSource);
|
||||
entity.setStatus(StringUtils.trim(excel.getStatus()));
|
||||
entity.setCreatedBy(userName);
|
||||
entity.setUpdatedBy(userName);
|
||||
return entity;
|
||||
}
|
||||
|
||||
private Set<String> getExistingCreditCodes(List<CcdiEnterpriseBaseInfoExcel> excelList) {
|
||||
List<String> creditCodes = excelList.stream()
|
||||
.map(CcdiEnterpriseBaseInfoExcel::getSocialCreditCode)
|
||||
.filter(StringUtils::isNotEmpty)
|
||||
.collect(Collectors.toList());
|
||||
if (creditCodes.isEmpty()) {
|
||||
return Collections.emptySet();
|
||||
}
|
||||
LambdaQueryWrapper<CcdiEnterpriseBaseInfo> wrapper = new LambdaQueryWrapper<>();
|
||||
wrapper.in(CcdiEnterpriseBaseInfo::getSocialCreditCode, creditCodes);
|
||||
return enterpriseBaseInfoMapper.selectList(wrapper).stream()
|
||||
.map(CcdiEnterpriseBaseInfo::getSocialCreditCode)
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
private int saveBatch(List<CcdiEnterpriseBaseInfo> list, int batchSize) {
|
||||
int total = 0;
|
||||
for (int i = 0; i < list.size(); i += batchSize) {
|
||||
int end = Math.min(i + batchSize, list.size());
|
||||
total += enterpriseBaseInfoMapper.insertBatch(list.subList(i, end));
|
||||
}
|
||||
return total;
|
||||
}
|
||||
|
||||
private void updateImportStatus(String taskId, String status, ImportResult result) {
|
||||
Map<String, Object> statusData = new HashMap<>();
|
||||
statusData.put("status", status);
|
||||
statusData.put("successCount", result.getSuccessCount());
|
||||
statusData.put("failureCount", result.getFailureCount());
|
||||
statusData.put("progress", 100);
|
||||
statusData.put("endTime", System.currentTimeMillis());
|
||||
if ("SUCCESS".equals(status)) {
|
||||
statusData.put("message", "全部成功!共导入" + result.getTotalCount() + "条数据");
|
||||
} else {
|
||||
statusData.put("message", "成功" + result.getSuccessCount() + "条,失败" + result.getFailureCount() + "条");
|
||||
}
|
||||
redisTemplate.opsForHash().putAll(buildStatusKey(taskId), statusData);
|
||||
}
|
||||
|
||||
private String resolveDataSourceCode(String value) {
|
||||
for (DataSource source : DataSource.values()) {
|
||||
if (source.getCode().equals(value) || source.getDesc().equals(value)) {
|
||||
return source.getCode();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private String buildStatusKey(String taskId) {
|
||||
return "import:enterpriseBaseInfo:" + taskId;
|
||||
}
|
||||
|
||||
private String buildFailuresKey(String taskId) {
|
||||
return "import:enterpriseBaseInfo:" + taskId + ":failures";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,182 @@
|
||||
package com.ruoyi.info.collection.service.impl;
|
||||
|
||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.common.utils.SecurityUtils;
|
||||
import com.ruoyi.common.utils.StringUtils;
|
||||
import com.ruoyi.info.collection.domain.CcdiEnterpriseBaseInfo;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoAddDTO;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoEditDTO;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiEnterpriseBaseInfoQueryDTO;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiEnterpriseBaseInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.vo.CcdiEnterpriseBaseInfoVO;
|
||||
import com.ruoyi.info.collection.enums.DataSource;
|
||||
import com.ruoyi.info.collection.enums.EnterpriseRiskLevel;
|
||||
import com.ruoyi.info.collection.enums.EnterpriseSource;
|
||||
import com.ruoyi.info.collection.mapper.CcdiEnterpriseBaseInfoMapper;
|
||||
import com.ruoyi.info.collection.service.ICcdiEnterpriseBaseInfoImportService;
|
||||
import com.ruoyi.info.collection.service.ICcdiEnterpriseBaseInfoService;
|
||||
import jakarta.annotation.Resource;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.data.redis.core.RedisTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* 实体库管理 Service 实现
|
||||
*
|
||||
* @author ruoyi
|
||||
* @date 2026-04-17
|
||||
*/
|
||||
@Service
|
||||
public class CcdiEnterpriseBaseInfoServiceImpl implements ICcdiEnterpriseBaseInfoService {
|
||||
|
||||
@Resource
|
||||
private CcdiEnterpriseBaseInfoMapper enterpriseBaseInfoMapper;
|
||||
|
||||
@Resource
|
||||
private ICcdiEnterpriseBaseInfoImportService enterpriseBaseInfoImportService;
|
||||
|
||||
@Resource
|
||||
private RedisTemplate<String, Object> redisTemplate;
|
||||
|
||||
@Override
|
||||
public Page<CcdiEnterpriseBaseInfoVO> selectEnterpriseBaseInfoPage(Page<CcdiEnterpriseBaseInfoVO> page,
|
||||
CcdiEnterpriseBaseInfoQueryDTO queryDTO) {
|
||||
return enterpriseBaseInfoMapper.selectEnterpriseBaseInfoPage(page, queryDTO);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CcdiEnterpriseBaseInfoVO selectEnterpriseBaseInfoById(String socialCreditCode) {
|
||||
CcdiEnterpriseBaseInfo entity = enterpriseBaseInfoMapper.selectById(socialCreditCode);
|
||||
if (entity == null) {
|
||||
return null;
|
||||
}
|
||||
CcdiEnterpriseBaseInfoVO vo = new CcdiEnterpriseBaseInfoVO();
|
||||
BeanUtils.copyProperties(entity, vo);
|
||||
return vo;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public int insertEnterpriseBaseInfo(CcdiEnterpriseBaseInfoAddDTO addDTO) {
|
||||
if (enterpriseBaseInfoMapper.selectById(addDTO.getSocialCreditCode()) != null) {
|
||||
throw new RuntimeException("该统一社会信用代码已存在");
|
||||
}
|
||||
validateEnumFields(addDTO.getStatus(), addDTO.getRiskLevel(), addDTO.getEntSource(), addDTO.getDataSource());
|
||||
|
||||
CcdiEnterpriseBaseInfo entity = new CcdiEnterpriseBaseInfo();
|
||||
BeanUtils.copyProperties(addDTO, entity);
|
||||
return enterpriseBaseInfoMapper.insert(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public int updateEnterpriseBaseInfo(CcdiEnterpriseBaseInfoEditDTO editDTO) {
|
||||
CcdiEnterpriseBaseInfo existing = enterpriseBaseInfoMapper.selectById(editDTO.getSocialCreditCode());
|
||||
if (existing == null) {
|
||||
throw new RuntimeException("实体库记录不存在");
|
||||
}
|
||||
validateEnumFields(editDTO.getStatus(), editDTO.getRiskLevel(), editDTO.getEntSource(), editDTO.getDataSource());
|
||||
|
||||
CcdiEnterpriseBaseInfo entity = new CcdiEnterpriseBaseInfo();
|
||||
BeanUtils.copyProperties(editDTO, entity);
|
||||
return enterpriseBaseInfoMapper.updateById(entity);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public int deleteEnterpriseBaseInfoByIds(String[] socialCreditCodes) {
|
||||
if (socialCreditCodes == null || socialCreditCodes.length == 0) {
|
||||
return 0;
|
||||
}
|
||||
return enterpriseBaseInfoMapper.deleteBatchIds(List.of(socialCreditCodes));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<CcdiEnterpriseBaseInfoExcel> selectEnterpriseBaseInfoListForExport(CcdiEnterpriseBaseInfoQueryDTO queryDTO) {
|
||||
LambdaQueryWrapper<CcdiEnterpriseBaseInfo> wrapper = buildQueryWrapper(queryDTO);
|
||||
return enterpriseBaseInfoMapper.selectList(wrapper).stream().map(entity -> {
|
||||
CcdiEnterpriseBaseInfoExcel excel = new CcdiEnterpriseBaseInfoExcel();
|
||||
BeanUtils.copyProperties(entity, excel);
|
||||
return excel;
|
||||
}).toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public String importEnterpriseBaseInfo(List<CcdiEnterpriseBaseInfoExcel> excelList) {
|
||||
String taskId = UUID.randomUUID().toString();
|
||||
String statusKey = "import:enterpriseBaseInfo:" + taskId;
|
||||
|
||||
Map<String, Object> statusData = new HashMap<>();
|
||||
statusData.put("taskId", taskId);
|
||||
statusData.put("status", "PROCESSING");
|
||||
statusData.put("totalCount", excelList.size());
|
||||
statusData.put("successCount", 0);
|
||||
statusData.put("failureCount", 0);
|
||||
statusData.put("progress", 0);
|
||||
statusData.put("startTime", System.currentTimeMillis());
|
||||
statusData.put("message", "正在处理...");
|
||||
|
||||
redisTemplate.opsForHash().putAll(statusKey, statusData);
|
||||
redisTemplate.expire(statusKey, 7, TimeUnit.DAYS);
|
||||
|
||||
enterpriseBaseInfoImportService.importEnterpriseBaseInfoAsync(excelList, taskId, SecurityUtils.getUsername());
|
||||
return taskId;
|
||||
}
|
||||
|
||||
private LambdaQueryWrapper<CcdiEnterpriseBaseInfo> buildQueryWrapper(CcdiEnterpriseBaseInfoQueryDTO queryDTO) {
|
||||
LambdaQueryWrapper<CcdiEnterpriseBaseInfo> wrapper = new LambdaQueryWrapper<>();
|
||||
if (queryDTO == null) {
|
||||
return wrapper.orderByDesc(CcdiEnterpriseBaseInfo::getCreateTime);
|
||||
}
|
||||
wrapper.like(StringUtils.isNotEmpty(queryDTO.getEnterpriseName()),
|
||||
CcdiEnterpriseBaseInfo::getEnterpriseName, queryDTO.getEnterpriseName());
|
||||
wrapper.eq(StringUtils.isNotEmpty(queryDTO.getSocialCreditCode()),
|
||||
CcdiEnterpriseBaseInfo::getSocialCreditCode, queryDTO.getSocialCreditCode());
|
||||
wrapper.eq(StringUtils.isNotEmpty(queryDTO.getEnterpriseType()),
|
||||
CcdiEnterpriseBaseInfo::getEnterpriseType, queryDTO.getEnterpriseType());
|
||||
wrapper.eq(StringUtils.isNotEmpty(queryDTO.getEnterpriseNature()),
|
||||
CcdiEnterpriseBaseInfo::getEnterpriseNature, queryDTO.getEnterpriseNature());
|
||||
wrapper.like(StringUtils.isNotEmpty(queryDTO.getIndustryClass()),
|
||||
CcdiEnterpriseBaseInfo::getIndustryClass, queryDTO.getIndustryClass());
|
||||
wrapper.eq(StringUtils.isNotEmpty(queryDTO.getStatus()),
|
||||
CcdiEnterpriseBaseInfo::getStatus, queryDTO.getStatus());
|
||||
wrapper.eq(StringUtils.isNotEmpty(queryDTO.getRiskLevel()),
|
||||
CcdiEnterpriseBaseInfo::getRiskLevel, queryDTO.getRiskLevel());
|
||||
wrapper.eq(StringUtils.isNotEmpty(queryDTO.getEntSource()),
|
||||
CcdiEnterpriseBaseInfo::getEntSource, queryDTO.getEntSource());
|
||||
return wrapper.orderByDesc(CcdiEnterpriseBaseInfo::getCreateTime);
|
||||
}
|
||||
|
||||
private void validateEnumFields(String status, String riskLevel, String entSource, String dataSource) {
|
||||
if (StringUtils.isEmpty(status)) {
|
||||
throw new RuntimeException("经营状态不能为空");
|
||||
}
|
||||
if (!EnterpriseRiskLevel.contains(riskLevel)) {
|
||||
throw new RuntimeException("风险等级不在允许范围内");
|
||||
}
|
||||
if (!EnterpriseSource.contains(entSource)) {
|
||||
throw new RuntimeException("企业来源不在允许范围内");
|
||||
}
|
||||
if (!containsDataSource(dataSource)) {
|
||||
throw new RuntimeException("数据来源不在允许范围内");
|
||||
}
|
||||
}
|
||||
|
||||
private boolean containsDataSource(String code) {
|
||||
for (DataSource source : DataSource.values()) {
|
||||
if (source.getCode().equals(code)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -67,15 +67,15 @@
|
||||
ai.status AS status,
|
||||
ai.effective_date AS effectiveDate,
|
||||
ai.invalid_date AS invalidDate,
|
||||
ar.is_self_account AS isActualControl,
|
||||
ar.monthly_avg_trans_count AS avgMonthTxnCount,
|
||||
ar.monthly_avg_trans_amount AS avgMonthTxnAmount,
|
||||
ar.trans_freq_type AS txnFrequencyLevel,
|
||||
ar.dr_max_single_amount AS debitSingleMaxAmount,
|
||||
ar.cr_max_single_amount AS creditSingleMaxAmount,
|
||||
ar.dr_max_daily_amount AS debitDailyMaxAmount,
|
||||
ar.cr_max_daily_amount AS creditDailyMaxAmount,
|
||||
ar.trans_risk_level AS txnRiskLevel,
|
||||
ai.is_self_account AS isActualControl,
|
||||
ai.monthly_avg_trans_count AS avgMonthTxnCount,
|
||||
ai.monthly_avg_trans_amount AS avgMonthTxnAmount,
|
||||
ai.trans_freq_type AS txnFrequencyLevel,
|
||||
ai.dr_max_single_amount AS debitSingleMaxAmount,
|
||||
ai.cr_max_single_amount AS creditSingleMaxAmount,
|
||||
ai.dr_max_daily_amount AS debitDailyMaxAmount,
|
||||
ai.cr_max_daily_amount AS creditDailyMaxAmount,
|
||||
ai.trans_risk_level AS txnRiskLevel,
|
||||
ai.create_by AS createBy,
|
||||
ai.create_time AS createTime,
|
||||
ai.update_by AS updateBy,
|
||||
@@ -107,10 +107,10 @@
|
||||
AND ai.account_type = #{query.accountType}
|
||||
</if>
|
||||
<if test="query.isActualControl != null">
|
||||
AND ar.is_self_account = #{query.isActualControl}
|
||||
AND ai.is_self_account = #{query.isActualControl}
|
||||
</if>
|
||||
<if test="query.riskLevel != null and query.riskLevel != ''">
|
||||
AND ar.trans_risk_level = #{query.riskLevel}
|
||||
AND ai.trans_risk_level = #{query.riskLevel}
|
||||
</if>
|
||||
<if test="query.status != null">
|
||||
AND ai.status = #{query.status}
|
||||
@@ -121,7 +121,6 @@
|
||||
SELECT
|
||||
<include refid="AccountInfoSelectColumns"/>
|
||||
FROM ccdi_account_info ai
|
||||
LEFT JOIN ccdi_account_result ar ON ai.account_no = ar.account_no
|
||||
LEFT JOIN ccdi_base_staff bs ON ai.owner_type = 'EMPLOYEE' AND ai.owner_id = bs.id_card
|
||||
LEFT JOIN ccdi_staff_fmy_relation fr ON ai.owner_type = 'RELATION' AND ai.owner_id = fr.relation_cert_no
|
||||
LEFT JOIN ccdi_base_staff bsRel ON fr.person_id = bsRel.id_card
|
||||
@@ -133,7 +132,6 @@
|
||||
SELECT
|
||||
<include refid="AccountInfoSelectColumns"/>
|
||||
FROM ccdi_account_info ai
|
||||
LEFT JOIN ccdi_account_result ar ON ai.account_no = ar.account_no
|
||||
LEFT JOIN ccdi_base_staff bs ON ai.owner_type = 'EMPLOYEE' AND ai.owner_id = bs.id_card
|
||||
LEFT JOIN ccdi_staff_fmy_relation fr ON ai.owner_type = 'RELATION' AND ai.owner_id = fr.relation_cert_no
|
||||
LEFT JOIN ccdi_base_staff bsRel ON fr.person_id = bsRel.id_card
|
||||
@@ -145,7 +143,6 @@
|
||||
SELECT
|
||||
<include refid="AccountInfoSelectColumns"/>
|
||||
FROM ccdi_account_info ai
|
||||
LEFT JOIN ccdi_account_result ar ON ai.account_no = ar.account_no
|
||||
LEFT JOIN ccdi_base_staff bs ON ai.owner_type = 'EMPLOYEE' AND ai.owner_id = bs.id_card
|
||||
LEFT JOIN ccdi_staff_fmy_relation fr ON ai.owner_type = 'RELATION' AND ai.owner_id = fr.relation_cert_no
|
||||
LEFT JOIN ccdi_base_staff bsRel ON fr.person_id = bsRel.id_card
|
||||
|
||||
@@ -14,13 +14,14 @@
|
||||
<result property="phone" column="phone"/>
|
||||
<result property="annualIncome" column="annual_income"/>
|
||||
<result property="hireDate" column="hire_date"/>
|
||||
<result property="partyMember" column="is_party_member"/>
|
||||
<result property="status" column="status"/>
|
||||
<result property="createTime" column="create_time"/>
|
||||
</resultMap>
|
||||
|
||||
<select id="selectBaseStaffPageWithDept" resultMap="CcdiBaseStaffVOResult">
|
||||
SELECT
|
||||
e.staff_id, e.name, e.dept_id, e.id_card, e.phone, e.annual_income, e.hire_date, e.status, e.create_time,
|
||||
e.staff_id, e.name, e.dept_id, e.id_card, e.phone, e.annual_income, e.hire_date, e.is_party_member, e.status, e.create_time,
|
||||
d.dept_name
|
||||
FROM ccdi_base_staff e
|
||||
LEFT JOIN sys_dept d ON e.dept_id = d.dept_id
|
||||
@@ -47,12 +48,12 @@
|
||||
<!-- 批量插入或更新员工信息(只更新非null字段) -->
|
||||
<insert id="insertOrUpdateBatch" parameterType="java.util.List">
|
||||
INSERT INTO ccdi_base_staff
|
||||
(staff_id, name, dept_id, id_card, phone, annual_income, hire_date, status,
|
||||
(staff_id, name, dept_id, id_card, phone, annual_income, hire_date, is_party_member, status,
|
||||
create_time, create_by, update_by, update_time)
|
||||
VALUES
|
||||
<foreach collection="list" item="item" separator=",">
|
||||
(#{item.staffId}, #{item.name}, #{item.deptId}, #{item.idCard},
|
||||
#{item.phone}, #{item.annualIncome}, #{item.hireDate}, #{item.status}, NOW(),
|
||||
#{item.phone}, #{item.annualIncome}, #{item.hireDate}, #{item.partyMember}, #{item.status}, NOW(),
|
||||
#{item.createBy}, #{item.updateBy}, NOW())
|
||||
</foreach>
|
||||
ON DUPLICATE KEY UPDATE
|
||||
@@ -61,6 +62,7 @@
|
||||
phone = COALESCE(VALUES(phone), phone),
|
||||
annual_income = COALESCE(VALUES(annual_income), annual_income),
|
||||
hire_date = COALESCE(VALUES(hire_date), hire_date),
|
||||
is_party_member = COALESCE(VALUES(is_party_member), is_party_member),
|
||||
status = COALESCE(VALUES(status), status),
|
||||
update_by = COALESCE(VALUES(update_by), update_by),
|
||||
update_time = NOW()
|
||||
@@ -69,12 +71,12 @@
|
||||
<!-- 批量插入员工信息 -->
|
||||
<insert id="insertBatch" parameterType="java.util.List">
|
||||
INSERT INTO ccdi_base_staff
|
||||
(staff_id, name, dept_id, id_card, phone, annual_income, hire_date, status,
|
||||
(staff_id, name, dept_id, id_card, phone, annual_income, hire_date, is_party_member, status,
|
||||
create_time, create_by, update_by, update_time)
|
||||
VALUES
|
||||
<foreach collection="list" item="item" separator=",">
|
||||
(#{item.staffId}, #{item.name}, #{item.deptId}, #{item.idCard},
|
||||
#{item.phone}, #{item.annualIncome}, #{item.hireDate}, #{item.status}, NOW(),
|
||||
#{item.phone}, #{item.annualIncome}, #{item.hireDate}, #{item.partyMember}, #{item.status}, NOW(),
|
||||
#{item.createBy}, #{item.updateBy}, NOW())
|
||||
</foreach>
|
||||
</insert>
|
||||
|
||||
@@ -4,6 +4,83 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
|
||||
<mapper namespace="com.ruoyi.info.collection.mapper.CcdiEnterpriseBaseInfoMapper">
|
||||
|
||||
    <!-- Column-to-property mapping for enterprise base info list/detail queries;
         keyed by the unified social credit code. -->
    <resultMap id="CcdiEnterpriseBaseInfoVoResultMap" type="com.ruoyi.info.collection.domain.vo.CcdiEnterpriseBaseInfoVO">
        <id property="socialCreditCode" column="social_credit_code"/>
        <result property="enterpriseName" column="enterprise_name"/>
        <result property="enterpriseType" column="enterprise_type"/>
        <result property="enterpriseNature" column="enterprise_nature"/>
        <result property="industryClass" column="industry_class"/>
        <result property="industryName" column="industry_name"/>
        <result property="establishDate" column="establish_date"/>
        <result property="registerAddress" column="register_address"/>
        <result property="legalRepresentative" column="legal_representative"/>
        <result property="legalCertType" column="legal_cert_type"/>
        <result property="legalCertNo" column="legal_cert_no"/>
        <result property="shareholder1" column="shareholder1"/>
        <result property="shareholder2" column="shareholder2"/>
        <result property="shareholder3" column="shareholder3"/>
        <result property="shareholder4" column="shareholder4"/>
        <result property="shareholder5" column="shareholder5"/>
        <result property="status" column="status"/>
        <result property="riskLevel" column="risk_level"/>
        <result property="entSource" column="ent_source"/>
        <result property="dataSource" column="data_source"/>
        <result property="createTime" column="create_time"/>
    </resultMap>

    <!-- Paged query over ccdi_enterprise_base_info. Every filter is optional and
         each <if> also guards against a null queryDTO; name and industry class
         match by substring, the rest by equality. Newest records first. -->
    <select id="selectEnterpriseBaseInfoPage" resultMap="CcdiEnterpriseBaseInfoVoResultMap">
        SELECT
        social_credit_code,
        enterprise_name,
        enterprise_type,
        enterprise_nature,
        industry_class,
        industry_name,
        establish_date,
        register_address,
        legal_representative,
        legal_cert_type,
        legal_cert_no,
        shareholder1,
        shareholder2,
        shareholder3,
        shareholder4,
        shareholder5,
        status,
        risk_level,
        ent_source,
        data_source,
        create_time
        FROM ccdi_enterprise_base_info
        <where>
            <if test="queryDTO != null and queryDTO.enterpriseName != null and queryDTO.enterpriseName != ''">
                AND enterprise_name LIKE CONCAT('%', #{queryDTO.enterpriseName}, '%')
            </if>
            <if test="queryDTO != null and queryDTO.socialCreditCode != null and queryDTO.socialCreditCode != ''">
                AND social_credit_code = #{queryDTO.socialCreditCode}
            </if>
            <if test="queryDTO != null and queryDTO.enterpriseType != null and queryDTO.enterpriseType != ''">
                AND enterprise_type = #{queryDTO.enterpriseType}
            </if>
            <if test="queryDTO != null and queryDTO.enterpriseNature != null and queryDTO.enterpriseNature != ''">
                AND enterprise_nature = #{queryDTO.enterpriseNature}
            </if>
            <if test="queryDTO != null and queryDTO.industryClass != null and queryDTO.industryClass != ''">
                AND industry_class LIKE CONCAT('%', #{queryDTO.industryClass}, '%')
            </if>
            <if test="queryDTO != null and queryDTO.status != null and queryDTO.status != ''">
                AND status = #{queryDTO.status}
            </if>
            <if test="queryDTO != null and queryDTO.riskLevel != null and queryDTO.riskLevel != ''">
                AND risk_level = #{queryDTO.riskLevel}
            </if>
            <if test="queryDTO != null and queryDTO.entSource != null and queryDTO.entSource != ''">
                AND ent_source = #{queryDTO.entSource}
            </if>
        </where>
        ORDER BY create_time DESC
    </select>
|
||||
|
||||
<!-- 批量插入实体中介 -->
|
||||
<insert id="insertBatch" parameterType="java.util.List">
|
||||
INSERT INTO ccdi_enterprise_base_info (
|
||||
@@ -21,7 +98,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
#{item.legalRepresentative}, #{item.legalCertType}, #{item.legalCertNo},
|
||||
#{item.shareholder1}, #{item.shareholder2}, #{item.shareholder3}, #{item.shareholder4}, #{item.shareholder5},
|
||||
#{item.status}, #{item.riskLevel}, #{item.entSource}, #{item.dataSource},
|
||||
#{item.createdBy}, #{item.updatedBy}, #{item.createTime}, #{item.updateTime}
|
||||
#{item.createdBy}, #{item.updatedBy}, NOW(), NOW()
|
||||
)
|
||||
</foreach>
|
||||
</insert>
|
||||
@@ -43,7 +120,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
#{item.legalRepresentative}, #{item.legalCertType}, #{item.legalCertNo},
|
||||
#{item.shareholder1}, #{item.shareholder2}, #{item.shareholder3}, #{item.shareholder4}, #{item.shareholder5},
|
||||
#{item.status}, #{item.riskLevel}, #{item.entSource}, #{item.dataSource},
|
||||
#{item.createdBy}, #{item.updatedBy}, #{item.createTime}, #{item.updateTime}
|
||||
#{item.createdBy}, #{item.updatedBy}, NOW(), NOW()
|
||||
)
|
||||
</foreach>
|
||||
ON DUPLICATE KEY UPDATE
|
||||
@@ -67,7 +144,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
ent_source = VALUES(ent_source),
|
||||
data_source = VALUES(data_source),
|
||||
updated_by = VALUES(updated_by),
|
||||
update_time = VALUES(update_time)
|
||||
update_time = NOW()
|
||||
</insert>
|
||||
|
||||
<!-- 批量更新实体中介 -->
|
||||
@@ -95,7 +172,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
<if test="item.entSource != null">ent_source = #{item.entSource},</if>
|
||||
<if test="item.dataSource != null">data_source = #{item.dataSource},</if>
|
||||
<if test="item.updatedBy != null">updated_by = #{item.updatedBy},</if>
|
||||
update_time = #{item.updateTime}
|
||||
update_time = NOW()
|
||||
</set>
|
||||
WHERE social_credit_code = #{item.socialCreditCode}
|
||||
</foreach>
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
        PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!-- Queries for the intermediary ↔ enterprise relation table. Each row links one
     business intermediary (ccdi_biz_intermediary) to one enterprise
     (ccdi_enterprise_base_info, joined optionally to tolerate missing entities). -->
<mapper namespace="com.ruoyi.info.collection.mapper.CcdiIntermediaryEnterpriseRelationMapper">

    <resultMap id="CcdiIntermediaryEnterpriseRelationVOResult"
               type="com.ruoyi.info.collection.domain.vo.CcdiIntermediaryEnterpriseRelationVO">
        <id property="id" column="id"/>
        <result property="intermediaryBizId" column="intermediary_biz_id"/>
        <result property="intermediaryName" column="intermediary_name"/>
        <result property="intermediaryPersonId" column="intermediary_person_id"/>
        <result property="socialCreditCode" column="social_credit_code"/>
        <result property="enterpriseName" column="enterprise_name"/>
        <result property="relationPersonPost" column="relation_person_post"/>
        <result property="remark" column="remark"/>
        <result property="createTime" column="create_time"/>
    </resultMap>

    <!-- All relations of one intermediary, newest first. INNER JOIN to the parent
         intermediary (a relation without a parent is meaningless); LEFT JOIN to the
         enterprise so rows survive even if the enterprise record is absent. -->
    <select id="selectByIntermediaryBizId" resultMap="CcdiIntermediaryEnterpriseRelationVOResult">
        SELECT
        rel.id,
        rel.intermediary_biz_id,
        parent.name AS intermediary_name,
        parent.person_id AS intermediary_person_id,
        rel.social_credit_code,
        ent.enterprise_name,
        rel.relation_person_post,
        rel.remark,
        rel.create_time
        FROM ccdi_intermediary_enterprise_relation rel
        INNER JOIN ccdi_biz_intermediary parent
        ON rel.intermediary_biz_id = parent.biz_id
        LEFT JOIN ccdi_enterprise_base_info ent
        ON rel.social_credit_code = ent.social_credit_code
        WHERE rel.intermediary_biz_id = #{bizId}
        ORDER BY rel.create_time DESC
    </select>

    <!-- Single relation by primary key, with the same join semantics as above. -->
    <select id="selectDetailById" resultMap="CcdiIntermediaryEnterpriseRelationVOResult">
        SELECT
        rel.id,
        rel.intermediary_biz_id,
        parent.name AS intermediary_name,
        parent.person_id AS intermediary_person_id,
        rel.social_credit_code,
        ent.enterprise_name,
        rel.relation_person_post,
        rel.remark,
        rel.create_time
        FROM ccdi_intermediary_enterprise_relation rel
        INNER JOIN ccdi_biz_intermediary parent
        ON rel.intermediary_biz_id = parent.biz_id
        LEFT JOIN ccdi_enterprise_base_info ent
        ON rel.social_credit_code = ent.social_credit_code
        WHERE rel.id = #{id}
    </select>

    <!-- Duplicate-link check used before inserting a new relation. -->
    <select id="existsByIntermediaryBizIdAndSocialCreditCode" resultType="boolean">
        SELECT COUNT(1) > 0
        FROM ccdi_intermediary_enterprise_relation
        WHERE intermediary_biz_id = #{bizId}
        AND social_credit_code = #{socialCreditCode}
    </select>

</mapper>
|
||||
@@ -0,0 +1,109 @@
|
||||
package com.ruoyi.info.collection.mapper;
|
||||
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiAccountInfoQueryDTO;
|
||||
import org.apache.ibatis.builder.xml.XMLMapperBuilder;
|
||||
import org.apache.ibatis.mapping.BoundSql;
|
||||
import org.apache.ibatis.mapping.Environment;
|
||||
import org.apache.ibatis.mapping.MappedStatement;
|
||||
import org.apache.ibatis.scripting.xmltags.XMLLanguageDriver;
|
||||
import org.apache.ibatis.session.Configuration;
|
||||
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
|
||||
import org.apache.ibatis.type.TypeAliasRegistry;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class CcdiAccountInfoMapperTest {
|
||||
|
||||
private static final String RESOURCE = "mapper/info/collection/CcdiAccountInfoMapper.xml";
|
||||
|
||||
@Test
|
||||
void selectAccountInfoPage_shouldReadAnalysisColumnsFromAccountInfoTableOnly() throws Exception {
|
||||
MappedStatement mappedStatement = loadMappedStatement(
|
||||
"com.ruoyi.info.collection.mapper.CcdiAccountInfoMapper.selectAccountInfoPage");
|
||||
|
||||
String sql = renderSql(mappedStatement, Map.of("query", new CcdiAccountInfoQueryDTO())).toLowerCase();
|
||||
|
||||
assertTrue(sql.contains("from ccdi_account_info ai"), sql);
|
||||
assertFalse(sql.contains("ccdi_account_result"), sql);
|
||||
assertTrue(sql.contains("ai.is_self_account as isactualcontrol"), sql);
|
||||
assertTrue(sql.contains("ai.monthly_avg_trans_count as avgmonthtxncount"), sql);
|
||||
assertTrue(sql.contains("ai.trans_risk_level as txnrisklevel"), sql);
|
||||
}
|
||||
|
||||
private MappedStatement loadMappedStatement(String statementId) throws Exception {
|
||||
Configuration configuration = new Configuration();
|
||||
configuration.setEnvironment(new Environment("test", new JdbcTransactionFactory(), new NoOpDataSource()));
|
||||
registerTypeAliases(configuration.getTypeAliasRegistry());
|
||||
configuration.getLanguageRegistry().register(XMLLanguageDriver.class);
|
||||
configuration.addMapper(CcdiAccountInfoMapper.class);
|
||||
|
||||
try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream(RESOURCE)) {
|
||||
XMLMapperBuilder xmlMapperBuilder =
|
||||
new XMLMapperBuilder(inputStream, configuration, RESOURCE, configuration.getSqlFragments());
|
||||
xmlMapperBuilder.parse();
|
||||
}
|
||||
return configuration.getMappedStatement(statementId);
|
||||
}
|
||||
|
||||
private String renderSql(MappedStatement mappedStatement, Map<String, Object> params) {
|
||||
BoundSql boundSql = mappedStatement.getBoundSql(new HashMap<>(params));
|
||||
return boundSql.getSql().replaceAll("\\s+", " ").trim();
|
||||
}
|
||||
|
||||
private void registerTypeAliases(TypeAliasRegistry typeAliasRegistry) {
|
||||
typeAliasRegistry.registerAlias("map", Map.class);
|
||||
}
|
||||
|
||||
private static class NoOpDataSource implements DataSource {
|
||||
|
||||
@Override
|
||||
public java.sql.Connection getConnection() {
|
||||
throw new UnsupportedOperationException("Not required for SQL rendering tests");
|
||||
}
|
||||
|
||||
@Override
|
||||
public java.sql.Connection getConnection(String username, String password) {
|
||||
throw new UnsupportedOperationException("Not required for SQL rendering tests");
|
||||
}
|
||||
|
||||
@Override
|
||||
public java.io.PrintWriter getLogWriter() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLogWriter(java.io.PrintWriter out) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLoginTimeout(int seconds) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getLoginTimeout() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public java.util.logging.Logger getParentLogger() {
|
||||
return java.util.logging.Logger.getGlobal();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T unwrap(Class<T> iface) {
|
||||
throw new UnsupportedOperationException("Not supported");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isWrapperFor(Class<?> iface) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,6 +17,8 @@ class CcdiBaseStaffMapperTest {
|
||||
|
||||
assertTrue(xml.contains("annual_income"), xml);
|
||||
assertTrue(xml.contains("#{item.annualIncome}"), xml);
|
||||
assertTrue(xml.contains("is_party_member"), xml);
|
||||
assertTrue(xml.contains("#{item.partyMember}"), xml);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,124 @@
|
||||
package com.ruoyi.info.collection.service;
|
||||
|
||||
import com.ruoyi.info.collection.domain.CcdiAccountInfo;
|
||||
import com.ruoyi.info.collection.domain.CcdiBaseStaff;
|
||||
import com.ruoyi.info.collection.domain.dto.CcdiAccountInfoAddDTO;
|
||||
import com.ruoyi.info.collection.mapper.CcdiAccountInfoMapper;
|
||||
import com.ruoyi.info.collection.mapper.CcdiBaseStaffMapper;
|
||||
import com.ruoyi.info.collection.mapper.CcdiStaffFmyRelationMapper;
|
||||
import com.ruoyi.info.collection.service.impl.CcdiAccountInfoServiceImpl;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.beans.BeanWrapperImpl;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Date;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class CcdiAccountInfoServiceImplTest {
|
||||
|
||||
@InjectMocks
|
||||
private CcdiAccountInfoServiceImpl service;
|
||||
|
||||
@Mock
|
||||
private CcdiAccountInfoMapper accountInfoMapper;
|
||||
|
||||
@Mock
|
||||
private CcdiBaseStaffMapper baseStaffMapper;
|
||||
|
||||
@Mock
|
||||
private CcdiStaffFmyRelationMapper staffFmyRelationMapper;
|
||||
|
||||
@Test
|
||||
void insertExternalAccount_shouldPersistAnalysisFieldsOnAccountInfo() {
|
||||
CcdiAccountInfoAddDTO dto = buildBaseAddDto();
|
||||
dto.setOwnerType("EXTERNAL");
|
||||
dto.setOwnerId("330101199001010011");
|
||||
dto.setBankScope("EXTERNAL");
|
||||
dto.setIsActualControl(0);
|
||||
dto.setAvgMonthTxnCount(6);
|
||||
dto.setAvgMonthTxnAmount(new BigDecimal("1234.56"));
|
||||
dto.setTxnFrequencyLevel("HIGH");
|
||||
dto.setDebitSingleMaxAmount(new BigDecimal("100.00"));
|
||||
dto.setCreditSingleMaxAmount(new BigDecimal("200.00"));
|
||||
dto.setDebitDailyMaxAmount(new BigDecimal("300.00"));
|
||||
dto.setCreditDailyMaxAmount(new BigDecimal("400.00"));
|
||||
dto.setTxnRiskLevel("MEDIUM");
|
||||
|
||||
when(accountInfoMapper.selectCount(any())).thenReturn(0L);
|
||||
when(accountInfoMapper.insert(any(CcdiAccountInfo.class))).thenReturn(1);
|
||||
|
||||
service.insertAccountInfo(dto);
|
||||
|
||||
ArgumentCaptor<CcdiAccountInfo> captor = ArgumentCaptor.forClass(CcdiAccountInfo.class);
|
||||
verify(accountInfoMapper).insert(captor.capture());
|
||||
BeanWrapperImpl wrapper = new BeanWrapperImpl(captor.getValue());
|
||||
assertEquals(0, wrapper.getPropertyValue("isActualControl"));
|
||||
assertEquals(6, wrapper.getPropertyValue("avgMonthTxnCount"));
|
||||
assertEquals(new BigDecimal("1234.56"), wrapper.getPropertyValue("avgMonthTxnAmount"));
|
||||
assertEquals("HIGH", wrapper.getPropertyValue("txnFrequencyLevel"));
|
||||
assertEquals("MEDIUM", wrapper.getPropertyValue("txnRiskLevel"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void insertInternalAccount_shouldClearAnalysisFieldsOnAccountInfo() {
|
||||
CcdiAccountInfoAddDTO dto = buildBaseAddDto();
|
||||
dto.setOwnerType("EMPLOYEE");
|
||||
dto.setOwnerId("330101199001010022");
|
||||
dto.setBankScope("INTERNAL");
|
||||
dto.setIsActualControl(1);
|
||||
dto.setAvgMonthTxnCount(8);
|
||||
dto.setAvgMonthTxnAmount(new BigDecimal("9988.66"));
|
||||
dto.setTxnFrequencyLevel("HIGH");
|
||||
dto.setDebitSingleMaxAmount(new BigDecimal("111.11"));
|
||||
dto.setCreditSingleMaxAmount(new BigDecimal("222.22"));
|
||||
dto.setDebitDailyMaxAmount(new BigDecimal("333.33"));
|
||||
dto.setCreditDailyMaxAmount(new BigDecimal("444.44"));
|
||||
dto.setTxnRiskLevel("HIGH");
|
||||
|
||||
CcdiBaseStaff staff = new CcdiBaseStaff();
|
||||
staff.setIdCard(dto.getOwnerId());
|
||||
|
||||
when(baseStaffMapper.selectOne(any())).thenReturn(staff);
|
||||
when(accountInfoMapper.selectCount(any())).thenReturn(0L);
|
||||
when(accountInfoMapper.insert(any(CcdiAccountInfo.class))).thenReturn(1);
|
||||
|
||||
service.insertAccountInfo(dto);
|
||||
|
||||
ArgumentCaptor<CcdiAccountInfo> captor = ArgumentCaptor.forClass(CcdiAccountInfo.class);
|
||||
verify(accountInfoMapper).insert(captor.capture());
|
||||
BeanWrapperImpl wrapper = new BeanWrapperImpl(captor.getValue());
|
||||
assertNull(wrapper.getPropertyValue("isActualControl"));
|
||||
assertNull(wrapper.getPropertyValue("avgMonthTxnCount"));
|
||||
assertNull(wrapper.getPropertyValue("avgMonthTxnAmount"));
|
||||
assertNull(wrapper.getPropertyValue("txnFrequencyLevel"));
|
||||
assertNull(wrapper.getPropertyValue("debitSingleMaxAmount"));
|
||||
assertNull(wrapper.getPropertyValue("creditSingleMaxAmount"));
|
||||
assertNull(wrapper.getPropertyValue("debitDailyMaxAmount"));
|
||||
assertNull(wrapper.getPropertyValue("creditDailyMaxAmount"));
|
||||
assertNull(wrapper.getPropertyValue("txnRiskLevel"));
|
||||
}
|
||||
|
||||
private CcdiAccountInfoAddDTO buildBaseAddDto() {
|
||||
CcdiAccountInfoAddDTO dto = new CcdiAccountInfoAddDTO();
|
||||
dto.setAccountNo("6222024000000001");
|
||||
dto.setAccountType("BANK");
|
||||
dto.setAccountName("测试账户");
|
||||
dto.setOpenBank("中国银行");
|
||||
dto.setBankCode("BOC");
|
||||
dto.setCurrency("CNY");
|
||||
dto.setStatus(1);
|
||||
dto.setEffectiveDate(new Date());
|
||||
return dto;
|
||||
}
|
||||
}
|
||||
@@ -27,6 +27,28 @@ class CcdiBaseStaffImportServiceImplTest {
|
||||
assertDoesNotThrow(() -> service.validateStaffData(buildDto(new BigDecimal("12345.67")), false, Collections.emptySet(), Collections.emptySet()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateStaffData_shouldAllowPartyMemberValuesZeroAndOne() {
|
||||
CcdiBaseStaffAddDTO nonPartyMember = buildDto(null);
|
||||
nonPartyMember.setPartyMember(0);
|
||||
CcdiBaseStaffAddDTO partyMember = buildDto(null);
|
||||
partyMember.setPartyMember(1);
|
||||
|
||||
assertDoesNotThrow(() -> service.validateStaffData(nonPartyMember, false, Collections.emptySet(), Collections.emptySet()));
|
||||
assertDoesNotThrow(() -> service.validateStaffData(partyMember, false, Collections.emptySet(), Collections.emptySet()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateStaffData_shouldRejectInvalidPartyMemberValue() {
|
||||
CcdiBaseStaffAddDTO dto = buildDto(null);
|
||||
dto.setPartyMember(2);
|
||||
|
||||
RuntimeException exception = assertThrows(RuntimeException.class,
|
||||
() -> service.validateStaffData(dto, false, Set.of(), Set.of()));
|
||||
|
||||
assertEquals("是否党员只能填写'0'或'1'", exception.getMessage());
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateStaffData_shouldRejectNegativeAnnualIncome() {
|
||||
RuntimeException exception = assertThrows(RuntimeException.class,
|
||||
@@ -51,6 +73,7 @@ class CcdiBaseStaffImportServiceImplTest {
|
||||
dto.setIdCard("320101199001010014");
|
||||
dto.setPhone("13812345678");
|
||||
dto.setStatus("0");
|
||||
dto.setPartyMember(1);
|
||||
dto.setAnnualIncome(annualIncome);
|
||||
return dto;
|
||||
}
|
||||
|
||||
@@ -55,6 +55,7 @@ class CcdiBaseStaffServiceImplTest {
|
||||
addDTO.setIdCard("320101199001010011");
|
||||
addDTO.setPhone("13812345678");
|
||||
addDTO.setStatus("0");
|
||||
addDTO.setPartyMember(1);
|
||||
addDTO.setAnnualIncome(new BigDecimal("12345.67"));
|
||||
addDTO.setAssetInfoList(List.of(
|
||||
buildAssetDto("房产"),
|
||||
@@ -70,6 +71,7 @@ class CcdiBaseStaffServiceImplTest {
|
||||
assertEquals(1, result);
|
||||
ArgumentCaptor<CcdiBaseStaff> staffCaptor = ArgumentCaptor.forClass(CcdiBaseStaff.class);
|
||||
verify(baseStaffMapper).insert(staffCaptor.capture());
|
||||
assertEquals(1, staffCaptor.getValue().getPartyMember());
|
||||
assertEquals(new BigDecimal("12345.67"), staffCaptor.getValue().getAnnualIncome());
|
||||
ArgumentCaptor<List<CcdiAssetInfoDTO>> captor = ArgumentCaptor.forClass(List.class);
|
||||
verify(assetInfoService).replaceByFamilyId(eq("320101199001010011"), captor.capture());
|
||||
@@ -92,6 +94,7 @@ class CcdiBaseStaffServiceImplTest {
|
||||
editDTO.setIdCard("320101199001010011");
|
||||
editDTO.setPhone("13812345678");
|
||||
editDTO.setStatus("0");
|
||||
editDTO.setPartyMember(0);
|
||||
editDTO.setAnnualIncome(new BigDecimal("45678.90"));
|
||||
editDTO.setAssetInfoList(List.of(buildAssetDto("车辆")));
|
||||
|
||||
@@ -104,6 +107,7 @@ class CcdiBaseStaffServiceImplTest {
|
||||
assertEquals(1, result);
|
||||
ArgumentCaptor<CcdiBaseStaff> staffCaptor = ArgumentCaptor.forClass(CcdiBaseStaff.class);
|
||||
verify(baseStaffMapper).updateById(staffCaptor.capture());
|
||||
assertEquals(0, staffCaptor.getValue().getPartyMember());
|
||||
assertEquals(new BigDecimal("45678.90"), staffCaptor.getValue().getAnnualIncome());
|
||||
verify(assetInfoService, never()).deleteByFamilyId("320101199001010011");
|
||||
verify(assetInfoService).replaceByFamilyId("320101199001010011", editDTO.getAssetInfoList());
|
||||
@@ -122,6 +126,7 @@ class CcdiBaseStaffServiceImplTest {
|
||||
editDTO.setIdCard("320101199001010011");
|
||||
editDTO.setPhone("13812345678");
|
||||
editDTO.setStatus("0");
|
||||
editDTO.setPartyMember(1);
|
||||
editDTO.setAssetInfoList(List.of(buildAssetDto("车辆")));
|
||||
|
||||
when(baseStaffMapper.selectById(1001L)).thenReturn(existing);
|
||||
@@ -135,17 +140,18 @@ class CcdiBaseStaffServiceImplTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void selectBaseStaffById_shouldReturnAssetInfoList() {
|
||||
void selectBaseStaffById_shouldReturnSelfOwnedAssetInfoList() {
|
||||
CcdiBaseStaff staff = new CcdiBaseStaff();
|
||||
staff.setStaffId(1001L);
|
||||
staff.setName("张三");
|
||||
staff.setIdCard("320101199001010011");
|
||||
staff.setStatus("0");
|
||||
staff.setPartyMember(1);
|
||||
staff.setAnnualIncome(new BigDecimal("88888.88"));
|
||||
|
||||
CcdiAssetInfo assetInfo = new CcdiAssetInfo();
|
||||
assetInfo.setFamilyId("320101199001010011");
|
||||
assetInfo.setPersonId("320101199201010022");
|
||||
assetInfo.setPersonId("320101199001010011");
|
||||
assetInfo.setAssetMainType("车辆");
|
||||
assetInfo.setAssetSubType("小汽车");
|
||||
assetInfo.setAssetName("家庭车辆");
|
||||
@@ -153,14 +159,16 @@ class CcdiBaseStaffServiceImplTest {
|
||||
assetInfo.setAssetStatus("正常");
|
||||
|
||||
when(baseStaffMapper.selectById(1001L)).thenReturn(staff);
|
||||
when(assetInfoService.selectByFamilyId("320101199001010011")).thenReturn(List.of(assetInfo));
|
||||
when(assetInfoService.selectByFamilyIdAndPersonId("320101199001010011", "320101199001010011"))
|
||||
.thenReturn(List.of(assetInfo));
|
||||
|
||||
CcdiBaseStaffVO result = service.selectBaseStaffById(1001L);
|
||||
|
||||
assertNotNull(result.getAssetInfoList());
|
||||
assertEquals(1, result.getPartyMember());
|
||||
assertEquals(new BigDecimal("88888.88"), result.getAnnualIncome());
|
||||
assertEquals(1, result.getAssetInfoList().size());
|
||||
assertEquals("320101199201010022", result.getAssetInfoList().get(0).getPersonId());
|
||||
assertEquals("320101199001010011", result.getAssetInfoList().get(0).getPersonId());
|
||||
assertEquals("车辆", result.getAssetInfoList().get(0).getAssetMainType());
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.ruoyi.info.collection.utils;
|
||||
|
||||
import com.ruoyi.common.core.domain.entity.SysDictData;
|
||||
import com.ruoyi.common.utils.DictUtils;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiBaseStaffExcel;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiAssetInfoExcel;
|
||||
import com.ruoyi.info.collection.domain.excel.CcdiStaffFmyRelationExcel;
|
||||
import org.apache.poi.ss.usermodel.CellStyle;
|
||||
@@ -72,6 +73,31 @@ class EasyExcelUtilTemplateTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void importTemplateWithDictDropdown_shouldAddPartyMemberDropdownToBaseStaffTemplate() throws Exception {
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
try (MockedStatic<DictUtils> mocked = mockStatic(DictUtils.class)) {
|
||||
mocked.when(() -> DictUtils.getDictCache("ccdi_employee_status"))
|
||||
.thenReturn(List.of(
|
||||
buildDictData("在职", "0"),
|
||||
buildDictData("离职", "1")
|
||||
));
|
||||
mocked.when(() -> DictUtils.getDictCache("ccdi_yes_no_flag"))
|
||||
.thenReturn(List.of(
|
||||
buildDictData("是", "1"),
|
||||
buildDictData("否", "0")
|
||||
));
|
||||
|
||||
EasyExcelUtil.importTemplateWithDictDropdown(response, CcdiBaseStaffExcel.class, "员工信息");
|
||||
}
|
||||
|
||||
try (Workbook workbook = WorkbookFactory.create(new ByteArrayInputStream(response.getContentAsByteArray()))) {
|
||||
Sheet sheet = workbook.getSheetAt(0);
|
||||
assertTrue(hasValidationOnColumn(sheet, 7), "是否党员列应包含下拉校验");
|
||||
}
|
||||
}
|
||||
|
||||
private void assertTextColumn(Sheet sheet, int columnIndex) {
|
||||
CellStyle style = sheet.getColumnStyle(columnIndex);
|
||||
assertNotNull(style, "文本列应设置默认样式");
|
||||
@@ -90,9 +116,13 @@ class EasyExcelUtilTemplateTest {
|
||||
}
|
||||
|
||||
private SysDictData buildDictData(String label) {
|
||||
return buildDictData(label, label);
|
||||
}
|
||||
|
||||
private SysDictData buildDictData(String label, String value) {
|
||||
SysDictData dictData = new SysDictData();
|
||||
dictData.setDictLabel(label);
|
||||
dictData.setDictValue(label);
|
||||
dictData.setDictValue(value);
|
||||
return dictData;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
package com.ruoyi.lsfx.client;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.ruoyi.lsfx.domain.response.CreditParseResponse;
|
||||
import com.ruoyi.lsfx.exception.LsfxApiException;
|
||||
import com.ruoyi.lsfx.util.HttpUtil;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.mockito.ArgumentMatchers.anyMap;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.ArgumentMatchers.isNull;
|
||||
import static org.mockito.Mockito.argThat;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class CreditParseClientTest {
|
||||
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
@Mock
|
||||
private HttpUtil httpUtil;
|
||||
|
||||
@InjectMocks
|
||||
private CreditParseClient client;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
ReflectionTestUtils.setField(client, "creditParseUrl", "http://credit-host/xfeature-mngs/conversation/htmlEval");
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDeserializeCreditParseResponse() throws Exception {
|
||||
String json = """
|
||||
{
|
||||
"message": "成功",
|
||||
"status_code": "0",
|
||||
"payload": {
|
||||
"lx_header": {"query_cert_no": "3301"},
|
||||
"lx_debt": {"uncle_bank_house_bal": "12.00"},
|
||||
"lx_publictype": {"civil_cnt": 1}
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
CreditParseResponse response = objectMapper.readValue(json, CreditParseResponse.class);
|
||||
|
||||
assertEquals("0", response.getStatusCode());
|
||||
assertEquals("3301", response.getPayload().getLxHeader().get("query_cert_no"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldCallConfiguredUrlWithMultipartParams() {
|
||||
File file = new File("sample.html");
|
||||
CreditParseResponse response = new CreditParseResponse();
|
||||
response.setStatusCode("0");
|
||||
|
||||
when(httpUtil.uploadFile(eq("http://credit-host/xfeature-mngs/conversation/htmlEval"), anyMap(), isNull(), eq(CreditParseResponse.class)))
|
||||
.thenReturn(response);
|
||||
|
||||
CreditParseResponse actual = client.parse("LXCUSTALL", "PERSON", file);
|
||||
|
||||
assertEquals("0", actual.getStatusCode());
|
||||
verify(httpUtil).uploadFile(eq("http://credit-host/xfeature-mngs/conversation/htmlEval"), argThat(params ->
|
||||
"LXCUSTALL".equals(params.get("model"))
|
||||
&& "PERSON".equals(params.get("hType"))
|
||||
&& file.equals(params.get("file"))
|
||||
), isNull(), eq(CreditParseResponse.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldWrapHttpErrorsAsLsfxApiException() {
|
||||
when(httpUtil.uploadFile(anyString(), anyMap(), isNull(), eq(CreditParseResponse.class)))
|
||||
.thenThrow(new LsfxApiException("网络失败"));
|
||||
|
||||
assertThrows(LsfxApiException.class,
|
||||
() -> client.parse("LXCUSTALL", "PERSON", new File("sample.html")));
|
||||
}
|
||||
}
|
||||
@@ -43,6 +43,12 @@
|
||||
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- easyexcel工具 -->
|
||||
<dependency>
|
||||
<groupId>com.alibaba</groupId>
|
||||
<artifactId>easyexcel</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- 测试依赖 -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.ruoyi.ccdi.project.controller;
|
||||
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectEmployeeCreditNegativeQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectPersonAnalysisDetailQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
|
||||
@@ -7,6 +8,7 @@ import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectSuspiciousTransactionQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectRiskPeopleOverviewExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectSuspiciousTransactionExcel;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountPageVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativePageVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectPersonAnalysisDetailVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewDashboardVO;
|
||||
@@ -130,6 +132,17 @@ public class CcdiProjectOverviewController extends BaseController {
|
||||
return AjaxResult.success(pageVO);
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询异常账户人员信息
|
||||
*/
|
||||
@GetMapping("/abnormal-account-people")
|
||||
@Operation(summary = "查询异常账户人员信息")
|
||||
@PreAuthorize("@ss.hasPermi('ccdi:project:query')")
|
||||
public AjaxResult getAbnormalAccountPeople(CcdiProjectAbnormalAccountQueryDTO queryDTO) {
|
||||
CcdiProjectAbnormalAccountPageVO pageVO = overviewService.getAbnormalAccountPeople(queryDTO);
|
||||
return AjaxResult.success(pageVO);
|
||||
}
|
||||
|
||||
/**
|
||||
* 导出涉疑交易明细
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
package com.ruoyi.ccdi.project.domain.dto;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* 异常账户人员信息查询 DTO
|
||||
*/
|
||||
@Data
|
||||
public class CcdiProjectAbnormalAccountQueryDTO {
|
||||
|
||||
/** 项目ID */
|
||||
private Long projectId;
|
||||
|
||||
/** 页码 */
|
||||
private Integer pageNum;
|
||||
|
||||
/** 每页数量 */
|
||||
private Integer pageSize;
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
package com.ruoyi.ccdi.project.domain.excel;
|
||||
|
||||
import com.ruoyi.common.annotation.Excel;
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* 异常账户人员信息导出对象
|
||||
*/
|
||||
@Data
|
||||
public class CcdiProjectAbnormalAccountExcel {
|
||||
|
||||
@Excel(name = "账号")
|
||||
private String accountNo;
|
||||
|
||||
@Excel(name = "开户人")
|
||||
private String accountName;
|
||||
|
||||
@Excel(name = "银行")
|
||||
private String bankName;
|
||||
|
||||
@Excel(name = "异常类型")
|
||||
private String abnormalType;
|
||||
|
||||
@Excel(name = "异常发生时间")
|
||||
private String abnormalTime;
|
||||
|
||||
@Excel(name = "状态")
|
||||
private String status;
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package com.ruoyi.ccdi.project.domain.vo;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* 异常账户人员信息行对象
|
||||
*/
|
||||
@Data
|
||||
public class CcdiProjectAbnormalAccountItemVO {
|
||||
|
||||
private String accountNo;
|
||||
|
||||
private String accountName;
|
||||
|
||||
private String bankName;
|
||||
|
||||
private String abnormalType;
|
||||
|
||||
private String abnormalTime;
|
||||
|
||||
private String status;
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
package com.ruoyi.ccdi.project.domain.vo;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* 异常账户人员信息分页结果
|
||||
*/
|
||||
@Data
|
||||
public class CcdiProjectAbnormalAccountPageVO {
|
||||
|
||||
private List<CcdiProjectAbnormalAccountItemVO> rows = new ArrayList<>();
|
||||
|
||||
private Long total = 0L;
|
||||
}
|
||||
@@ -292,6 +292,22 @@ public interface CcdiBankTagAnalysisMapper {
|
||||
*/
|
||||
List<BankTagObjectHitVO> selectSalaryUnusedObjects(@Param("projectId") Long projectId);
|
||||
|
||||
/**
|
||||
* 突然销户
|
||||
*
|
||||
* @param projectId 项目ID
|
||||
* @return 对象命中结果
|
||||
*/
|
||||
List<BankTagObjectHitVO> selectSuddenAccountClosureObjects(@Param("projectId") Long projectId);
|
||||
|
||||
/**
|
||||
* 休眠账户大额启用
|
||||
*
|
||||
* @param projectId 项目ID
|
||||
* @return 对象命中结果
|
||||
*/
|
||||
List<BankTagObjectHitVO> selectDormantAccountLargeActivationObjects(@Param("projectId") Long projectId);
|
||||
|
||||
/**
|
||||
* 大额炒股
|
||||
*
|
||||
|
||||
@@ -2,10 +2,12 @@ package com.ruoyi.ccdi.project.mapper;
|
||||
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.ccdi.project.domain.CcdiProject;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectEmployeeCreditNegativeQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectSuspiciousTransactionQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiBankStatementListVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativeItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeRiskAggregateVO;
|
||||
@@ -106,6 +108,26 @@ public interface CcdiProjectOverviewMapper {
|
||||
@Param("query") CcdiProjectEmployeeCreditNegativeQueryDTO query
|
||||
);
|
||||
|
||||
/**
|
||||
* 分页查询异常账户人员信息
|
||||
*
|
||||
* @param page 分页参数
|
||||
* @param query 查询条件
|
||||
* @return 分页结果
|
||||
*/
|
||||
Page<CcdiProjectAbnormalAccountItemVO> selectAbnormalAccountPage(
|
||||
Page<CcdiProjectAbnormalAccountItemVO> page,
|
||||
@Param("query") CcdiProjectAbnormalAccountQueryDTO query
|
||||
);
|
||||
|
||||
/**
|
||||
* 查询异常账户人员信息导出列表
|
||||
*
|
||||
* @param projectId 项目ID
|
||||
* @return 导出列表
|
||||
*/
|
||||
List<CcdiProjectAbnormalAccountItemVO> selectAbnormalAccountList(@Param("projectId") Long projectId);
|
||||
|
||||
/**
|
||||
* 查询项目员工负面征信导出列表
|
||||
*
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
package com.ruoyi.ccdi.project.service;
|
||||
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectEmployeeCreditNegativeQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectPersonAnalysisDetailQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectSuspiciousTransactionQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectAbnormalAccountExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectEmployeeCreditNegativeExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectRiskPeopleOverviewExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectSuspiciousTransactionExcel;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountPageVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativePageVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectPersonAnalysisDetailVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewDashboardVO;
|
||||
@@ -144,6 +147,28 @@ public interface ICcdiProjectOverviewService {
|
||||
return new CcdiProjectEmployeeCreditNegativePageVO();
|
||||
}
|
||||
|
||||
/**
|
||||
* 查询异常账户人员信息
|
||||
*
|
||||
* @param queryDTO 查询条件
|
||||
* @return 分页结果
|
||||
*/
|
||||
default CcdiProjectAbnormalAccountPageVO getAbnormalAccountPeople(
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO
|
||||
) {
|
||||
return new CcdiProjectAbnormalAccountPageVO();
|
||||
}
|
||||
|
||||
/**
|
||||
* 导出异常账户人员信息
|
||||
*
|
||||
* @param projectId 项目ID
|
||||
* @return 导出列表
|
||||
*/
|
||||
default List<CcdiProjectAbnormalAccountExcel> exportAbnormalAccountPeople(Long projectId) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* 重算结果总览员工结果并同步项目风险人数
|
||||
*
|
||||
|
||||
@@ -288,6 +288,8 @@ public class CcdiBankTagServiceImpl implements ICcdiBankTagService {
|
||||
case "WITHDRAW_AMT" -> analysisMapper.selectWithdrawAmtObjects(projectId);
|
||||
case "SALARY_QUICK_TRANSFER" -> analysisMapper.selectSalaryQuickTransferObjects(projectId);
|
||||
case "SALARY_UNUSED" -> analysisMapper.selectSalaryUnusedObjects(projectId);
|
||||
case "SUDDEN_ACCOUNT_CLOSURE" -> analysisMapper.selectSuddenAccountClosureObjects(projectId);
|
||||
case "DORMANT_ACCOUNT_LARGE_ACTIVATION" -> analysisMapper.selectDormantAccountLargeActivationObjects(projectId);
|
||||
case "PROXY_ACCOUNT_OPERATION" -> analysisMapper.selectProxyAccountOperationObjects(projectId);
|
||||
default -> List.of();
|
||||
};
|
||||
|
||||
@@ -2,15 +2,19 @@ package com.ruoyi.ccdi.project.service.impl;
|
||||
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.ccdi.project.domain.CcdiProject;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectEmployeeCreditNegativeQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectPersonAnalysisDetailQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectSuspiciousTransactionQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectAbnormalAccountExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectEmployeeCreditNegativeExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectRiskPeopleOverviewExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectSuspiciousTransactionExcel;
|
||||
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountPageVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiBankStatementListVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiBankStatementHitTagVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativeItemVO;
|
||||
@@ -258,6 +262,31 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CcdiProjectAbnormalAccountPageVO getAbnormalAccountPeople(CcdiProjectAbnormalAccountQueryDTO queryDTO) {
|
||||
ensureProjectExists(queryDTO.getProjectId());
|
||||
|
||||
Page<CcdiProjectAbnormalAccountItemVO> page = new Page<>(
|
||||
defaultAbnormalAccountPageNum(queryDTO.getPageNum()),
|
||||
defaultAbnormalAccountPageSize(queryDTO.getPageSize())
|
||||
);
|
||||
Page<CcdiProjectAbnormalAccountItemVO> resultPage = overviewMapper.selectAbnormalAccountPage(page, queryDTO);
|
||||
|
||||
CcdiProjectAbnormalAccountPageVO result = new CcdiProjectAbnormalAccountPageVO();
|
||||
result.setRows(defaultList(resultPage == null ? null : resultPage.getRecords()));
|
||||
result.setTotal(resultPage == null ? 0L : resultPage.getTotal());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<CcdiProjectAbnormalAccountExcel> exportAbnormalAccountPeople(Long projectId) {
|
||||
ensureProjectExists(projectId);
|
||||
|
||||
return defaultList(overviewMapper.selectAbnormalAccountList(projectId)).stream()
|
||||
.map(this::buildAbnormalAccountExcelRow)
|
||||
.toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void exportRiskDetails(HttpServletResponse response, Long projectId) {
|
||||
CcdiProjectSuspiciousTransactionQueryDTO queryDTO = new CcdiProjectSuspiciousTransactionQueryDTO();
|
||||
@@ -266,8 +295,9 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
|
||||
|
||||
List<CcdiProjectSuspiciousTransactionExcel> suspiciousRows = exportSuspiciousTransactions(queryDTO);
|
||||
List<CcdiProjectEmployeeCreditNegativeExcel> creditRows = exportEmployeeCreditNegative(projectId);
|
||||
List<CcdiProjectAbnormalAccountExcel> abnormalRows = exportAbnormalAccountPeople(projectId);
|
||||
try {
|
||||
workbookExporter.export(response, projectId, suspiciousRows, creditRows);
|
||||
workbookExporter.export(response, projectId, suspiciousRows, creditRows, abnormalRows);
|
||||
} catch (IOException e) {
|
||||
throw new ServiceException("导出风险明细失败");
|
||||
}
|
||||
@@ -420,6 +450,14 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
|
||||
return pageSize == null || pageSize <= 0 ? 5L : pageSize.longValue();
|
||||
}
|
||||
|
||||
private long defaultAbnormalAccountPageNum(Integer pageNum) {
|
||||
return pageNum == null || pageNum <= 0 ? 1L : pageNum.longValue();
|
||||
}
|
||||
|
||||
private long defaultAbnormalAccountPageSize(Integer pageSize) {
|
||||
return pageSize == null || pageSize <= 0 ? 5L : pageSize.longValue();
|
||||
}
|
||||
|
||||
private long defaultPageNum(Integer pageNum) {
|
||||
return pageNum == null || pageNum < 1 ? 1L : pageNum.longValue();
|
||||
}
|
||||
@@ -462,6 +500,17 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
|
||||
return row;
|
||||
}
|
||||
|
||||
private CcdiProjectAbnormalAccountExcel buildAbnormalAccountExcelRow(CcdiProjectAbnormalAccountItemVO item) {
|
||||
CcdiProjectAbnormalAccountExcel row = new CcdiProjectAbnormalAccountExcel();
|
||||
row.setAccountNo(item.getAccountNo());
|
||||
row.setAccountName(item.getAccountName());
|
||||
row.setBankName(item.getBankName());
|
||||
row.setAbnormalType(item.getAbnormalType());
|
||||
row.setAbnormalTime(item.getAbnormalTime());
|
||||
row.setStatus(item.getStatus());
|
||||
return row;
|
||||
}
|
||||
|
||||
private String formatRelatedStaff(String relatedStaffName, String relatedStaffCode) {
|
||||
if (relatedStaffName == null || relatedStaffName.isBlank()) {
|
||||
return null;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.ruoyi.ccdi.project.service.impl;
|
||||
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectAbnormalAccountExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectEmployeeCreditNegativeExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectSuspiciousTransactionExcel;
|
||||
import com.ruoyi.common.utils.file.FileUtils;
|
||||
@@ -27,7 +28,8 @@ public class CcdiProjectRiskDetailWorkbookExporter {
|
||||
HttpServletResponse response,
|
||||
Long projectId,
|
||||
List<CcdiProjectSuspiciousTransactionExcel> suspiciousRows,
|
||||
List<CcdiProjectEmployeeCreditNegativeExcel> creditRows
|
||||
List<CcdiProjectEmployeeCreditNegativeExcel> creditRows,
|
||||
List<CcdiProjectAbnormalAccountExcel> abnormalRows
|
||||
) throws IOException {
|
||||
response.setContentType(CONTENT_TYPE);
|
||||
FileUtils.setAttachmentResponseHeader(response, "风险明细_" + projectId + ".xlsx");
|
||||
@@ -35,7 +37,7 @@ public class CcdiProjectRiskDetailWorkbookExporter {
|
||||
try (Workbook workbook = new XSSFWorkbook()) {
|
||||
writeSuspiciousSheet(workbook.createSheet("涉疑交易明细"), suspiciousRows);
|
||||
writeCreditSheet(workbook.createSheet("员工负面征信信息"), creditRows);
|
||||
writeAbnormalAccountSheet(workbook.createSheet("异常账户人员信息"));
|
||||
writeAbnormalAccountSheet(workbook.createSheet("异常账户人员信息"), abnormalRows);
|
||||
workbook.write(response.getOutputStream());
|
||||
}
|
||||
}
|
||||
@@ -88,10 +90,21 @@ public class CcdiProjectRiskDetailWorkbookExporter {
|
||||
}
|
||||
}
|
||||
|
||||
private void writeAbnormalAccountSheet(Sheet sheet) {
|
||||
private void writeAbnormalAccountSheet(Sheet sheet, List<CcdiProjectAbnormalAccountExcel> rows) {
|
||||
Row header = sheet.createRow(0);
|
||||
String[] headers = { "账号", "开户人", "银行", "异常类型", "异常发生时间", "状态" };
|
||||
writeHeader(header, headers);
|
||||
|
||||
for (int i = 0; i < rows.size(); i++) {
|
||||
CcdiProjectAbnormalAccountExcel item = rows.get(i);
|
||||
Row row = sheet.createRow(i + 1);
|
||||
row.createCell(0).setCellValue(safeText(item.getAccountNo()));
|
||||
row.createCell(1).setCellValue(safeText(item.getAccountName()));
|
||||
row.createCell(2).setCellValue(safeText(item.getBankName()));
|
||||
row.createCell(3).setCellValue(safeText(item.getAbnormalType()));
|
||||
row.createCell(4).setCellValue(safeText(item.getAbnormalTime()));
|
||||
row.createCell(5).setCellValue(safeText(item.getStatus()));
|
||||
}
|
||||
}
|
||||
|
||||
private void writeHeader(Row row, String[] headers) {
|
||||
|
||||
@@ -1211,6 +1211,101 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
) t
|
||||
</select>
|
||||
|
||||
<select id="selectSuddenAccountClosureObjects" resultMap="BankTagObjectHitResultMap">
|
||||
select
|
||||
'STAFF_ID_CARD' AS objectType,
|
||||
t.objectKey AS objectKey,
|
||||
CONCAT(
|
||||
'账户', t.accountNo,
|
||||
'于', DATE_FORMAT(t.invalidDate, '%Y-%m-%d'),
|
||||
'销户,销户前30天内最后交易日', DATE_FORMAT(t.lastTxDate, '%Y-%m-%d'),
|
||||
',累计交易金额', CAST(t.windowTotalAmount AS CHAR),
|
||||
'元,单笔最大金额', CAST(t.windowMaxSingleAmount AS CHAR),
|
||||
'元'
|
||||
) AS reasonDetail
|
||||
from (
|
||||
select
|
||||
staff.id_card AS objectKey,
|
||||
ai.account_no AS accountNo,
|
||||
ai.invalid_date AS invalidDate,
|
||||
max(tx.txDate) AS lastTxDate,
|
||||
round(sum(tx.tradeTotalAmount), 2) AS windowTotalAmount,
|
||||
round(max(tx.tradeMaxSingleAmount), 2) AS windowMaxSingleAmount
|
||||
from ccdi_account_info ai
|
||||
inner join ccdi_base_staff staff
|
||||
on staff.id_card = ai.owner_id
|
||||
inner join (
|
||||
select
|
||||
trim(bs.LE_ACCOUNT_NO) AS accountNo,
|
||||
COALESCE(
|
||||
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
|
||||
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
|
||||
) AS txDate,
|
||||
IFNULL(bs.AMOUNT_DR, 0) + IFNULL(bs.AMOUNT_CR, 0) AS tradeTotalAmount,
|
||||
GREATEST(IFNULL(bs.AMOUNT_DR, 0), IFNULL(bs.AMOUNT_CR, 0)) AS tradeMaxSingleAmount
|
||||
from ccdi_bank_statement bs
|
||||
where bs.project_id = #{projectId}
|
||||
and trim(IFNULL(bs.LE_ACCOUNT_NO, '')) != ''
|
||||
) tx
|
||||
on tx.accountNo = trim(ai.account_no)
|
||||
where ai.owner_type = 'EMPLOYEE'
|
||||
and ai.status = 2
|
||||
and ai.invalid_date is not null
|
||||
and tx.txDate >= DATE_SUB(ai.invalid_date, INTERVAL 30 DAY)
|
||||
and tx.txDate < ai.invalid_date
|
||||
group by staff.id_card, ai.account_no, ai.invalid_date
|
||||
) t
|
||||
</select>
|
||||
|
||||
<select id="selectDormantAccountLargeActivationObjects" resultMap="BankTagObjectHitResultMap">
|
||||
select
|
||||
'STAFF_ID_CARD' AS objectType,
|
||||
t.objectKey AS objectKey,
|
||||
CONCAT(
|
||||
'账户', t.accountNo,
|
||||
'开户于', DATE_FORMAT(t.effectiveDate, '%Y-%m-%d'),
|
||||
',首次交易日期', DATE_FORMAT(t.firstTxDate, '%Y-%m-%d'),
|
||||
',沉睡时长', CAST(t.dormantMonths AS CHAR),
|
||||
'个月,启用后累计交易金额', CAST(t.windowTotalAmount AS CHAR),
|
||||
'元,单笔最大金额', CAST(t.windowMaxSingleAmount AS CHAR),
|
||||
'元'
|
||||
) AS reasonDetail
|
||||
from (
|
||||
select
|
||||
staff.id_card AS objectKey,
|
||||
ai.account_no AS accountNo,
|
||||
ai.effective_date AS effectiveDate,
|
||||
min(tx.txDate) AS firstTxDate,
|
||||
timestampdiff(MONTH, ai.effective_date, min(tx.txDate)) AS dormantMonths,
|
||||
round(sum(tx.tradeTotalAmount), 2) AS windowTotalAmount,
|
||||
round(max(tx.tradeMaxSingleAmount), 2) AS windowMaxSingleAmount
|
||||
from ccdi_account_info ai
|
||||
inner join ccdi_base_staff staff
|
||||
on staff.id_card = ai.owner_id
|
||||
inner join (
|
||||
select
|
||||
trim(bs.LE_ACCOUNT_NO) AS accountNo,
|
||||
COALESCE(
|
||||
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
|
||||
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
|
||||
) AS txDate,
|
||||
IFNULL(bs.AMOUNT_DR, 0) + IFNULL(bs.AMOUNT_CR, 0) AS tradeTotalAmount,
|
||||
GREATEST(IFNULL(bs.AMOUNT_DR, 0), IFNULL(bs.AMOUNT_CR, 0)) AS tradeMaxSingleAmount
|
||||
from ccdi_bank_statement bs
|
||||
where bs.project_id = #{projectId}
|
||||
and trim(IFNULL(bs.LE_ACCOUNT_NO, '')) != ''
|
||||
) tx
|
||||
on tx.accountNo = trim(ai.account_no)
|
||||
where ai.owner_type = 'EMPLOYEE'
|
||||
and ai.status = 1
|
||||
and ai.effective_date is not null
|
||||
group by staff.id_card, ai.account_no, ai.effective_date
|
||||
having min(tx.txDate) >= DATE_ADD(ai.effective_date, INTERVAL 6 MONTH)
|
||||
) t
|
||||
where t.windowTotalAmount >= 500000
|
||||
or t.windowMaxSingleAmount >= 100000
|
||||
</select>
|
||||
|
||||
<select id="selectLargeStockTradingStatements" resultMap="BankTagStatementHitResultMap">
|
||||
select
|
||||
bs.bank_statement_id AS bankStatementId,
|
||||
|
||||
@@ -48,6 +48,15 @@
|
||||
<result property="hasNameListHit" column="hasNameListHit"/>
|
||||
</resultMap>
|
||||
|
||||
<resultMap id="AbnormalAccountItemResultMap" type="com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountItemVO">
|
||||
<result property="accountNo" column="accountNo"/>
|
||||
<result property="accountName" column="accountName"/>
|
||||
<result property="bankName" column="bankName"/>
|
||||
<result property="abnormalType" column="abnormalType"/>
|
||||
<result property="abnormalTime" column="abnormal_time"/>
|
||||
<result property="status" column="status"/>
|
||||
</resultMap>
|
||||
|
||||
<sql id="digitTableSql">
|
||||
select 0 as digit
|
||||
union all select 1
|
||||
@@ -644,6 +653,92 @@
|
||||
order by neg.query_date desc, neg.person_id asc
|
||||
</select>
|
||||
|
||||
<sql id="abnormalAccountBaseSql">
|
||||
select
|
||||
account.account_no as accountNo,
|
||||
account.account_no as account_no,
|
||||
coalesce(nullif(account.account_name, ''), staff.name) as accountName,
|
||||
account.bank as bankName,
|
||||
tr.rule_name as abnormalType,
|
||||
tr.rule_code as rule_code,
|
||||
case
|
||||
when tr.rule_code = 'SUDDEN_ACCOUNT_CLOSURE' then date_format(account.invalid_date, '%Y-%m-%d')
|
||||
when tr.rule_code = 'DORMANT_ACCOUNT_LARGE_ACTIVATION' then substring(
|
||||
substring_index(
|
||||
substring_index(tr.reason_detail, ',', 2),
|
||||
'首次交易日期',
|
||||
-1
|
||||
),
|
||||
1,
|
||||
10
|
||||
)
|
||||
else null
|
||||
end as abnormal_time,
|
||||
case
|
||||
when account.status = 1 then '正常'
|
||||
when account.status = 2 then '已销户'
|
||||
else cast(account.status as char)
|
||||
end as status
|
||||
from ccdi_bank_statement_tag_result tr
|
||||
inner join ccdi_account_info account
|
||||
on account.owner_type = 'EMPLOYEE'
|
||||
and account.owner_id = tr.object_key
|
||||
and instr(tr.reason_detail, account.account_no) > 0
|
||||
left join ccdi_base_staff staff
|
||||
on staff.id_card = tr.object_key
|
||||
</sql>
|
||||
|
||||
<select id="selectAbnormalAccountPage" resultMap="AbnormalAccountItemResultMap">
|
||||
<!-- tr.model_code = 'ABNORMAL_ACCOUNT' -->
|
||||
<!-- tr.bank_statement_id is null -->
|
||||
<!-- account.owner_type = 'EMPLOYEE' -->
|
||||
<!-- tr.reason_detail -->
|
||||
<!-- instr(tr.reason_detail, account.account_no) > 0 -->
|
||||
<!-- when account.status = 1 then '正常' -->
|
||||
<!-- when account.status = 2 then '已销户' -->
|
||||
<!-- when tr.rule_code = 'SUDDEN_ACCOUNT_CLOSURE' -->
|
||||
<!-- when tr.rule_code = 'DORMANT_ACCOUNT_LARGE_ACTIVATION' -->
|
||||
<!-- order by abnormal_time desc, account.account_no asc, tr.rule_code asc -->
|
||||
select
|
||||
abnormal.accountNo,
|
||||
abnormal.accountName,
|
||||
abnormal.bankName,
|
||||
abnormal.abnormalType,
|
||||
abnormal.abnormal_time,
|
||||
abnormal.status
|
||||
from (
|
||||
<include refid="abnormalAccountBaseSql"/>
|
||||
where tr.project_id = #{query.projectId}
|
||||
and tr.model_code = 'ABNORMAL_ACCOUNT'
|
||||
and tr.bank_statement_id is null
|
||||
) abnormal
|
||||
<!-- order by abnormal_time desc, account.account_no asc, tr.rule_code asc -->
|
||||
order by abnormal.abnormal_time desc, abnormal.account_no asc, abnormal.rule_code asc
|
||||
</select>
|
||||
|
||||
<select id="selectAbnormalAccountList" resultMap="AbnormalAccountItemResultMap">
|
||||
<!-- tr.model_code = 'ABNORMAL_ACCOUNT' -->
|
||||
<!-- tr.bank_statement_id is null -->
|
||||
<!-- account.owner_type = 'EMPLOYEE' -->
|
||||
<!-- tr.reason_detail -->
|
||||
<!-- order by abnormal_time desc, account.account_no asc, tr.rule_code asc -->
|
||||
select
|
||||
abnormal.accountNo,
|
||||
abnormal.accountName,
|
||||
abnormal.bankName,
|
||||
abnormal.abnormalType,
|
||||
abnormal.abnormal_time,
|
||||
abnormal.status
|
||||
from (
|
||||
<include refid="abnormalAccountBaseSql"/>
|
||||
where tr.project_id = #{projectId}
|
||||
and tr.model_code = 'ABNORMAL_ACCOUNT'
|
||||
and tr.bank_statement_id is null
|
||||
) abnormal
|
||||
<!-- order by abnormal_time desc, account.account_no asc, tr.rule_code asc -->
|
||||
order by abnormal.abnormal_time desc, abnormal.account_no asc, abnormal.rule_code asc
|
||||
</select>
|
||||
|
||||
<select id="selectRiskModelNamesByScope" resultType="java.lang.String">
|
||||
select
|
||||
json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelName'))) as model_name
|
||||
|
||||
@@ -9,6 +9,7 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
@@ -126,6 +127,26 @@ class CcdiProjectOverviewControllerContractTest {
|
||||
assertEquals(AjaxResult.class, method.getReturnType());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldExposeAbnormalAccountPeopleEndpointContract() throws Exception {
|
||||
Class<?> controllerClass = Class.forName("com.ruoyi.ccdi.project.controller.CcdiProjectOverviewController");
|
||||
Class<?> queryDtoClass =
|
||||
Class.forName("com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO");
|
||||
|
||||
Method method = controllerClass.getMethod("getAbnormalAccountPeople", queryDtoClass);
|
||||
GetMapping getMapping = method.getAnnotation(GetMapping.class);
|
||||
Operation operation = method.getAnnotation(Operation.class);
|
||||
PreAuthorize preAuthorize = method.getAnnotation(PreAuthorize.class);
|
||||
|
||||
assertNotNull(getMapping);
|
||||
assertEquals("/abnormal-account-people", getMapping.value()[0]);
|
||||
assertNotNull(operation);
|
||||
assertEquals("查询异常账户人员信息", operation.summary());
|
||||
assertNotNull(preAuthorize);
|
||||
assertEquals("@ss.hasPermi('ccdi:project:query')", preAuthorize.value());
|
||||
assertEquals(queryDtoClass, method.getParameterTypes()[0]);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldExposeSuspiciousTransactionsExportEndpointContract() throws Exception {
|
||||
Class<?> controllerClass = Class.forName("com.ruoyi.ccdi.project.controller.CcdiProjectOverviewController");
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
package com.ruoyi.ccdi.project.controller;
|
||||
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectEmployeeCreditNegativeQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectPersonAnalysisDetailQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectSuspiciousTransactionQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountPageVO;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectRiskPeopleOverviewExcel;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativeItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativePageVO;
|
||||
@@ -244,6 +246,36 @@ class CcdiProjectOverviewControllerTest {
|
||||
assertNotNull(operation);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldExposeAbnormalAccountPeopleEndpoint() throws Exception {
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO = new CcdiProjectAbnormalAccountQueryDTO();
|
||||
queryDTO.setProjectId(40L);
|
||||
|
||||
CcdiProjectAbnormalAccountPageVO pageVO = new CcdiProjectAbnormalAccountPageVO();
|
||||
when(overviewService.getAbnormalAccountPeople(queryDTO)).thenReturn(pageVO);
|
||||
|
||||
AjaxResult result = controller.getAbnormalAccountPeople(queryDTO);
|
||||
|
||||
assertEquals(200, result.get("code"));
|
||||
assertEquals(pageVO, result.get("data"));
|
||||
verify(overviewService).getAbnormalAccountPeople(same(queryDTO));
|
||||
|
||||
Method method = CcdiProjectOverviewController.class.getMethod(
|
||||
"getAbnormalAccountPeople",
|
||||
CcdiProjectAbnormalAccountQueryDTO.class
|
||||
);
|
||||
GetMapping getMapping = method.getAnnotation(GetMapping.class);
|
||||
PreAuthorize preAuthorize = method.getAnnotation(PreAuthorize.class);
|
||||
Operation operation = method.getAnnotation(Operation.class);
|
||||
|
||||
assertNotNull(getMapping);
|
||||
assertEquals("/abnormal-account-people", getMapping.value()[0]);
|
||||
assertNotNull(preAuthorize);
|
||||
assertEquals("@ss.hasPermi('ccdi:project:query')", preAuthorize.value());
|
||||
assertNotNull(operation);
|
||||
assertEquals("查询异常账户人员信息", operation.summary());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldExposeSuspiciousTransactionsExportEndpoint() throws Exception {
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
|
||||
@@ -121,6 +121,36 @@ class CcdiProjectOverviewMapperSqlTest {
|
||||
assertFalse(employeeCreditExportSql.contains("ccdi_debts_info"), employeeCreditExportSql);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldExposeAbnormalAccountQueries() throws Exception {
|
||||
String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml"));
|
||||
String abnormalPageSql = extractSelect(xml, "selectAbnormalAccountPage");
|
||||
String abnormalExportSql = extractSelect(xml, "selectAbnormalAccountList");
|
||||
|
||||
assertTrue(abnormalPageSql.contains("tr.model_code = 'ABNORMAL_ACCOUNT'"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("tr.bank_statement_id is null"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("account.owner_type = 'EMPLOYEE'"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("tr.reason_detail"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("instr(tr.reason_detail, account.account_no) > 0"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("when account.status = 1 then '正常'"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("when account.status = 2 then '已销户'"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("when tr.rule_code = 'SUDDEN_ACCOUNT_CLOSURE'"), abnormalPageSql);
|
||||
assertTrue(abnormalPageSql.contains("when tr.rule_code = 'DORMANT_ACCOUNT_LARGE_ACTIVATION'"), abnormalPageSql);
|
||||
assertTrue(
|
||||
abnormalPageSql.contains("order by abnormal_time desc, account.account_no asc, tr.rule_code asc"),
|
||||
abnormalPageSql
|
||||
);
|
||||
|
||||
assertTrue(abnormalExportSql.contains("tr.model_code = 'ABNORMAL_ACCOUNT'"), abnormalExportSql);
|
||||
assertTrue(abnormalExportSql.contains("tr.bank_statement_id is null"), abnormalExportSql);
|
||||
assertTrue(abnormalExportSql.contains("account.owner_type = 'EMPLOYEE'"), abnormalExportSql);
|
||||
assertTrue(abnormalExportSql.contains("tr.reason_detail"), abnormalExportSql);
|
||||
assertTrue(
|
||||
abnormalExportSql.contains("order by abnormal_time desc, account.account_no asc, tr.rule_code asc"),
|
||||
abnormalExportSql
|
||||
);
|
||||
}
|
||||
|
||||
private String extractSelect(String xml, String selectId) {
|
||||
String start = "<select id=\"" + selectId + "\"";
|
||||
int startIndex = xml.indexOf(start);
|
||||
|
||||
@@ -28,6 +28,8 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.math.BigDecimal;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
@@ -406,6 +408,112 @@ class CcdiBankTagServiceImplTest {
|
||||
verify(analysisMapper).selectSalaryUnusedObjects(40L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rebuildProject_shouldDispatchSuddenAccountClosureObjectRule() {
|
||||
ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);
|
||||
|
||||
CcdiBankTagRule rule = buildRule("ABNORMAL_ACCOUNT", "异常账户",
|
||||
"SUDDEN_ACCOUNT_CLOSURE", "突然销户", "OBJECT");
|
||||
|
||||
when(ruleMapper.selectEnabledRules("ABNORMAL_ACCOUNT")).thenReturn(List.of(rule));
|
||||
when(configResolver.resolve(40L, rule)).thenReturn(buildConfig(40L, rule));
|
||||
when(analysisMapper.selectSuddenAccountClosureObjects(40L)).thenReturn(List.of());
|
||||
|
||||
service.rebuildProject(40L, "ABNORMAL_ACCOUNT", "admin", TriggerType.MANUAL);
|
||||
|
||||
verify(analysisMapper).selectSuddenAccountClosureObjects(40L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rebuildProject_shouldDispatchDormantAccountLargeActivationObjectRule() {
|
||||
ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);
|
||||
|
||||
CcdiBankTagRule rule = buildRule("ABNORMAL_ACCOUNT", "异常账户",
|
||||
"DORMANT_ACCOUNT_LARGE_ACTIVATION", "休眠账户大额启用", "OBJECT");
|
||||
|
||||
when(ruleMapper.selectEnabledRules("ABNORMAL_ACCOUNT")).thenReturn(List.of(rule));
|
||||
when(configResolver.resolve(40L, rule)).thenReturn(buildConfig(40L, rule));
|
||||
when(analysisMapper.selectDormantAccountLargeActivationObjects(40L)).thenReturn(List.of());
|
||||
|
||||
service.rebuildProject(40L, "ABNORMAL_ACCOUNT", "admin", TriggerType.MANUAL);
|
||||
|
||||
verify(analysisMapper).selectDormantAccountLargeActivationObjects(40L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rebuildProject_shouldInsertSuddenAccountClosureObjectResults() {
|
||||
ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);
|
||||
|
||||
CcdiBankTagRule rule = buildRule("ABNORMAL_ACCOUNT", "异常账户",
|
||||
"SUDDEN_ACCOUNT_CLOSURE", "突然销户", "OBJECT");
|
||||
BankTagRuleExecutionConfig config = buildConfig(40L, rule);
|
||||
|
||||
BankTagObjectHitVO hit = new BankTagObjectHitVO();
|
||||
hit.setObjectType("STAFF_ID_CARD");
|
||||
hit.setObjectKey("330101199001011234");
|
||||
hit.setReasonDetail("账户62220001于2026-03-15销户,销户前30天内最后交易日2026-03-10,累计交易金额120000元,单笔最大金额80000元");
|
||||
|
||||
when(ruleMapper.selectEnabledRules("ABNORMAL_ACCOUNT")).thenReturn(List.of(rule));
|
||||
when(configResolver.resolve(40L, rule)).thenReturn(config);
|
||||
when(analysisMapper.selectSuddenAccountClosureObjects(40L)).thenReturn(List.of(hit));
|
||||
|
||||
service.rebuildProject(40L, "ABNORMAL_ACCOUNT", "admin", TriggerType.MANUAL);
|
||||
|
||||
verify(resultMapper).insertBatch(argThat(results -> results.stream().anyMatch(item ->
|
||||
"ABNORMAL_ACCOUNT".equals(item.getModelCode())
|
||||
&& "SUDDEN_ACCOUNT_CLOSURE".equals(item.getRuleCode())
|
||||
&& "OBJECT".equals(item.getResultType())
|
||||
&& "STAFF_ID_CARD".equals(item.getObjectType())
|
||||
)));
|
||||
}
|
||||
|
||||
@Test
|
||||
void rebuildProject_shouldInsertDormantAccountLargeActivationObjectResults() {
|
||||
ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);
|
||||
|
||||
CcdiBankTagRule rule = buildRule("ABNORMAL_ACCOUNT", "异常账户",
|
||||
"DORMANT_ACCOUNT_LARGE_ACTIVATION", "休眠账户大额启用", "OBJECT");
|
||||
BankTagRuleExecutionConfig config = buildConfig(40L, rule);
|
||||
|
||||
BankTagObjectHitVO hit = new BankTagObjectHitVO();
|
||||
hit.setObjectType("STAFF_ID_CARD");
|
||||
hit.setObjectKey("330101199001011235");
|
||||
hit.setReasonDetail("账户62220002开户于2025-01-01,首次交易日期2025-08-01,沉睡时长7个月,启用后累计交易金额500000元,单笔最大金额120000元");
|
||||
|
||||
when(ruleMapper.selectEnabledRules("ABNORMAL_ACCOUNT")).thenReturn(List.of(rule));
|
||||
when(configResolver.resolve(40L, rule)).thenReturn(config);
|
||||
when(analysisMapper.selectDormantAccountLargeActivationObjects(40L)).thenReturn(List.of(hit));
|
||||
|
||||
service.rebuildProject(40L, "ABNORMAL_ACCOUNT", "admin", TriggerType.MANUAL);
|
||||
|
||||
verify(resultMapper).insertBatch(argThat(results -> results.stream().anyMatch(item ->
|
||||
"ABNORMAL_ACCOUNT".equals(item.getModelCode())
|
||||
&& "DORMANT_ACCOUNT_LARGE_ACTIVATION".equals(item.getRuleCode())
|
||||
&& "OBJECT".equals(item.getResultType())
|
||||
&& "STAFF_ID_CARD".equals(item.getObjectType())
|
||||
)));
|
||||
}
|
||||
|
||||
@Test
|
||||
void abnormalAccountMapperXml_shouldDeclareObjectSelects() throws Exception {
|
||||
String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml"));
|
||||
|
||||
assertTrue(xml.contains("select id=\"selectSuddenAccountClosureObjects\""));
|
||||
assertTrue(xml.contains("select id=\"selectDormantAccountLargeActivationObjects\""));
|
||||
}
|
||||
|
||||
@Test
|
||||
void dormantAccountLargeActivationMapperXml_shouldContainDormantAccountConditions() throws Exception {
|
||||
String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml"));
|
||||
|
||||
assertTrue(xml.contains("select id=\"selectDormantAccountLargeActivationObjects\""));
|
||||
assertTrue(xml.contains("ai.owner_type = 'EMPLOYEE'"));
|
||||
assertTrue(xml.contains("ai.status = 1"));
|
||||
assertTrue(xml.contains("ai.effective_date is not null"));
|
||||
assertTrue(xml.contains("DATE_ADD(ai.effective_date, INTERVAL 6 MONTH)"));
|
||||
assertTrue(xml.contains("windowTotalAmount >= 500000") || xml.contains("windowMaxSingleAmount >= 100000"));
|
||||
}
|
||||
|
||||
private CcdiBankTagRule buildRule(String modelCode, String modelName, String ruleCode, String ruleName, String resultType) {
|
||||
CcdiBankTagRule rule = new CcdiBankTagRule();
|
||||
rule.setModelCode(modelCode);
|
||||
|
||||
@@ -12,6 +12,7 @@ import java.util.Map;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class CcdiProjectOverviewEmployeeResultBuilderTest {
|
||||
|
||||
@@ -38,7 +39,11 @@ class CcdiProjectOverviewEmployeeResultBuilderTest {
|
||||
buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
|
||||
"SUSPICIOUS_PART_TIME", "可疑兼职", "MONTHLY_FIXED_INCOME", "疑似兼职", "MEDIUM"),
|
||||
buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
|
||||
"SUSPICIOUS_PROPERTY", "可疑财产", "HOUSE_REGISTRATION_MISMATCH", "房产登记不匹配", "LOW")
|
||||
"SUSPICIOUS_PROPERTY", "可疑财产", "HOUSE_REGISTRATION_MISMATCH", "房产登记不匹配", "LOW"),
|
||||
buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
|
||||
"ABNORMAL_ACCOUNT", "异常账户", "SUDDEN_ACCOUNT_CLOSURE", "突然销户", "HIGH"),
|
||||
buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
|
||||
"ABNORMAL_ACCOUNT", "异常账户", "DORMANT_ACCOUNT_LARGE_ACTIVATION", "休眠账户大额启用", "HIGH")
|
||||
);
|
||||
|
||||
List<CcdiProjectOverviewEmployeeResult> results =
|
||||
@@ -52,20 +57,22 @@ class CcdiProjectOverviewEmployeeResultBuilderTest {
|
||||
assertEquals("E1001", result.getStaffCode());
|
||||
assertEquals(12L, result.getDeptId());
|
||||
assertEquals("信息二部", result.getDeptName());
|
||||
assertEquals(5, result.getRuleCount());
|
||||
assertEquals(4, result.getModelCount());
|
||||
assertEquals(7, result.getHitCount());
|
||||
assertEquals(7, result.getRuleCount());
|
||||
assertEquals(5, result.getModelCount());
|
||||
assertEquals(9, result.getHitCount());
|
||||
assertEquals("HIGH", result.getRiskLevelCode());
|
||||
assertEquals("ABNORMAL_TRANSACTION,LARGE_TRANSACTION,SUSPICIOUS_PART_TIME,SUSPICIOUS_PROPERTY",
|
||||
assertEquals("ABNORMAL_ACCOUNT,ABNORMAL_TRANSACTION,LARGE_TRANSACTION,SUSPICIOUS_PART_TIME,SUSPICIOUS_PROPERTY",
|
||||
result.getModelCodesCsv());
|
||||
assertNotNull(result.getRiskPoint());
|
||||
|
||||
JSONArray modelNames = JSON.parseArray(result.getModelNamesJson());
|
||||
assertEquals(List.of("异常交易", "大额交易", "可疑兼职", "可疑财产"),
|
||||
assertEquals(List.of("异常账户", "异常交易", "大额交易", "可疑兼职", "可疑财产"),
|
||||
modelNames.toList(String.class));
|
||||
|
||||
JSONArray hitRules = JSON.parseArray(result.getHitRulesJson());
|
||||
assertEquals(5, hitRules.size());
|
||||
assertEquals(7, hitRules.size());
|
||||
assertTrue(result.getHitRulesJson().contains("SUDDEN_ACCOUNT_CLOSURE"));
|
||||
assertTrue(result.getHitRulesJson().contains("DORMANT_ACCOUNT_LARGE_ACTIVATION"));
|
||||
JSONObject firstRule = hitRules.getJSONObject(0);
|
||||
assertEquals("ABNORMAL_CUSTOMER_TRANSACTION", firstRule.getString("ruleCode"));
|
||||
assertEquals("异常客户交易", firstRule.getString("ruleName"));
|
||||
@@ -78,6 +85,7 @@ class CcdiProjectOverviewEmployeeResultBuilderTest {
|
||||
item -> item.getString("modelCode"),
|
||||
item -> item.getIntValue("warningCount")
|
||||
));
|
||||
assertEquals(2, warningCountByModel.get("ABNORMAL_ACCOUNT"));
|
||||
assertEquals(2, warningCountByModel.get("ABNORMAL_TRANSACTION"));
|
||||
assertEquals(3, warningCountByModel.get("LARGE_TRANSACTION"));
|
||||
assertEquals(1, warningCountByModel.get("SUSPICIOUS_PROPERTY"));
|
||||
|
||||
@@ -0,0 +1,148 @@
|
||||
package com.ruoyi.ccdi.project.service.impl;
|
||||
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.ruoyi.ccdi.project.domain.CcdiProject;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectAbnormalAccountQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectAbnormalAccountExcel;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountPageVO;
|
||||
import com.ruoyi.ccdi.project.mapper.CcdiBankTagResultMapper;
|
||||
import com.ruoyi.ccdi.project.mapper.CcdiProjectMapper;
|
||||
import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewEmployeeResultMapper;
|
||||
import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewMapper;
|
||||
import com.ruoyi.common.exception.ServiceException;
|
||||
import java.util.List;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.argThat;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class CcdiProjectOverviewServiceAbnormalAccountTest {
|
||||
|
||||
@InjectMocks
|
||||
private CcdiProjectOverviewServiceImpl service;
|
||||
|
||||
@Mock
|
||||
private CcdiProjectOverviewMapper overviewMapper;
|
||||
|
||||
@Mock
|
||||
private CcdiProjectMapper projectMapper;
|
||||
|
||||
@Mock
|
||||
private CcdiProjectOverviewEmployeeResultMapper overviewEmployeeResultMapper;
|
||||
|
||||
@Mock
|
||||
private CcdiBankTagResultMapper bankTagResultMapper;
|
||||
|
||||
@Mock
|
||||
private CcdiProjectOverviewEmployeeResultBuilder overviewEmployeeResultBuilder;
|
||||
|
||||
@Mock
|
||||
private CcdiProjectRiskDetailWorkbookExporter workbookExporter;
|
||||
|
||||
@Test
|
||||
void shouldMapAbnormalAccountPageRowsAndTotal() {
|
||||
mockProjectExists(40L);
|
||||
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO = new CcdiProjectAbnormalAccountQueryDTO();
|
||||
queryDTO.setProjectId(40L);
|
||||
queryDTO.setPageNum(1);
|
||||
queryDTO.setPageSize(5);
|
||||
|
||||
CcdiProjectAbnormalAccountItemVO item = new CcdiProjectAbnormalAccountItemVO();
|
||||
item.setAccountNo("6222000000000001");
|
||||
item.setAccountName("李四");
|
||||
item.setBankName("中国农业银行");
|
||||
item.setAbnormalType("突然销户");
|
||||
item.setAbnormalTime("2026-03-20");
|
||||
item.setStatus("已销户");
|
||||
|
||||
Page<CcdiProjectAbnormalAccountItemVO> resultPage = new Page<>(1, 5);
|
||||
resultPage.setRecords(List.of(item));
|
||||
resultPage.setTotal(1L);
|
||||
when(overviewMapper.selectAbnormalAccountPage(any(Page.class), any(CcdiProjectAbnormalAccountQueryDTO.class)))
|
||||
.thenReturn(resultPage);
|
||||
|
||||
CcdiProjectAbnormalAccountPageVO result = service.getAbnormalAccountPeople(queryDTO);
|
||||
|
||||
assertEquals(1, result.getRows().size());
|
||||
assertEquals(1L, result.getTotal());
|
||||
assertEquals("6222000000000001", result.getRows().getFirst().getAccountNo());
|
||||
assertEquals("突然销户", result.getRows().getFirst().getAbnormalType());
|
||||
verify(overviewMapper).selectAbnormalAccountPage(
|
||||
argThat(page -> page.getCurrent() == 1L && page.getSize() == 5L),
|
||||
argThat(query -> query.getProjectId().equals(40L))
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldDefaultAbnormalAccountPageNumAndPageSizeToOneAndFive() {
|
||||
mockProjectExists(40L);
|
||||
|
||||
Page<CcdiProjectAbnormalAccountItemVO> emptyPage = new Page<>(1, 5);
|
||||
emptyPage.setRecords(List.of());
|
||||
emptyPage.setTotal(0L);
|
||||
when(overviewMapper.selectAbnormalAccountPage(any(Page.class), any(CcdiProjectAbnormalAccountQueryDTO.class)))
|
||||
.thenReturn(emptyPage);
|
||||
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO = new CcdiProjectAbnormalAccountQueryDTO();
|
||||
queryDTO.setProjectId(40L);
|
||||
service.getAbnormalAccountPeople(queryDTO);
|
||||
|
||||
verify(overviewMapper).selectAbnormalAccountPage(
|
||||
argThat(page -> page.getCurrent() == 1L && page.getSize() == 5L),
|
||||
any(CcdiProjectAbnormalAccountQueryDTO.class)
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldExportAbnormalAccountPeopleRows() {
|
||||
mockProjectExists(40L);
|
||||
|
||||
CcdiProjectAbnormalAccountItemVO item = new CcdiProjectAbnormalAccountItemVO();
|
||||
item.setAccountNo("6222000000000002");
|
||||
item.setAccountName("王五");
|
||||
item.setBankName("中国银行");
|
||||
item.setAbnormalType("休眠账户大额启用");
|
||||
item.setAbnormalTime("2025-08-01");
|
||||
item.setStatus("正常");
|
||||
when(overviewMapper.selectAbnormalAccountList(40L)).thenReturn(List.of(item));
|
||||
|
||||
List<CcdiProjectAbnormalAccountExcel> rows = service.exportAbnormalAccountPeople(40L);
|
||||
|
||||
assertEquals(1, rows.size());
|
||||
assertEquals("6222000000000002", rows.getFirst().getAccountNo());
|
||||
assertEquals("王五", rows.getFirst().getAccountName());
|
||||
assertEquals("中国银行", rows.getFirst().getBankName());
|
||||
assertEquals("休眠账户大额启用", rows.getFirst().getAbnormalType());
|
||||
assertEquals("2025-08-01", rows.getFirst().getAbnormalTime());
|
||||
assertEquals("正常", rows.getFirst().getStatus());
|
||||
verify(overviewMapper).selectAbnormalAccountList(40L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldThrowWhenProjectDoesNotExistForAbnormalAccountQueries() {
|
||||
when(projectMapper.selectById(99L)).thenReturn(null);
|
||||
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO = new CcdiProjectAbnormalAccountQueryDTO();
|
||||
queryDTO.setProjectId(99L);
|
||||
|
||||
assertThrows(ServiceException.class, () -> service.getAbnormalAccountPeople(queryDTO));
|
||||
assertThrows(ServiceException.class, () -> service.exportAbnormalAccountPeople(99L));
|
||||
}
|
||||
|
||||
private void mockProjectExists(Long projectId) {
|
||||
CcdiProject project = new CcdiProject();
|
||||
project.setProjectId(projectId);
|
||||
when(projectMapper.selectById(projectId)).thenReturn(project);
|
||||
}
|
||||
}
|
||||
@@ -5,10 +5,12 @@ import com.ruoyi.ccdi.project.domain.CcdiProject;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectPersonAnalysisDetailQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectAbnormalAccountExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectEmployeeCreditNegativeExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectRiskPeopleOverviewExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectSuspiciousTransactionExcel;
|
||||
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectAbnormalAccountItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeCreditNegativeItemVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiBankStatementListVO;
|
||||
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeRiskAggregateVO;
|
||||
@@ -268,6 +270,15 @@ class CcdiProjectOverviewServiceImplTest {
|
||||
creditItem.setCivilLmt(new BigDecimal("20000.00"));
|
||||
when(overviewMapper.selectEmployeeCreditNegativeList(40L)).thenReturn(List.of(creditItem));
|
||||
|
||||
CcdiProjectAbnormalAccountItemVO abnormalItem = new CcdiProjectAbnormalAccountItemVO();
|
||||
abnormalItem.setAccountNo("6222000000000001");
|
||||
abnormalItem.setAccountName("李四");
|
||||
abnormalItem.setBankName("中国农业银行");
|
||||
abnormalItem.setAbnormalType("突然销户");
|
||||
abnormalItem.setAbnormalTime("2026-03-20");
|
||||
abnormalItem.setStatus("已销户");
|
||||
when(overviewMapper.selectAbnormalAccountList(40L)).thenReturn(List.of(abnormalItem));
|
||||
|
||||
MockHttpServletResponse response = new MockHttpServletResponse();
|
||||
service.exportRiskDetails(response, 40L);
|
||||
|
||||
@@ -282,6 +293,9 @@ class CcdiProjectOverviewServiceImplTest {
|
||||
),
|
||||
argThat((List<CcdiProjectEmployeeCreditNegativeExcel> rows) ->
|
||||
rows.size() == 1 && "李四".equals(rows.getFirst().getPersonName())
|
||||
),
|
||||
argThat((List<CcdiProjectAbnormalAccountExcel> rows) ->
|
||||
rows.size() == 1 && "6222000000000001".equals(rows.getFirst().getAccountNo())
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.ruoyi.ccdi.project.service.impl;
|
||||
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectAbnormalAccountExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectEmployeeCreditNegativeExcel;
|
||||
import com.ruoyi.ccdi.project.domain.excel.CcdiProjectSuspiciousTransactionExcel;
|
||||
import org.apache.poi.ss.usermodel.WorkbookFactory;
|
||||
@@ -36,7 +37,15 @@ class CcdiProjectRiskDetailWorkbookExporterTest {
|
||||
creditRow.setCivilCnt(1);
|
||||
creditRow.setCivilLmt(new BigDecimal("20000.00"));
|
||||
|
||||
exporter.export(response, 40L, List.of(suspiciousRow), List.of(creditRow));
|
||||
CcdiProjectAbnormalAccountExcel abnormalRow = new CcdiProjectAbnormalAccountExcel();
|
||||
abnormalRow.setAccountNo("6222000000000001");
|
||||
abnormalRow.setAccountName("李四");
|
||||
abnormalRow.setBankName("中国农业银行");
|
||||
abnormalRow.setAbnormalType("突然销户");
|
||||
abnormalRow.setAbnormalTime("2026-03-20");
|
||||
abnormalRow.setStatus("已销户");
|
||||
|
||||
exporter.export(response, 40L, List.of(suspiciousRow), List.of(creditRow), List.of(abnormalRow));
|
||||
|
||||
assertTrue(response.getContentType().startsWith(
|
||||
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
@@ -48,8 +57,18 @@ class CcdiProjectRiskDetailWorkbookExporterTest {
|
||||
assertEquals("员工负面征信信息", workbook.getSheetAt(1).getSheetName());
|
||||
assertEquals("异常账户人员信息", workbook.getSheetAt(2).getSheetName());
|
||||
assertEquals("账号", workbook.getSheetAt(2).getRow(0).getCell(0).getStringCellValue());
|
||||
assertEquals("开户人", workbook.getSheetAt(2).getRow(0).getCell(1).getStringCellValue());
|
||||
assertEquals("银行", workbook.getSheetAt(2).getRow(0).getCell(2).getStringCellValue());
|
||||
assertEquals("异常类型", workbook.getSheetAt(2).getRow(0).getCell(3).getStringCellValue());
|
||||
assertEquals("异常发生时间", workbook.getSheetAt(2).getRow(0).getCell(4).getStringCellValue());
|
||||
assertEquals("状态", workbook.getSheetAt(2).getRow(0).getCell(5).getStringCellValue());
|
||||
assertEquals(1, workbook.getSheetAt(2).getPhysicalNumberOfRows());
|
||||
assertEquals("6222000000000001", workbook.getSheetAt(2).getRow(1).getCell(0).getStringCellValue());
|
||||
assertEquals("李四", workbook.getSheetAt(2).getRow(1).getCell(1).getStringCellValue());
|
||||
assertEquals("中国农业银行", workbook.getSheetAt(2).getRow(1).getCell(2).getStringCellValue());
|
||||
assertEquals("突然销户", workbook.getSheetAt(2).getRow(1).getCell(3).getStringCellValue());
|
||||
assertEquals("2026-03-20", workbook.getSheetAt(2).getRow(1).getCell(4).getStringCellValue());
|
||||
assertEquals("已销户", workbook.getSheetAt(2).getRow(1).getCell(5).getStringCellValue());
|
||||
assertEquals(2, workbook.getSheetAt(2).getPhysicalNumberOfRows());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,47 @@
|
||||
package com.ruoyi.ccdi.project.sql;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertAll;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class CcdiAbnormalAccountRuleSqlMetadataTest {
|
||||
|
||||
@Test
|
||||
void abnormalAccountMetadataSql_shouldContainModelAndRuleDefinitions() throws IOException {
|
||||
Path path = Path.of("..", "sql", "migration",
|
||||
"2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql");
|
||||
|
||||
assertTrue(Files.exists(path), "异常账户模型迁移脚本应存在");
|
||||
|
||||
String sql = Files.readString(path, StandardCharsets.UTF_8);
|
||||
assertAll(
|
||||
() -> assertTrue(sql.contains("ABNORMAL_ACCOUNT")),
|
||||
() -> assertTrue(sql.contains("SUDDEN_ACCOUNT_CLOSURE")),
|
||||
() -> assertTrue(sql.contains("DORMANT_ACCOUNT_LARGE_ACTIVATION")),
|
||||
() -> assertTrue(sql.contains("'OBJECT'"))
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void abnormalAccountMetadataSql_shouldContainAccountInfoTableDefinition() throws IOException {
|
||||
Path path = Path.of("..", "sql", "migration",
|
||||
"2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql");
|
||||
|
||||
assertTrue(Files.exists(path), "异常账户模型迁移脚本应存在");
|
||||
|
||||
String sql = Files.readString(path, StandardCharsets.UTF_8).toLowerCase();
|
||||
assertAll(
|
||||
() -> assertTrue(sql.contains("create table if not exists `ccdi_account_info`")),
|
||||
() -> assertTrue(sql.contains("`account_no`")),
|
||||
() -> assertTrue(sql.contains("`owner_type`")),
|
||||
() -> assertTrue(sql.contains("`effective_date`")),
|
||||
() -> assertTrue(sql.contains("`invalid_date`"))
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
package com.ruoyi.ccdi.project.sql;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertAll;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class CcdiAccountInfoMergeSqlTest {
|
||||
|
||||
@Test
|
||||
void accountInfoMergeSql_shouldAddColumnsMigrateDataAndDropLegacyTable() throws IOException {
|
||||
Path path = Path.of("..", "sql", "migration",
|
||||
"2026-04-16-merge-ccdi-account-result-into-info.sql");
|
||||
|
||||
assertTrue(Files.exists(path), "账户库合表迁移脚本应存在");
|
||||
|
||||
String sql = Files.readString(path, StandardCharsets.UTF_8).toLowerCase();
|
||||
assertAll(
|
||||
() -> assertTrue(sql.contains("bin/mysql_utf8_exec.sh")),
|
||||
() -> assertTrue(sql.contains("ccdi_account_info")),
|
||||
() -> assertTrue(sql.contains("add column `is_self_account`")),
|
||||
() -> assertTrue(sql.contains("monthly_avg_trans_count")),
|
||||
() -> assertTrue(sql.contains("update `ccdi_account_info` ai")),
|
||||
() -> assertTrue(sql.contains("join `ccdi_account_result` ar")),
|
||||
() -> assertTrue(sql.contains("drop table `ccdi_account_result`"))
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -30,6 +30,23 @@ class CcdiBankTagRuleSqlMetadataTest {
|
||||
assertPhase2Metadata(migrationSql);
|
||||
}
|
||||
|
||||
@Test
|
||||
void abnormalAccountMetadataSql_shouldContainBusinessCaliberAndRuleRemark() throws IOException {
|
||||
String migrationSql = readProjectFile("sql", "migration",
|
||||
"2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql");
|
||||
|
||||
assertAll(
|
||||
() -> assertTrue(migrationSql.contains("员工本人账户已销户,且销户日前30天内仍存在交易记录。"),
|
||||
"SUDDEN_ACCOUNT_CLOSURE 应使用设计文档中的业务口径"),
|
||||
() -> assertTrue(migrationSql.contains("员工本人账户开户后长期未使用,首次启用后出现大额资金流动。"),
|
||||
"DORMANT_ACCOUNT_LARGE_ACTIVATION 应使用设计文档中的业务口径"),
|
||||
() -> assertTrue(migrationSql.contains("真实规则:识别员工本人账户销户前30天内仍有交易的员工对象"),
|
||||
"SUDDEN_ACCOUNT_CLOSURE 应同步真实规则说明"),
|
||||
() -> assertTrue(migrationSql.contains("真实规则:识别长期休眠后首次启用即出现大额资金流动的员工对象"),
|
||||
"DORMANT_ACCOUNT_LARGE_ACTIVATION 应同步真实规则说明")
|
||||
);
|
||||
}
|
||||
|
||||
private void assertPhase1Metadata(String sqlContent) {
|
||||
assertAll(
|
||||
() -> assertTrue(sqlContent.contains("'FOREX_BUY_AMT'")
|
||||
|
||||
101
deploy/deploy-to-nas-tongweb.sh
Executable file
101
deploy/deploy-to-nas-tongweb.sh
Executable file
@@ -0,0 +1,101 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||
|
||||
SERVER_HOST="116.62.17.81"
|
||||
SERVER_PORT="9444"
|
||||
SERVER_USERNAME="wkc"
|
||||
SERVER_PASSWORD="wkc@0825"
|
||||
REMOTE_ROOT="/volume1/webapp/ccdi"
|
||||
TONGWEB_HOME="${TONGWEB_HOME:-/opt/TongWeb}"
|
||||
APP_NAME="${APP_NAME:-ruoyi-admin}"
|
||||
DRY_RUN="false"
|
||||
|
||||
ensure_command() {
|
||||
local command_name="$1"
|
||||
if ! command -v "${command_name}" >/dev/null 2>&1; then
|
||||
echo "缺少命令: ${command_name}" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
ensure_paramiko() {
|
||||
if python3 - <<'PY'
|
||||
import importlib.util
|
||||
import sys
|
||||
|
||||
sys.exit(0 if importlib.util.find_spec("paramiko") else 1)
|
||||
PY
|
||||
then
|
||||
return
|
||||
fi
|
||||
|
||||
python3 -m pip install --user paramiko
|
||||
}
|
||||
|
||||
POSITION=0
|
||||
for arg in "$@"; do
|
||||
if [[ "${arg}" == "--dry-run" ]]; then
|
||||
DRY_RUN="true"
|
||||
continue
|
||||
fi
|
||||
|
||||
POSITION=$((POSITION + 1))
|
||||
case "${POSITION}" in
|
||||
1) SERVER_HOST="${arg}" ;;
|
||||
2) SERVER_PORT="${arg}" ;;
|
||||
3) SERVER_USERNAME="${arg}" ;;
|
||||
4) SERVER_PASSWORD="${arg}" ;;
|
||||
5) REMOTE_ROOT="${arg}" ;;
|
||||
6) TONGWEB_HOME="${arg}" ;;
|
||||
7) APP_NAME="${arg}" ;;
|
||||
*)
|
||||
echo "仅支持 [host] [port] [username] [password] [remoteRoot] [tongwebHome] [appName] [--dry-run]" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "${DRY_RUN}" == "true" ]]; then
|
||||
echo "[DryRun] TongWeb NAS 部署参数预览"
|
||||
echo "Host: ${SERVER_HOST}"
|
||||
echo "Port: ${SERVER_PORT}"
|
||||
echo "Username: ${SERVER_USERNAME}"
|
||||
echo "RemoteRoot: ${REMOTE_ROOT}"
|
||||
echo "TongWebHome: ${TONGWEB_HOME}"
|
||||
echo "AppName: ${APP_NAME}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "[1/4] 检查本地环境"
|
||||
ensure_command "mvn"
|
||||
ensure_command "python3"
|
||||
|
||||
echo "[2/4] 打包后端 war"
|
||||
(
|
||||
cd "${REPO_ROOT}"
|
||||
mvn -pl ruoyi-admin -am package -DskipTests
|
||||
)
|
||||
|
||||
WAR_PATH="${REPO_ROOT}/ruoyi-admin/target/ruoyi-admin.war"
|
||||
if [[ ! -f "${WAR_PATH}" ]]; then
|
||||
echo "未找到后端 war 包: ${WAR_PATH}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[3/4] 检查远端执行依赖"
|
||||
ensure_paramiko
|
||||
|
||||
echo "[4/4] 上传 war 并重启 TongWeb"
|
||||
python3 "${SCRIPT_DIR}/remote-deploy-tongweb.py" \
|
||||
--host "${SERVER_HOST}" \
|
||||
--port "${SERVER_PORT}" \
|
||||
--username "${SERVER_USERNAME}" \
|
||||
--password "${SERVER_PASSWORD}" \
|
||||
--local-war "${WAR_PATH}" \
|
||||
--remote-root "${REMOTE_ROOT}" \
|
||||
--tongweb-home "${TONGWEB_HOME}" \
|
||||
--app-name "${APP_NAME}"
|
||||
@@ -104,6 +104,9 @@ copy_path "${REPO_ROOT}/ruoyi-ui/dist" "${STAGE_ROOT}/frontend/dist"
|
||||
copy_path "${REPO_ROOT}/docker-compose.yml" "${STAGE_ROOT}/docker-compose.yml"
|
||||
copy_path "${REPO_ROOT}/.env.example" "${STAGE_ROOT}/.env.example"
|
||||
copy_path "${REPO_ROOT}/ruoyi-admin/target/ruoyi-admin.jar" "${STAGE_ROOT}/backend/ruoyi-admin.jar"
|
||||
python3 "${SCRIPT_DIR}/render_nas_env.py" \
|
||||
--template "${REPO_ROOT}/.env.example" \
|
||||
--output "${STAGE_ROOT}/.env"
|
||||
|
||||
echo "[5/5] 上传并远端部署"
|
||||
ensure_paramiko
|
||||
|
||||
@@ -95,6 +95,12 @@ Copy-ItemSafe (Join-Path $repoRoot "ruoyi-ui\\dist") (Join-Path $stageRoot "fron
|
||||
Copy-ItemSafe (Join-Path $repoRoot "docker-compose.yml") (Join-Path $stageRoot "docker-compose.yml")
|
||||
Copy-ItemSafe (Join-Path $repoRoot ".env.example") (Join-Path $stageRoot ".env.example")
|
||||
Copy-ItemSafe (Join-Path $repoRoot "ruoyi-admin\\target\\ruoyi-admin.jar") (Join-Path $stageRoot "backend\\ruoyi-admin.jar")
|
||||
python (Join-Path $scriptDir "render_nas_env.py") `
|
||||
--template (Join-Path $repoRoot ".env.example") `
|
||||
--output (Join-Path $stageRoot ".env")
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
throw "生成 NAS 部署 .env 失败"
|
||||
}
|
||||
|
||||
Write-Host "[5/5] 上传并远端部署"
|
||||
$paramikoCheck = @'
|
||||
|
||||
136
deploy/remote-deploy-tongweb.py
Normal file
136
deploy/remote-deploy-tongweb.py
Normal file
@@ -0,0 +1,136 @@
|
||||
import argparse
|
||||
import posixpath
|
||||
import shlex
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import paramiko
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser(description="Upload backend war to NAS and restart TongWeb.")
|
||||
parser.add_argument("--host", required=True)
|
||||
parser.add_argument("--port", type=int, required=True)
|
||||
parser.add_argument("--username", required=True)
|
||||
parser.add_argument("--password", required=True)
|
||||
parser.add_argument("--local-war", required=True)
|
||||
parser.add_argument("--remote-root", required=True)
|
||||
parser.add_argument("--tongweb-home", required=True)
|
||||
parser.add_argument("--app-name", required=True)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def run_command(ssh, command):
|
||||
stdin, stdout, stderr = ssh.exec_command(command)
|
||||
exit_code = stdout.channel.recv_exit_status()
|
||||
output = stdout.read().decode("utf-8", errors="ignore")
|
||||
error = stderr.read().decode("utf-8", errors="ignore")
|
||||
return exit_code, output, error
|
||||
|
||||
|
||||
def sudo_prefix(password):
|
||||
return f"printf '%s\\n' {shlex.quote(password)} | sudo -S -p '' "
|
||||
|
||||
|
||||
def detect_command_prefix(ssh, password, command):
|
||||
plain_exit_code, _, _ = run_command(ssh, f"{command} >/dev/null 2>&1")
|
||||
if plain_exit_code == 0:
|
||||
return ""
|
||||
|
||||
sudo_probe = f"{sudo_prefix(password)}{command} >/dev/null 2>&1"
|
||||
sudo_exit_code, _, _ = run_command(ssh, sudo_probe)
|
||||
if sudo_exit_code == 0:
|
||||
return sudo_prefix(password)
|
||||
|
||||
raise RuntimeError(f"Remote command is not accessible: {command}")
|
||||
|
||||
|
||||
def ensure_remote_path(ssh, prefix, remote_path):
|
||||
command = f"{prefix}mkdir -p {shlex.quote(remote_path)}"
|
||||
exit_code, output, error = run_command(ssh, command)
|
||||
if exit_code != 0:
|
||||
raise RuntimeError(f"Failed to create remote directory {remote_path}:\n{output}\n{error}")
|
||||
|
||||
|
||||
def upload_file(sftp, local_file, remote_file):
|
||||
parent_dir = posixpath.dirname(remote_file)
|
||||
try:
|
||||
sftp.listdir(parent_dir)
|
||||
except OSError:
|
||||
raise RuntimeError(f"SFTP remote directory not found: {parent_dir}")
|
||||
sftp.put(str(local_file), remote_file)
|
||||
|
||||
|
||||
def build_deploy_command(args, prefix):
|
||||
app_war_name = f"{args.app_name}.war"
|
||||
remote_war_path = posixpath.join(args.remote_root.rstrip("/"), "backend", app_war_name)
|
||||
autodeploy_dir = posixpath.join(args.tongweb_home.rstrip("/"), "autodeploy")
|
||||
deployed_war_path = posixpath.join(autodeploy_dir, app_war_name)
|
||||
deployed_dir_path = posixpath.join(autodeploy_dir, args.app_name)
|
||||
stop_script = posixpath.join(args.tongweb_home.rstrip("/"), "bin", "stopserver.sh")
|
||||
start_script = posixpath.join(args.tongweb_home.rstrip("/"), "bin", "startservernohup.sh")
|
||||
|
||||
return (
|
||||
"set -e;"
|
||||
f"test -d {shlex.quote(args.tongweb_home)};"
|
||||
f"test -x {shlex.quote(stop_script)};"
|
||||
f"test -x {shlex.quote(start_script)};"
|
||||
f"{prefix}mkdir -p {shlex.quote(autodeploy_dir)};"
|
||||
f"{prefix}sh {shlex.quote(stop_script)} >/dev/null 2>&1 || true;"
|
||||
f"{prefix}rm -rf {shlex.quote(deployed_dir_path)};"
|
||||
f"{prefix}rm -f {shlex.quote(deployed_war_path)};"
|
||||
f"{prefix}cp {shlex.quote(remote_war_path)} {shlex.quote(deployed_war_path)};"
|
||||
f"{prefix}sh {shlex.quote(start_script)};"
|
||||
"sleep 5;"
|
||||
f"ls -l {shlex.quote(autodeploy_dir)};"
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
local_war = Path(args.local_war).resolve()
|
||||
if not local_war.exists():
|
||||
raise FileNotFoundError(f"Local war does not exist: {local_war}")
|
||||
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
ssh.connect(
|
||||
hostname=args.host,
|
||||
port=args.port,
|
||||
username=args.username,
|
||||
password=args.password,
|
||||
timeout=20,
|
||||
)
|
||||
|
||||
sftp = ssh.open_sftp()
|
||||
try:
|
||||
remote_root = args.remote_root.rstrip("/")
|
||||
remote_backend_dir = posixpath.join(remote_root, "backend")
|
||||
remote_war_path = posixpath.join(remote_backend_dir, f"{args.app_name}.war")
|
||||
|
||||
ensure_remote_path(ssh, "", remote_root)
|
||||
ensure_remote_path(ssh, "", remote_backend_dir)
|
||||
upload_file(sftp, local_war, remote_war_path)
|
||||
|
||||
command_prefix = detect_command_prefix(ssh, args.password, f"test -d {shlex.quote(args.tongweb_home)}")
|
||||
deploy_command = build_deploy_command(args, command_prefix)
|
||||
exit_code, output, error = run_command(ssh, deploy_command)
|
||||
if exit_code != 0:
|
||||
raise RuntimeError(f"Remote TongWeb deploy failed:\n{output}\n{error}")
|
||||
|
||||
print("=== DEPLOY OUTPUT ===")
|
||||
print(output.strip())
|
||||
if error.strip():
|
||||
print("=== DEPLOY STDERR ===")
|
||||
print(error.strip())
|
||||
finally:
|
||||
sftp.close()
|
||||
ssh.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except Exception as exc:
|
||||
print(str(exc), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
47
deploy/render_nas_env.py
Normal file
47
deploy/render_nas_env.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
NAS_ENV_OVERRIDES = {
|
||||
"CCDI_DB_HOST": "192.168.0.111",
|
||||
"CCDI_DB_PORT": "40628",
|
||||
}
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser(description="Render NAS deployment .env for CCDI docker compose.")
|
||||
parser.add_argument("--template", required=True)
|
||||
parser.add_argument("--output", required=True)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def render_env_text(template_text: str) -> str:
|
||||
rendered_lines = []
|
||||
replaced_keys = set()
|
||||
|
||||
for line in template_text.splitlines():
|
||||
key, separator, value = line.partition("=")
|
||||
if separator and key in NAS_ENV_OVERRIDES:
|
||||
rendered_lines.append(f"{key}={NAS_ENV_OVERRIDES[key]}")
|
||||
replaced_keys.add(key)
|
||||
continue
|
||||
rendered_lines.append(line)
|
||||
|
||||
for key, value in NAS_ENV_OVERRIDES.items():
|
||||
if key not in replaced_keys:
|
||||
rendered_lines.append(f"{key}={value}")
|
||||
|
||||
return "\n".join(rendered_lines) + "\n"
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
template_path = Path(args.template)
|
||||
output_path = Path(args.output)
|
||||
|
||||
template_text = template_path.read_text(encoding="utf-8")
|
||||
output_path.write_text(render_env_text(template_text), encoding="utf-8")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -21,6 +21,7 @@ services:
|
||||
context: .
|
||||
dockerfile: docker/mock/Dockerfile
|
||||
container_name: ccdi-lsfx-mock
|
||||
command: ["python", "main.py", "--rule-hit-mode", "subset"]
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- backend
|
||||
|
||||
382
docs/design/2026-03-31-abnormal-account-bank-tag-design.md
Normal file
382
docs/design/2026-03-31-abnormal-account-bank-tag-design.md
Normal file
@@ -0,0 +1,382 @@
|
||||
# 异常账户模型接入银行流水打标设计文档
|
||||
|
||||
**模块**: 银行流水打标
|
||||
**日期**: 2026-03-31
|
||||
|
||||
## 一、背景
|
||||
|
||||
当前银行流水打标主链路已经具备以下基础能力:
|
||||
|
||||
- 规则元数据管理与启用控制
|
||||
- `CcdiBankTagServiceImpl` 统一执行入口
|
||||
- `CcdiBankTagAnalysisMapper.xml` 承载真实规则 SQL
|
||||
- `ccdi_bank_statement_tag_result` 统一承载 `STATEMENT / OBJECT` 命中结果
|
||||
- 项目风险总览按对象型结果聚合员工风险情况
|
||||
|
||||
根据 [异常账户.xlsx](/Users/wkc/Desktop/ccdi/ccdi/assets/异常账户.xlsx) 与 [员工账户.xlsx](/Users/wkc/Desktop/ccdi/ccdi/assets/员工账户.xlsx),本次需要新增独立模型“异常账户”,并正式接入以下两条规则:
|
||||
|
||||
- `SUDDEN_ACCOUNT_CLOSURE`:突然销户
|
||||
- `DORMANT_ACCOUNT_LARGE_ACTIVATION`:休眠账户大额启用
|
||||
|
||||
这两条规则均依赖新增账户信息表 `ccdi_account_info`,且风险筛查对象明确为“员工本人”。本次目标是在不改造现有打标架构的前提下,将两条规则纳入现有项目打标主链路,并补充能够稳定命中的测试数据与验证手段。
|
||||
|
||||
## 二、目标
|
||||
|
||||
本次设计目标如下:
|
||||
|
||||
1. 新增账户信息表 `ccdi_account_info`,支撑异常账户规则计算。
|
||||
2. 新增独立模型 `ABNORMAL_ACCOUNT`,并接入 2 条对象型规则。
|
||||
3. 将两条规则接入现有 `executeObjectRule(...)` 打标链路,不新增平行处理模块。
|
||||
4. 补充最小可命中的测试数据 SQL,并覆盖正样本与反样本。
|
||||
5. 保留 Java 自动化测试,同时在验证阶段使用 MySQL MCP 执行真实 SQL,确认命中结果符合业务口径。
|
||||
6. 在设计确认后,分别产出后端与前端实施计划文档。
|
||||
|
||||
## 三、范围
|
||||
|
||||
### 3.1 本次范围
|
||||
|
||||
- 新增 `ccdi_account_info` 建表 SQL
|
||||
- 新增模型 `ABNORMAL_ACCOUNT`
|
||||
- 新增规则元数据 `SUDDEN_ACCOUNT_CLOSURE`、`DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
- `CcdiBankTagServiceImpl` 新增对象型规则分发
|
||||
- `CcdiBankTagAnalysisMapper.java/.xml` 新增 2 条对象型查询
|
||||
- 新增测试数据 SQL
|
||||
- 新增 Java 自动化测试
|
||||
- 新增基于 MySQL MCP 的真实 SQL 验证步骤
|
||||
- 新增设计文档、后端实施计划、前端实施计划
|
||||
|
||||
### 3.2 不在本次范围
|
||||
|
||||
- 不开发“异常账户人员信息”独立查询、分页、详情、导出真实数据链路
|
||||
- 不改前端页面展示逻辑
|
||||
- 不扩展到关系人或外部账户
|
||||
- 不新增动态规则引擎、DSL 或兼容性补丁方案
|
||||
- 不改造 `lsfx-mock-server`
|
||||
- 不将固定阈值改造成项目可配置参数
|
||||
|
||||
## 四、现状分析
|
||||
|
||||
### 4.1 当前主链路
|
||||
|
||||
当前项目级银行流水打标流程为:
|
||||
|
||||
1. `CcdiBankTagServiceImpl.rebuildProject(...)` 加载启用规则。
|
||||
2. 规则按 `rule_code` 分发到 `executeStatementRule(...)` 或 `executeObjectRule(...)`。
|
||||
3. `CcdiBankTagAnalysisMapper.xml` 执行真实 SQL,返回流水型或对象型命中结果。
|
||||
4. Service 将命中结果组装为 `CcdiBankTagResult` 并写入 `ccdi_bank_statement_tag_result`。
|
||||
5. 项目结果总览再按对象维度聚合风险人数和命中规则快照。
|
||||
|
||||
### 4.2 当前缺口
|
||||
|
||||
当前仓库中“异常账户人员信息”仍为占位展示,且主打标规则中尚无“异常账户”模型与对应规则编码。也就是说,本次缺口主要是:
|
||||
|
||||
- 缺少账户信息基础表
|
||||
- 缺少异常账户模型与规则元数据
|
||||
- 缺少两条规则的对象型 SQL
|
||||
- 缺少最小可命中的测试样本与真实 SQL 验证
|
||||
|
||||
## 五、方案对比
|
||||
|
||||
### 5.1 方案一:最小闭环接入现有对象型打标链路
|
||||
|
||||
做法:
|
||||
|
||||
- 新增独立模型 `ABNORMAL_ACCOUNT`
|
||||
- 两条规则均按 `OBJECT` 结果类型落到员工维度
|
||||
- 通过 `CcdiBankTagAnalysisMapper.xml` 计算命中结果
|
||||
- 结果继续写入 `ccdi_bank_statement_tag_result`
|
||||
|
||||
优点:
|
||||
|
||||
- 改动最小
|
||||
- 完全复用现有打标主链路
|
||||
- 能直接进入现有员工风险总览聚合
|
||||
|
||||
缺点:
|
||||
|
||||
- 本轮不打通“异常账户人员信息”独立详情链路
|
||||
|
||||
### 5.2 方案二:在方案一基础上同时打通异常账户独立结果链路
|
||||
|
||||
优点:
|
||||
|
||||
- 风险详情中的“异常账户人员信息”可展示真实数据
|
||||
|
||||
缺点:
|
||||
|
||||
- 改动范围明显扩大
|
||||
- 超出本次需求
|
||||
- 不符合最短路径实现要求
|
||||
|
||||
### 5.3 方案三:仅补 SQL 验证,不接入主系统打标链路
|
||||
|
||||
优点:
|
||||
|
||||
- 开发最省
|
||||
|
||||
缺点:
|
||||
|
||||
- 无法满足“正式接入主系统打标链路”的需求
|
||||
|
||||
### 5.4 结论
|
||||
|
||||
采用方案一:
|
||||
|
||||
- 新增独立模型 `ABNORMAL_ACCOUNT`
|
||||
- 两条规则均按对象型规则接入现有打标链路
|
||||
- 结果沉淀到现有结果表
|
||||
- 后续如需开发异常账户独立查询能力,再以此为基础扩展
|
||||
|
||||
## 六、总体设计
|
||||
|
||||
### 6.1 模型与规则设计
|
||||
|
||||
本次新增如下模型与规则:
|
||||
|
||||
- 模型编码:`ABNORMAL_ACCOUNT`
|
||||
- 模型名称:`异常账户`
|
||||
- 规则一:`SUDDEN_ACCOUNT_CLOSURE` / `突然销户`
|
||||
- 规则二:`DORMANT_ACCOUNT_LARGE_ACTIVATION` / `休眠账户大额启用`
|
||||
|
||||
两条规则统一定义为:
|
||||
|
||||
- `result_type = OBJECT`
|
||||
- `object_type = STAFF_ID_CARD`
|
||||
- `object_key = 员工身份证号`
|
||||
|
||||
### 6.2 结果落库
|
||||
|
||||
两条规则命中后继续写入现有结果表 `ccdi_bank_statement_tag_result`,不新增单独结果表。
|
||||
|
||||
结果字段约束如下:
|
||||
|
||||
- `model_code = ABNORMAL_ACCOUNT`
|
||||
- `rule_code` 使用全大写风格
|
||||
- `result_type = OBJECT`
|
||||
- `bank_statement_id = null`
|
||||
- `object_type = STAFF_ID_CARD`
|
||||
- `object_key = 员工身份证号`
|
||||
- `reason_detail` 存储账户号、异常日期与统计快照
|
||||
|
||||
### 6.3 数据流
|
||||
|
||||
数据流保持为:
|
||||
|
||||
1. 项目级打标入口加载启用规则。
|
||||
2. 当规则编码为 `SUDDEN_ACCOUNT_CLOSURE` 或 `DORMANT_ACCOUNT_LARGE_ACTIVATION` 时,进入 `executeObjectRule(...)`。
|
||||
3. Mapper SQL 在项目范围内将 `ccdi_bank_statement` 与 `ccdi_account_info`、`ccdi_base_staff` 关联。
|
||||
4. SQL 返回员工身份证号维度的对象型命中结果。
|
||||
5. Service 将命中结果写入 `ccdi_bank_statement_tag_result`。
|
||||
6. 员工风险聚合继续从该结果表汇总,无需新建平行链路。
|
||||
|
||||
## 七、表结构设计
|
||||
|
||||
### 7.1 新增表 `ccdi_account_info`
|
||||
|
||||
以 [员工账户.xlsx](/Users/wkc/Desktop/ccdi/ccdi/assets/员工账户.xlsx) 为准,新增表 `ccdi_account_info`,核心字段如下:
|
||||
|
||||
- `account_id`
|
||||
- `account_no`
|
||||
- `account_type`
|
||||
- `account_name`
|
||||
- `owner_type`
|
||||
- `owner_id`
|
||||
- `bank`
|
||||
- `bank_code`
|
||||
- `currency`
|
||||
- `is_self_account`
|
||||
- `monthly_avg_trans_count`
|
||||
- `monthly_avg_trans_amount`
|
||||
- `trans_freq_type`
|
||||
- `dr_max_single_amount`
|
||||
- `cr_max_single_amount`
|
||||
- `dr_max_daily_amount`
|
||||
- `cr_max_daily_amount`
|
||||
- `trans_risk_level`
|
||||
- `status`
|
||||
- `effective_date`
|
||||
- `invalid_date`
|
||||
- `created_by`
|
||||
- `updated_by`
|
||||
- `create_time`
|
||||
- `update_time`
|
||||
|
||||
### 7.2 关联约束
|
||||
|
||||
本次规则只识别员工本人账户,关联口径固定为:
|
||||
|
||||
- `ccdi_account_info.owner_type = 'EMPLOYEE'`
|
||||
- `ccdi_account_info.owner_id = ccdi_base_staff.id_card`
|
||||
- `ccdi_account_info.account_no = ccdi_bank_statement.LE_ACCOUNT_NO`
|
||||
|
||||
说明:
|
||||
|
||||
- 仓库中当前未见单独的账号加解密或标准化链路,因此本次设计要求建表脚本、测试数据与流水数据直接使用一致账号值
|
||||
- 本次不将关系人账户纳入规则范围
|
||||
|
||||
## 八、规则 SQL 口径
|
||||
|
||||
### 8.1 `SUDDEN_ACCOUNT_CLOSURE`
|
||||
|
||||
业务口径:
|
||||
|
||||
- 员工本人账户已销户
|
||||
- 销户日前 30 天内仍存在交易记录
|
||||
|
||||
SQL 设计约束:
|
||||
|
||||
- 仅统计项目内流水
|
||||
- 统计窗口限定为 `[invalid_date - 30天, invalid_date)`
|
||||
- 按“员工身份证号 + 账号”粒度聚合,再映射回员工对象
|
||||
|
||||
命中条件:
|
||||
|
||||
- `status = 2`
|
||||
- `invalid_date is not null`
|
||||
- 窗口内存在至少 1 笔交易
|
||||
|
||||
返回结果:
|
||||
|
||||
- `objectType = STAFF_ID_CARD`
|
||||
- `objectKey = 员工身份证号`
|
||||
|
||||
`reasonDetail` 结构:
|
||||
|
||||
- `账户{account_no}于{invalid_date}销户,销户前30天内最后交易日{last_tx_date},累计交易金额{window_total_amount}元,单笔最大金额{window_max_single_amount}元`
|
||||
|
||||
### 8.2 `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
|
||||
业务口径:
|
||||
|
||||
- 员工本人账户状态正常
|
||||
- 开户后长期未使用
|
||||
- 首次启用后出现大额资金流动
|
||||
|
||||
SQL 设计约束:
|
||||
|
||||
- 仅统计项目内流水
|
||||
- 以该账户在项目内的首次流水日期作为“启用时间”
|
||||
- “沉睡时长”按开户日期到首次交易日期计算
|
||||
|
||||
命中条件:
|
||||
|
||||
- `status = 1`
|
||||
- `effective_date is not null`
|
||||
- `first_tx_date >= effective_date + 6个月`
|
||||
- 且满足以下任一:
|
||||
- 启用后累计交易总额 `>= 500000`
|
||||
- 启用后单笔最大交易金额 `>= 100000`
|
||||
|
||||
返回结果:
|
||||
|
||||
- `objectType = STAFF_ID_CARD`
|
||||
- `objectKey = 员工身份证号`
|
||||
|
||||
`reasonDetail` 结构:
|
||||
|
||||
- `账户{account_no}开户于{effective_date},首次交易日期{first_tx_date},沉睡时长{dormant_months}个月,启用后累计交易金额{total_amount}元,单笔最大金额{max_single_amount}元`
|
||||
|
||||
### 8.3 公共规则约束
|
||||
|
||||
- 仅识别员工本人账户,不识别关系人和外部账户
|
||||
- 仅按项目内流水计算,不跨项目拼接历史流水
|
||||
- 累计金额使用 `amount_dr + amount_cr`
|
||||
- 单笔最大金额使用 `greatest(amount_dr, amount_cr)`
|
||||
- 同一员工多个账户分别判断,允许同一规则写入多条结果,避免强行合并后丢失账户级快照
|
||||
|
||||
## 九、测试数据设计
|
||||
|
||||
### 9.1 测试数据组织原则
|
||||
|
||||
新增一份独立增量 SQL,放在 `sql/migration/`,仅构造本次规则所需最小样本。
|
||||
|
||||
### 9.2 样本设计
|
||||
|
||||
建议最少包含以下样本:
|
||||
|
||||
- 员工 A:命中 `SUDDEN_ACCOUNT_CLOSURE`
|
||||
- 账户已销户
|
||||
- 销户前 30 天内有 2 到 3 笔项目流水
|
||||
- 员工 B:命中 `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
- 开户日期早于首次交易至少 6 个月
|
||||
- 启用后累计金额超过 50 万
|
||||
- 员工 C:休眠不足 6 个月,不命中
|
||||
- 员工 D:已销户,但销户前 30 天无流水,不命中
|
||||
|
||||
### 9.3 数据一致性要求
|
||||
|
||||
- `ccdi_account_info.account_no` 与 `ccdi_bank_statement.LE_ACCOUNT_NO` 必须一致
|
||||
- `owner_id` 与员工身份证号一致
|
||||
- 正样本与反样本必须处于同一项目验证口径下,避免跨项目误差
|
||||
|
||||
## 十、测试与验证设计
|
||||
|
||||
### 10.1 Java 自动化测试
|
||||
|
||||
保留三层自动化测试:
|
||||
|
||||
1. Service 分发测试
|
||||
- 新规则能进入 `executeObjectRule(...)`
|
||||
2. Mapper / SQL 结构测试
|
||||
- 新 Mapper 方法存在
|
||||
- XML 中存在对应 `<select>`
|
||||
- 规则元数据和模型编码无拼写错误
|
||||
3. 结果聚合测试
|
||||
- 新规则写入后,员工风险总览可正常聚合
|
||||
|
||||
### 10.2 MySQL MCP 真实 SQL 验证
|
||||
|
||||
本次新增一层真实 SQL 验证,要求在测试阶段直接通过 MySQL MCP 执行规则 SQL,确认结果符合口径。
|
||||
|
||||
验证要求:
|
||||
|
||||
- 使用项目数据库连接信息
|
||||
- 不手写 `mysql -e`
|
||||
- 直接执行对象型规则对应 SQL
|
||||
- 校验命中员工身份证号是否与测试样本一致
|
||||
- 校验反样本不会被查出
|
||||
- 校验 `reason_detail` 中异常日期、累计金额、单笔最大金额等关键快照是否符合预期
|
||||
|
||||
### 10.3 测试结束清理
|
||||
|
||||
若验证阶段启动了本地前后端、Mock 服务或其他辅助进程,测试结束后需主动关闭,避免残留占用端口。
|
||||
|
||||
## 十一、实施边界
|
||||
|
||||
### 11.1 后端实施内容
|
||||
|
||||
- 新增建表与规则元数据 SQL
|
||||
- 新增 Mapper 方法和 XML SQL
|
||||
- 新增 Service 分发
|
||||
- 新增测试数据 SQL
|
||||
- 新增自动化测试
|
||||
- 执行 MySQL MCP SQL 验证
|
||||
|
||||
### 11.2 前端实施内容
|
||||
|
||||
本轮前端原则上不改代码,但仍需产出一份前端实施计划,明确说明:
|
||||
|
||||
- 现有页面继续复用项目总览对象聚合结果
|
||||
- 本轮不开发异常账户独立列表与详情
|
||||
- 前端无需新增接口或交互
|
||||
|
||||
## 十二、验收标准
|
||||
|
||||
验收标准如下:
|
||||
|
||||
1. `ccdi_account_info` 建表脚本存在且字段与 Excel 一致。
|
||||
2. 模型 `ABNORMAL_ACCOUNT` 与两条规则元数据已落库,编码统一全大写。
|
||||
3. `CcdiBankTagServiceImpl` 已接入两条规则对象型执行分支。
|
||||
4. 规则命中结果成功写入 `ccdi_bank_statement_tag_result`。
|
||||
5. 员工风险总览聚合后可看到新增模型与规则命中。
|
||||
6. 测试数据中的正样本可命中,反样本不命中。
|
||||
7. MySQL MCP 真实 SQL 验证结果与业务口径一致。
|
||||
|
||||
## 十三、后续文档规划
|
||||
|
||||
设计确认后,继续补充以下文档:
|
||||
|
||||
- `docs/plans/backend/` 下的后端实施计划
|
||||
- `docs/plans/frontend/` 下的前端实施计划
|
||||
- `docs/reports/implementation/` 下的后端实施记录
|
||||
- `docs/reports/implementation/` 下的前端实施记录
|
||||
@@ -0,0 +1,350 @@
|
||||
# LSFX Mock Server 异常账户基线同步设计文档
|
||||
|
||||
**模块**: `lsfx-mock-server`
|
||||
**日期**: 2026-03-31
|
||||
|
||||
## 一、背景
|
||||
|
||||
当前 `lsfx-mock-server` 已完成异常账户命中流水的主链路接入:
|
||||
|
||||
- `FileService` 可为 `logId` 生成稳定的 `abnormal_account_hit_rules`
|
||||
- `FileRecord` 内已保存 `abnormal_accounts`
|
||||
- `StatementService` 已能按异常账户事实拼接 `SUDDEN_ACCOUNT_CLOSURE`、`DORMANT_ACCOUNT_LARGE_ACTIVATION` 命中流水
|
||||
|
||||
但现阶段异常账户事实仅存在于 Mock 进程内存中,尚未同步到主项目真实规则依赖的关联表 `ccdi_account_info`。这会导致两个问题:
|
||||
|
||||
1. Mock 返回的流水看起来满足异常账户规则,但真实打标 SQL 缺少账户事实,命中不稳定
|
||||
2. 同一个 `logId` 下,“命中流水”与“真实账户事实”没有形成完整闭环
|
||||
|
||||
本次需求要求在生成可以命中异常账户的流水时,同时向关联表插入最小事实数据,保证真实规则命中条件成立。
|
||||
|
||||
## 二、目标
|
||||
|
||||
- 在 `fetch_inner_flow(...)` / 上传创建 `logId` 时一次性同步异常账户事实到 `ccdi_account_info`
|
||||
- 保持同一个 `logId` 的异常账户事实、返回流水、真实打标条件三者一致
|
||||
- 保持现有 `/watson/api/project/getBSByLogId` 接口协议不变
|
||||
- 保持 `StatementService` 只负责读 `FileRecord` 生成流水,不新增写库副作用
|
||||
- 对异常账户基线写库失败采用显式失败语义,不返回半成功 `logId`
|
||||
|
||||
## 三、非目标
|
||||
|
||||
- 不新增异常账户独立接口
|
||||
- 不修改现有随机规则命中策略
|
||||
- 不扩展 `ccdi_account_info` 为完整账户域模型
|
||||
- 不在 `getBSByLogId` 首次查询时补做异常账户写库
|
||||
- 不新增兜底、补丁或降级链路
|
||||
|
||||
## 四、方案对比
|
||||
|
||||
### 4.1 方案 A:在创建 `logId` 时同步异常账户基线,推荐
|
||||
|
||||
做法:
|
||||
|
||||
- `FileService` 生成 `FileRecord` 和 `abnormal_accounts`
|
||||
- 在保存 `file_records[log_id]` 之前,同步将异常账户事实幂等写入 `ccdi_account_info`
|
||||
- 后续 `StatementService` 只读内存事实生成流水
|
||||
|
||||
优点:
|
||||
|
||||
- 触发点单一,同一个 `logId` 只写一次
|
||||
- 不把写库副作用混进读接口
|
||||
- “命中前提未建好就不返回 `logId`” 的语义最清晰
|
||||
- 与现有 `fetch_inner_flow -> getBSByLogId` 主链路最一致
|
||||
|
||||
缺点:
|
||||
|
||||
- 需要新增一个很小的异常账户基线写库服务
|
||||
|
||||
### 4.2 方案 B:在 `getBSByLogId` 首次生成流水时再写库
|
||||
|
||||
优点:
|
||||
|
||||
- 只有真正查询流水时才落库
|
||||
|
||||
缺点:
|
||||
|
||||
- 读接口承担写库副作用,职责变重
|
||||
- 缓存、重试和并发下更容易出现重复写库或半成功状态
|
||||
- 不符合当前 Mock 服务“先建上传记录,再查流水”的链路习惯
|
||||
|
||||
### 4.3 方案 C:继续只保留内存事实,不做运行时写库
|
||||
|
||||
优点:
|
||||
|
||||
- 实现最省事
|
||||
|
||||
缺点:
|
||||
|
||||
- 无法保证真实规则稳定命中
|
||||
- 不满足当前需求
|
||||
|
||||
## 五、结论
|
||||
|
||||
采用方案 A。
|
||||
|
||||
原因如下:
|
||||
|
||||
- 最短路径实现真实闭环
|
||||
- 不破坏现有服务职责边界
|
||||
- 最容易保证“同一个 `logId` 一次建好全部命中前提”
|
||||
- 最符合你要求的“生成可以命中的流水时,同时向关联表插入数据”
|
||||
|
||||
## 六、总体设计
|
||||
|
||||
### 6.1 新增服务边界
|
||||
|
||||
新增 `AbnormalAccountBaselineService`,职责仅有一项:
|
||||
|
||||
- 将 `FileRecord.abnormal_accounts` 幂等同步到 `ccdi_account_info`
|
||||
|
||||
职责划分如下:
|
||||
|
||||
- `FileService`
|
||||
- 生成 `logId`
|
||||
- 选择员工身份
|
||||
- 生成异常账户命中计划
|
||||
- 生成 `abnormal_accounts`
|
||||
- 调用异常账户基线同步服务
|
||||
- `AbnormalAccountBaselineService`
|
||||
- 连接数据库
|
||||
- 以 `account_no` 为键执行幂等写入
|
||||
- `StatementService`
|
||||
- 继续只根据 `FileRecord` 生成命中流水
|
||||
- 不负责数据库写入
|
||||
|
||||
### 6.2 调用顺序
|
||||
|
||||
改造后的 `fetch_inner_flow(...)` 主链路如下:
|
||||
|
||||
1. 生成 `logId`
|
||||
2. 生成规则命中计划
|
||||
3. 创建 `FileRecord`
|
||||
4. 生成 `record.abnormal_accounts`
|
||||
5. 调用 `_apply_abnormal_account_baselines(file_record)`
|
||||
6. 基线写库成功后,再将 `file_record` 放入 `self.file_records`
|
||||
7. 继续后续现有逻辑并返回响应
|
||||
|
||||
这个顺序的关键点是:
|
||||
|
||||
- 不把异常账户写库放到 `StatementService`
|
||||
- 不在基线未落库成功时返回可用 `logId`
|
||||
|
||||
### 6.3 失败语义
|
||||
|
||||
- 若 `abnormal_account_hit_rules` 为空:直接跳过,不写库
|
||||
- 若命中了异常账户规则但 `abnormal_accounts` 为空:视为内部状态异常,直接失败
|
||||
- 若数据库连接失败或 upsert 失败:`fetch_inner_flow(...)` 直接失败,本次 `logId` 不写入内存
|
||||
- 不做补丁式重试,不返回半成功结果
|
||||
|
||||
## 七、数据模型设计
|
||||
|
||||
### 7.1 内存事实结构
|
||||
|
||||
继续复用当前 `FileRecord.abnormal_accounts` 结构,最小字段为:
|
||||
|
||||
- `account_no`
|
||||
- `owner_id_card`
|
||||
- `account_name`
|
||||
- `status`
|
||||
- `effective_date`
|
||||
- `invalid_date`
|
||||
- `rule_code`
|
||||
|
||||
说明:
|
||||
|
||||
- `rule_code` 仅作为 Mock 内部路由字段使用
|
||||
- 对外接口不返回这批事实
|
||||
|
||||
### 7.2 `ccdi_account_info` 同步字段
|
||||
|
||||
本次只同步真实规则命中所需的最小字段:
|
||||
|
||||
- `account_no`
|
||||
- `account_type`
|
||||
- `account_name`
|
||||
- `owner_type`
|
||||
- `owner_id`
|
||||
- `bank`
|
||||
- `bank_code`
|
||||
- `currency`
|
||||
- `is_self_account`
|
||||
- `trans_risk_level`
|
||||
- `status`
|
||||
- `effective_date`
|
||||
- `invalid_date`
|
||||
- `created_by`
|
||||
- `updated_by`
|
||||
|
||||
其中字段值约束如下:
|
||||
|
||||
- `account_no`
|
||||
- 直接使用 `record.abnormal_accounts[*].account_no`
|
||||
- `account_name`
|
||||
- 直接使用 `record.abnormal_accounts[*].account_name`
|
||||
- `owner_type`
|
||||
- 固定写 `EMPLOYEE`
|
||||
- `owner_id`
|
||||
- 写 `owner_id_card`
|
||||
- `bank`
|
||||
- 固定写当前异常账户样本对齐的银行名称
|
||||
- `bank_code`
|
||||
- 固定写当前异常账户样本对齐的银行编码
|
||||
- `currency`
|
||||
- 固定 `CNY`
|
||||
- `is_self_account`
|
||||
- 固定 `1`
|
||||
- `trans_risk_level`
|
||||
- 固定 `HIGH`
|
||||
- `status`
|
||||
- 由规则事实决定
|
||||
- `effective_date`
|
||||
- 由规则事实决定
|
||||
- `invalid_date`
|
||||
- 仅销户规则写值
|
||||
|
||||
本次不补充 `monthly_avg_trans_count`、`monthly_avg_trans_amount`、`dr_max_single_amount` 等推导型字段,因为当前两条真实规则命中依赖的是账户状态与流水窗口,不依赖这些预统计字段。
|
||||
|
||||
## 八、幂等策略设计
|
||||
|
||||
### 8.1 唯一定位键
|
||||
|
||||
以 `account_no` 作为异常账户事实的唯一定位键。
|
||||
|
||||
原因:
|
||||
|
||||
- Mock 内部异常账户事实和异常账户样本流水都以账号为唯一桥梁
|
||||
- 同一个员工可能存在多个账户,按 `owner_id` 先删后插会扩大影响面
|
||||
- 账号粒度最符合异常账户明细展示与后续回溯链路
|
||||
|
||||
### 8.2 Upsert 规则
|
||||
|
||||
对每条异常账户事实执行单条幂等 upsert:
|
||||
|
||||
- 若账号不存在:插入
|
||||
- 若账号已存在:覆盖本次 Mock 负责的核心字段
|
||||
|
||||
覆盖范围仅限:
|
||||
|
||||
- `account_name`
|
||||
- `owner_type`
|
||||
- `owner_id`
|
||||
- `bank`
|
||||
- `bank_code`
|
||||
- `currency`
|
||||
- `is_self_account`
|
||||
- `trans_risk_level`
|
||||
- `status`
|
||||
- `effective_date`
|
||||
- `invalid_date`
|
||||
- `updated_by`
|
||||
- `update_time`
|
||||
|
||||
明确不做的事:
|
||||
|
||||
- 不按员工先删整批账户
|
||||
- 不清空其他来源的账户数据
|
||||
- 不以 `owner_id` 做批量覆盖
|
||||
|
||||
## 九、一致性约束
|
||||
|
||||
必须同时满足以下约束:
|
||||
|
||||
1. `record.abnormal_accounts[*].account_no` 必须等于对应异常账户样本流水的 `accountMaskNo`
|
||||
2. `record.abnormal_accounts[*].owner_id_card` 必须等于对应异常账户样本流水的 `cretNo`
|
||||
3. 同一个 `logId` 下,异常账户事实与异常账户流水必须来自同一份 `FileRecord`
|
||||
4. `StatementService` 返回流水时不得覆盖已存在的异常账户样本账号
|
||||
|
||||
这意味着“内存事实 -> 返回流水 -> 数据库账户事实”三者会围绕同一个 `account_no` 对齐,后端真实 SQL 与结果回溯链路不会漂移。
|
||||
|
||||
## 十、模块改动设计
|
||||
|
||||
### 10.1 `lsfx-mock-server/services/file_service.py`
|
||||
|
||||
改动点:
|
||||
|
||||
- 注入新的 `abnormal_account_baseline_service`
|
||||
- 新增 `_apply_abnormal_account_baselines(file_record)` 封装方法
|
||||
- 在 `fetch_inner_flow(...)` 与上传建档链路中,于 `self.file_records[log_id] = file_record` 之前调用该方法
|
||||
|
||||
职责保持:
|
||||
|
||||
- 仍是异常账户规则计划和事实的唯一生成入口
|
||||
- 不直接拼装 SQL 字符串,数据库写入交给独立服务
|
||||
|
||||
### 10.2 `lsfx-mock-server/services/abnormal_account_baseline_service.py`
|
||||
|
||||
新增文件,提供:
|
||||
|
||||
- 数据库连接
|
||||
- 输入校验
|
||||
- 单条异常账户事实 upsert
|
||||
- 批量 apply 入口
|
||||
|
||||
建议方法签名:
|
||||
|
||||
```python
|
||||
def apply(self, staff_id_card: str, abnormal_accounts: List[dict]) -> None:
|
||||
...
|
||||
```
|
||||
|
||||
说明:
|
||||
|
||||
- `staff_id_card` 用于做最小一致性校验
|
||||
- `abnormal_accounts` 为当前 `logId` 已生成好的异常账户事实列表
|
||||
|
||||
### 10.3 `lsfx-mock-server/services/statement_service.py`
|
||||
|
||||
本次不新增写库逻辑,仅维持现有一致性保证:
|
||||
|
||||
- 继续从 `FileRecord` 读取 `abnormal_accounts`
|
||||
- 继续根据 `rule_code` 选择异常账户样本构造器
|
||||
- 保持 `_apply_primary_binding(...)` 只兜底缺失账号,不覆盖异常账户样本账号
|
||||
|
||||
## 十一、测试设计
|
||||
|
||||
### 11.1 `tests/test_file_service.py`
|
||||
|
||||
补充断言:
|
||||
|
||||
- 命中异常账户规则时,`fetch_inner_flow(...)` 会调用异常账户基线同步服务
|
||||
- 同步服务收到的账号、员工身份证、状态、生效日、销户日与 `record.abnormal_accounts` 完全一致
|
||||
- 基线同步失败时,`file_records` 中不会残留该 `logId`
|
||||
|
||||
这里优先使用 fake service / stub 断言调用参数,不直接依赖真实数据库。
|
||||
|
||||
### 11.2 `tests/test_statement_service.py`
|
||||
|
||||
保留现有异常账户流水样本测试,再补充链路一致性断言:
|
||||
|
||||
- 同一个 `logId` 下,异常账户样本流水中的 `accountMaskNo` 必须全部来自 `record.abnormal_accounts`
|
||||
- `StatementService` 不会因本次改造新增数据库写入副作用
|
||||
|
||||
### 11.3 `tests/test_abnormal_account_baseline_service.py`
|
||||
|
||||
新增服务层单测,覆盖:
|
||||
|
||||
- 空异常账户列表直接跳过
|
||||
- 命中规则但事实为空时报错
|
||||
- 新账号插入
|
||||
- 已有账号按 `account_no` 幂等更新
|
||||
|
||||
## 十二、验收标准
|
||||
|
||||
本次设计实施后,应满足以下验收结果:
|
||||
|
||||
1. 创建 `logId` 时,命中的异常账户事实会一次性写入 `ccdi_account_info`
|
||||
2. 同一个 `logId` 后续查询流水不会再次写库
|
||||
3. `ccdi_account_info.account_no` 与异常账户样本流水 `accountMaskNo` 完全一致
|
||||
4. 写库失败时,不返回半成功 `logId`
|
||||
5. 现有异常账户命中流水生成、分页与缓存语义保持不变
|
||||
|
||||
## 十三、结论
|
||||
|
||||
本次采用“创建 `logId` 时一次性同步异常账户基线”的方式改造 `lsfx-mock-server`:
|
||||
|
||||
- 让异常账户命中样本不再停留在 Mock 进程内存
|
||||
- 让 `ccdi_account_info` 与返回流水围绕同一个账号闭环
|
||||
- 保持现有接口不变
|
||||
- 保持最短路径实现,不引入兼容性和补丁式方案
|
||||
|
||||
这能确保 Mock 生成的异常账户流水不仅“看起来能命中”,而且“真实规则一定具备命中所需的账户事实前提”。
|
||||
@@ -0,0 +1,285 @@
|
||||
# LSFX Mock Server 异常账户命中流水设计文档
|
||||
|
||||
**模块**: `lsfx-mock-server`
|
||||
**日期**: 2026-03-31
|
||||
|
||||
## 一、背景
|
||||
|
||||
当前仓库中的异常账户模型已经在主系统后端完成规则接入,包含以下两条对象型规则:
|
||||
|
||||
- `SUDDEN_ACCOUNT_CLOSURE`
|
||||
- `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
|
||||
根据已落地的后端实施计划与实现结果,这两条规则的命中依赖两类事实:
|
||||
|
||||
1. 账户事实:来自 `ccdi_account_info` 的账户状态、开户日、销户日、账户归属人
|
||||
2. 流水事实:来自 `ccdi_bank_statement` 的账号维度交易时间与交易金额
|
||||
|
||||
当前 [lsfx-mock-server](/Users/wkc/Desktop/ccdi/ccdi/lsfx-mock-server) 已具备以下能力:
|
||||
|
||||
- `FileService` 为每个 `logId` 生成稳定的规则命中计划
|
||||
- `StatementService` 根据命中计划拼接规则样本流水
|
||||
- `/watson/api/project/getBSByLogId` 返回分页流水列表
|
||||
|
||||
但 Mock 服务现阶段尚未支持异常账户模型对应的“账户事实 + 命中流水”闭环,因此后端即使接入了真实规则 SQL,也无法通过现有 Mock 数据稳定命中异常账户规则。
|
||||
|
||||
本次目标是在不改动现有接口协议的前提下,为 `lsfx-mock-server` 补齐异常账户规则的最小闭环造数能力,让同一个 `logId` 下既有可命中后端 SQL 的账户事实,也有与之匹配的流水样本。
|
||||
|
||||
## 二、目标
|
||||
|
||||
- 在现有 Mock 规则计划体系中新增异常账户命中计划。
|
||||
- 为每个命中异常账户规则的 `logId` 生成稳定的异常账户事实。
|
||||
- 按后端 SQL 口径生成两条规则对应的流水样本。
|
||||
- 保持现有 `/watson/api/project/getBSByLogId` 返回结构不变。
|
||||
- 保持现有 `FileService -> StatementService` 主链路不变,不引入平行造数机制。
|
||||
- 补充测试,确保命中计划、账户事实和流水样本三者一致。
|
||||
|
||||
## 三、非目标
|
||||
|
||||
- 不新增异常账户独立接口。
|
||||
- 不修改现有上传、拉取行内流水、查询流水接口的请求参数与响应结构。
|
||||
- 不对外直接返回异常账户事实列表。
|
||||
- 不模拟 `ccdi_account_info` 全字段,只保留两条规则所需最小字段。
|
||||
- 不扩展异常账户详情页、分页查询或导出链路。
|
||||
- 不引入动态配置平台、DSL 或补丁式兼容逻辑。
|
||||
|
||||
## 四、方案对比
|
||||
|
||||
### 4.1 方案一:并入现有 `rule_hit_plan` 体系
|
||||
|
||||
做法:
|
||||
|
||||
- 在 `FileRecord` 中新增异常账户命中计划与账户事实
|
||||
- 在 `build_seed_statements_for_rule_plan(...)` 中接入异常账户样本生成
|
||||
- `StatementService` 继续统一补噪声、编号、分页
|
||||
|
||||
优点:
|
||||
|
||||
- 完全复用当前 Mock 主链路
|
||||
- 同一个 `logId` 下规则计划、账户事实、流水样本天然一致
|
||||
- 实现路径最短,后续联调稳定
|
||||
|
||||
缺点:
|
||||
|
||||
- 需要在 `FileRecord` 中增加一层最小账户事实建模
|
||||
|
||||
### 4.2 方案二:仅在 `StatementService` 中硬编码异常账户流水样本
|
||||
|
||||
优点:
|
||||
|
||||
- 改动最少,实现最快
|
||||
|
||||
缺点:
|
||||
|
||||
- 命中流水和账户事实分离
|
||||
- 后续若需要调试命中原因或扩展账户事实,维护成本较高
|
||||
|
||||
### 4.3 方案三:新增独立异常账户服务模块
|
||||
|
||||
优点:
|
||||
|
||||
- 抽象边界更清晰
|
||||
|
||||
缺点:
|
||||
|
||||
- 对当前 Mock 项目来说偏重
|
||||
- 超出最短路径实现要求
|
||||
|
||||
## 五、结论
|
||||
|
||||
采用方案一。
|
||||
|
||||
原因如下:
|
||||
|
||||
- 与当前 Mock 服务的规则计划机制完全一致
|
||||
- 不新增接口、不增加平行数据流
|
||||
- 能以最小改动实现“账户事实 + 命中流水”的稳定闭环
|
||||
- 后续若主系统还需要继续扩展 Mock 规则样本,可以沿用同一套结构
|
||||
|
||||
## 六、总体设计
|
||||
|
||||
### 6.1 新增命中计划维度
|
||||
|
||||
在现有规则计划结构上新增:
|
||||
|
||||
- `abnormal_account_hit_rules`
|
||||
|
||||
规则池固定为:
|
||||
|
||||
- `SUDDEN_ACCOUNT_CLOSURE`
|
||||
- `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
|
||||
`FileService` 在为 `logId` 生成规则计划时,继续沿用现有“稳定随机且可复现”的逻辑,把异常账户规则作为平级维度一起生成并回填到 `FileRecord`。
|
||||
|
||||
### 6.2 新增最小账户事实
|
||||
|
||||
`FileRecord` 新增以下字段:
|
||||
|
||||
- `abnormal_account_hit_rules: List[str]`
|
||||
- `abnormal_accounts: List[AbnormalAccountFact]`
|
||||
|
||||
其中 `AbnormalAccountFact` 仅保留两条规则需要的最小字段:
|
||||
|
||||
- `account_no`
|
||||
- `owner_id_card`
|
||||
- `account_name`
|
||||
- `status`
|
||||
- `effective_date`
|
||||
- `invalid_date`
|
||||
|
||||
说明:
|
||||
|
||||
- 本次不复制主系统 `ccdi_account_info` 全量结构
|
||||
- 只保留命中 SQL 真正会用到的事实,避免过度设计
|
||||
|
||||
### 6.3 样本流水接入位置
|
||||
|
||||
现有 `StatementService` 已通过 `build_seed_statements_for_rule_plan(...)` 生成规则样本流水。
|
||||
|
||||
本次改造保持该入口不变,仅在其内部追加异常账户规则样本构造:
|
||||
|
||||
- `build_sudden_account_closure_samples(...)`
|
||||
- `build_dormant_account_large_activation_samples(...)`
|
||||
|
||||
生成出的异常账户样本与其他规则样本一起组成 `seeded_statements`,之后继续复用现有逻辑:
|
||||
|
||||
1. 补足噪声流水
|
||||
2. 统一分配流水编号
|
||||
3. 打乱顺序
|
||||
4. 分页返回
|
||||
|
||||
## 七、规则口径设计
|
||||
|
||||
### 7.1 `SUDDEN_ACCOUNT_CLOSURE`
|
||||
|
||||
Mock 口径必须与后端 SQL 对齐:
|
||||
|
||||
- 账户状态为 `2`
|
||||
- `invalid_date` 非空
|
||||
- 流水账号与账户事实中的 `account_no` 一致
|
||||
- 所有命中样本流水时间都落在 `[invalid_date - 30天, invalid_date)` 区间内
|
||||
|
||||
样本策略:
|
||||
|
||||
- 为单个命中账户生成 2 到 3 笔流水
|
||||
- 同时覆盖收入和支出,便于后端聚合 `windowTotalAmount`
|
||||
- 保证存在明确的最后交易日,便于推导 `lastTxDate`
|
||||
- 保证存在稳定的单笔最大金额,便于推导 `windowMaxSingleAmount`
|
||||
|
||||
### 7.2 `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
|
||||
Mock 口径必须与后端 SQL 对齐:
|
||||
|
||||
- 账户状态为 `1`
|
||||
- `effective_date` 非空
|
||||
- 首笔流水日期 `>= effective_date + 6个月`
|
||||
- 启用后流水满足:
|
||||
- `windowTotalAmount >= 500000`
|
||||
- 或 `windowMaxSingleAmount >= 100000`
|
||||
|
||||
样本策略:
|
||||
|
||||
- 首笔流水明确落在开户满 6 个月以后
|
||||
- 为避免边界漂移,直接让累计金额和单笔最大金额同时满足阈值
|
||||
- 启用后生成 2 笔以上流水,保证累计口径稳定
|
||||
|
||||
### 7.3 未命中规则时的处理
|
||||
|
||||
当某个 `logId` 的 `abnormal_account_hit_rules` 中不包含某条规则时:
|
||||
|
||||
- 不生成该规则的账户事实
|
||||
- 不生成该规则的流水样本
|
||||
- 不制造“接近命中但未命中”的灰度样本
|
||||
|
||||
这样可以避免误命中,保证 Mock 语义清晰。
|
||||
|
||||
## 八、数据流设计
|
||||
|
||||
本次改造后的数据流如下:
|
||||
|
||||
1. `FileService.fetch_inner_flow(...)` 或上传链路创建 `FileRecord`
|
||||
2. `FileService` 生成四类命中计划:
|
||||
- `large_transaction_hit_rules`
|
||||
- `phase1_hit_rules`
|
||||
- `phase2_statement_hit_rules`
|
||||
- `abnormal_account_hit_rules`
|
||||
3. `FileService` 根据异常账户命中计划生成 `abnormal_accounts`
|
||||
4. `StatementService._generate_statements(...)` 读取 `FileRecord`
|
||||
5. `build_seed_statements_for_rule_plan(...)` 依据异常账户命中计划拼接对应样本流水
|
||||
6. 服务层继续补噪声并返回现有结构的流水列表
|
||||
|
||||
结论:
|
||||
|
||||
- 内部多了一层异常账户事实
|
||||
- 对外接口保持不变
|
||||
|
||||
## 九、模块与职责
|
||||
|
||||
### 9.1 `lsfx-mock-server/services/file_service.py`
|
||||
|
||||
职责调整:
|
||||
|
||||
- 扩展 `FileRecord`
|
||||
- 生成并保存 `abnormal_account_hit_rules`
|
||||
- 按命中规则生成对应 `abnormal_accounts`
|
||||
- 保证同一 `logId` 下账户事实稳定可复现
|
||||
|
||||
### 9.2 `lsfx-mock-server/services/statement_rule_samples.py`
|
||||
|
||||
职责调整:
|
||||
|
||||
- 新增异常账户事实结构
|
||||
- 新增两类异常账户样本构造器
|
||||
- 在统一种子流水构造入口中接入异常账户样本
|
||||
|
||||
### 9.3 `lsfx-mock-server/services/statement_service.py`
|
||||
|
||||
职责保持不变,仅消费新增计划与样本:
|
||||
|
||||
- 读取 `FileRecord` 中的异常账户计划
|
||||
- 调用种子样本生成器
|
||||
- 继续完成补噪声、编号、缓存、分页返回
|
||||
|
||||
## 十、测试设计
|
||||
|
||||
测试分为三层:
|
||||
|
||||
### 10.1 计划层测试
|
||||
|
||||
验证点:
|
||||
|
||||
- `FileRecord` 能保存 `abnormal_account_hit_rules`
|
||||
- 生成的异常账户计划稳定、可复现
|
||||
- 命中计划与账户事实数量、规则类型一致
|
||||
|
||||
### 10.2 样本层测试
|
||||
|
||||
验证点:
|
||||
|
||||
- `SUDDEN_ACCOUNT_CLOSURE` 样本流水日期全部处于销户前 30 天窗口内
|
||||
- `DORMANT_ACCOUNT_LARGE_ACTIVATION` 首笔流水日期晚于开户满 6 个月
|
||||
- 休眠账户样本的累计金额和单笔最大金额达到后端 SQL 阈值
|
||||
|
||||
### 10.3 服务层测试
|
||||
|
||||
验证点:
|
||||
|
||||
- `StatementService._generate_statements(...)` 能把异常账户样本混入返回流水
|
||||
- 未命中的异常账户规则不会污染其他 `logId`
|
||||
- 同一个 `logId` 重复查询时缓存结果保持稳定
|
||||
|
||||
## 十一、风险与约束
|
||||
|
||||
- 本次不改协议,因此异常账户事实仅在 Mock 服务内部使用
|
||||
- 由于不新增独立接口,联调时仍需通过现有流水接口间接触发后端规则命中
|
||||
- 样本日期和金额必须严格贴后端 SQL 口径,避免出现“Mock 看起来合理但后端不命中”的问题
|
||||
|
||||
## 十二、后续计划
|
||||
|
||||
本设计确认后,下一步仅进入实施计划编写阶段,不直接扩展其他功能。
|
||||
|
||||
按仓库约定,需要继续补充:
|
||||
|
||||
- 后端实施计划
|
||||
- 前端实施计划
|
||||
- 本次改动的实施记录
|
||||
@@ -0,0 +1,463 @@
|
||||
# 项目详情风险明细异常账户人员信息设计文档
|
||||
|
||||
**模块**: 项目详情 - 结果总览 - 风险明细
|
||||
**日期**: 2026-03-31
|
||||
**作者**: Codex
|
||||
**状态**: 已确认
|
||||
|
||||
## 一、背景
|
||||
|
||||
当前项目详情页 `结果总览 -> 风险明细` 已经具备以下能力:
|
||||
|
||||
1. `涉疑交易明细` 已接入真实分页查询与统一导出。
|
||||
2. `员工负面征信信息` 已接入真实分页查询,并已纳入统一导出。
|
||||
3. `异常账户人员信息` 仍停留在前端静态占位与统一导出空 sheet。
|
||||
|
||||
与此同时,`2026-03-31` 已完成异常账户模型接入银行流水打标主链路:
|
||||
|
||||
- 模型编码:`ABNORMAL_ACCOUNT`
|
||||
- 规则编码:
|
||||
- `SUDDEN_ACCOUNT_CLOSURE`
|
||||
- `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
- 命中结果已写入 `ccdi_bank_statement_tag_result`
|
||||
- 员工风险聚合已能承接异常账户模型命中
|
||||
|
||||
因此,本次需求不是新增模型能力,而是将已有的异常账户命中结果正式接入 `风险明细` 区域展示,并保证统一导出中的 `异常账户人员信息` sheet 导出真实数据。
|
||||
|
||||
## 二、目标
|
||||
|
||||
本次设计目标如下:
|
||||
|
||||
1. 将 `异常账户人员信息` 区块从占位数据改为真实查询结果。
|
||||
2. 页面展示字段与统一导出字段完全一致。
|
||||
3. 风险明细统一导出中的第 3 个 sheet 改为真实导出异常账户人员信息。
|
||||
4. 保持最短路径实现,不扩展详情弹窗、筛选器或平行链路。
|
||||
|
||||
## 三、范围
|
||||
|
||||
### 3.1 本次范围
|
||||
|
||||
- 新增结果总览专用异常账户人员分页查询接口
|
||||
- 新增异常账户人员导出查询
|
||||
- `RiskDetailSection.vue` 接入真实异常账户数据与独立分页
|
||||
- `risk-details/export` 第 3 个 sheet 改为真实数据
|
||||
- 补充本次设计文档与设计记录
|
||||
|
||||
### 3.2 不在本次范围
|
||||
|
||||
- 不新增异常账户详情弹窗
|
||||
- 不新增异常账户区块筛选条件
|
||||
- 不扩展到关系人或外部账户
|
||||
- 不新增单独的异常账户导出接口
|
||||
- 不改造项目分析弹窗
|
||||
- 不新增兼容性补丁、兜底链路或降级方案
|
||||
|
||||
## 四、现状分析
|
||||
|
||||
### 4.1 前端现状
|
||||
|
||||
当前核心组件为:
|
||||
|
||||
- `ruoyi-ui/src/views/ccdiProject/components/detail/RiskDetailSection.vue`
|
||||
|
||||
当前 `异常账户人员信息` 区块仍直接读取:
|
||||
|
||||
- `sectionData.abnormalAccountList || []`
|
||||
|
||||
现有列结构仍是早期占位字段:
|
||||
|
||||
1. `账户号`
|
||||
2. `账户人姓名`
|
||||
3. `开户银行`
|
||||
4. `异常发生时间`
|
||||
5. `状态`
|
||||
6. `操作`
|
||||
|
||||
这意味着当前页面展示既没有真实接口,也与本次统一导出的字段口径不完全一致。
|
||||
|
||||
### 4.2 后端现状
|
||||
|
||||
当前结果总览控制器为:
|
||||
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewController.java`
|
||||
|
||||
当前已具备:
|
||||
|
||||
1. `GET /ccdi/project/overview/suspicious-transactions`
|
||||
2. `GET /ccdi/project/overview/employee-credit-negative`
|
||||
3. `POST /ccdi/project/overview/risk-details/export`
|
||||
|
||||
其中统一导出由:
|
||||
|
||||
- `CcdiProjectOverviewServiceImpl.exportRiskDetails(...)`
|
||||
- `CcdiProjectRiskDetailWorkbookExporter`
|
||||
|
||||
共同完成。
|
||||
|
||||
但当前导出器对第 3 个 sheet 仅写入表头:
|
||||
|
||||
1. `账号`
|
||||
2. `开户人`
|
||||
3. `银行`
|
||||
4. `异常类型`
|
||||
5. `异常发生时间`
|
||||
6. `状态`
|
||||
|
||||
没有真实数据查询与写出逻辑。
|
||||
|
||||
### 4.3 已有数据基础
|
||||
|
||||
异常账户模型命中结果已存在于:
|
||||
|
||||
- `ccdi_bank_statement_tag_result`
|
||||
|
||||
并且当前模型设计已明确:
|
||||
|
||||
- `model_code = 'ABNORMAL_ACCOUNT'`
|
||||
- `result_type = 'OBJECT'`
|
||||
- `object_type = 'STAFF_ID_CARD'`
|
||||
- `object_key = 员工身份证号`
|
||||
|
||||
账户主数据已存在于:
|
||||
|
||||
- `ccdi_account_info`
|
||||
|
||||
因此本次展示与导出的最短路径,是直接基于异常账户对象型命中结果与账户信息表构造结果总览专用查询,而不是从聚合结果或前端 mock 数据反推。
|
||||
|
||||
## 五、方案对比
|
||||
|
||||
### 5.1 方案 A:新增结果总览专用异常账户查询链路,页面与导出共用同一口径
|
||||
|
||||
做法:
|
||||
|
||||
- 新增 `GET /ccdi/project/overview/abnormal-account-people`
|
||||
- 新增服务层内部导出查询方法
|
||||
- 查询源直接使用 `ccdi_bank_statement_tag_result + ccdi_account_info`
|
||||
- 页面展示与统一导出共用同一套字段口径
|
||||
|
||||
优点:
|
||||
|
||||
- 页面与导出完全同口径
|
||||
- 不需要解析占位数据
|
||||
- 不依赖聚合快照反推细节
|
||||
- 与“一条命中结果一行”的确认口径天然一致
|
||||
- 改动集中在结果总览域内,符合最短路径
|
||||
|
||||
缺点:
|
||||
|
||||
- 需要补充新的 VO、Mapper SQL、Excel 行对象和测试
|
||||
|
||||
### 5.2 方案 B:复用项目分析弹窗对象型异常查询,再补字段拼装
|
||||
|
||||
做法:
|
||||
|
||||
- 基于 `selectPersonAnalysisObjectRows` 再改造成风险明细列表
|
||||
|
||||
问题:
|
||||
|
||||
- 现有查询主要返回标题、摘要和 `reasonDetail`
|
||||
- 不直接提供 `账号 / 银行 / 状态 / 异常发生时间`
|
||||
- 需要从 `reasonDetail` 反解析字段,稳定性差
|
||||
- 不适合作为统一导出数据源
|
||||
|
||||
### 5.3 方案 C:基于员工风险聚合表反推异常账户明细
|
||||
|
||||
做法:
|
||||
|
||||
- 以 `ccdi_project_overview_employee_result` 为主,再关联账户表补全字段
|
||||
|
||||
问题:
|
||||
|
||||
- 聚合层已经丢失“每条命中结果一行”的细粒度
|
||||
- 难以稳定还原 `异常类型` 与 `异常发生时间`
|
||||
- 容易导致页面与导出口径偏移
|
||||
|
||||
### 5.4 结论
|
||||
|
||||
采用 **方案 A:新增结果总览专用异常账户查询链路,页面与导出共用同一口径**。
|
||||
|
||||
## 六、总体设计
|
||||
|
||||
### 6.1 设计原则
|
||||
|
||||
本次设计遵循以下原则:
|
||||
|
||||
1. 以已有异常账户打标结果为唯一事实来源。
|
||||
2. 页面展示与导出字段保持完全一致。
|
||||
3. 一条命中结果一行,不做账号合并、不做员工合并。
|
||||
4. 仅识别员工本人账户,不扩展关系人或外部账户。
|
||||
5. 不新增平行模块,所有改动收口在结果总览域。
|
||||
|
||||
### 6.2 数据流
|
||||
|
||||
页面查询链路:
|
||||
|
||||
1. `RiskDetailSection.vue` 加载当前项目的异常账户人员分页数据。
|
||||
2. 前端调用 `GET /ccdi/project/overview/abnormal-account-people`。
|
||||
3. 控制器调用 `overviewService.getAbnormalAccountPeople(...)`。
|
||||
4. 服务层校验项目存在,调用 Mapper 分页 SQL。
|
||||
5. Mapper 从异常账户对象命中结果与账户信息表中返回结果。
|
||||
6. 前端渲染 `异常账户人员信息` 表格。
|
||||
|
||||
统一导出链路:
|
||||
|
||||
1. 用户点击 `风险明细` 卡片右上角 `导出`。
|
||||
2. 前端调用 `POST /ccdi/project/overview/risk-details/export`。
|
||||
3. 服务层查询:
|
||||
- 涉疑交易全量数据
|
||||
- 员工负面征信全量数据
|
||||
- 异常账户人员全量数据
|
||||
4. `CcdiProjectRiskDetailWorkbookExporter` 统一生成 3 个 sheet。
|
||||
5. 第 3 个 sheet `异常账户人员信息` 写出真实数据。
|
||||
|
||||
## 七、字段与业务口径
|
||||
|
||||
### 7.1 页面与导出统一字段
|
||||
|
||||
本次 `异常账户人员信息` 页面与导出统一使用以下 6 个字段:
|
||||
|
||||
1. `账号`
|
||||
2. `开户人`
|
||||
3. `银行`
|
||||
4. `异常类型`
|
||||
5. `异常发生时间`
|
||||
6. `状态`
|
||||
|
||||
不保留“操作”列,也不新增辅助列。
|
||||
|
||||
### 7.2 粒度口径
|
||||
|
||||
展示与导出粒度固定为:
|
||||
|
||||
- 一条异常账户命中结果一行
|
||||
|
||||
规则说明:
|
||||
|
||||
- 同一员工命中多条异常账户规则时,保留多行
|
||||
- 同一账号命中多条规则时,也保留多行
|
||||
- 不按员工汇总
|
||||
- 不按账号合并
|
||||
|
||||
### 7.3 字段映射规则
|
||||
|
||||
#### 1. 账号
|
||||
|
||||
- 取 `ccdi_account_info.account_no`
|
||||
|
||||
#### 2. 开户人
|
||||
|
||||
- 优先取 `ccdi_account_info.account_name`
|
||||
- 若为空,则回退员工姓名
|
||||
|
||||
#### 3. 银行
|
||||
|
||||
- 取 `ccdi_account_info.bank`
|
||||
|
||||
#### 4. 异常类型
|
||||
|
||||
- 取 `ccdi_bank_statement_tag_result.rule_name`
|
||||
|
||||
#### 5. 异常发生时间
|
||||
|
||||
- 对 `SUDDEN_ACCOUNT_CLOSURE` 取账户销户日期 `invalid_date`
|
||||
- 对 `DORMANT_ACCOUNT_LARGE_ACTIVATION` 取首次交易日期 `first_tx_date`
|
||||
- 统一格式化为日期字符串
|
||||
|
||||
#### 6. 状态
|
||||
|
||||
- 取 `ccdi_account_info.status`
|
||||
- 映射文案固定为:
|
||||
- `1 -> 正常`
|
||||
- `2 -> 已销户`
|
||||
|
||||
本次不额外扩展更多状态码解释。
|
||||
|
||||
## 八、后端设计
|
||||
|
||||
### 8.1 控制器接口
|
||||
|
||||
在 `CcdiProjectOverviewController` 下新增接口:
|
||||
|
||||
- `GET /ccdi/project/overview/abnormal-account-people`
|
||||
|
||||
入参:
|
||||
|
||||
- `projectId`
|
||||
- `pageNum`
|
||||
- `pageSize`
|
||||
|
||||
权限:
|
||||
|
||||
- 沿用结果总览查询权限 `ccdi:project:query`
|
||||
|
||||
返回结构:
|
||||
|
||||
- `rows`
|
||||
- `total`
|
||||
|
||||
### 8.2 服务层职责
|
||||
|
||||
在 `ICcdiProjectOverviewService` 与实现类中新增:
|
||||
|
||||
1. `getAbnormalAccountPeople(queryDTO)`
|
||||
2. `exportAbnormalAccountPeople(projectId)`
|
||||
|
||||
服务层职责如下:
|
||||
|
||||
1. 校验项目存在
|
||||
2. 处理分页参数
|
||||
3. 查询异常账户人员分页或导出数据
|
||||
4. 将查询结果映射为页面 VO 或导出 Excel 对象
|
||||
5. 在 `exportRiskDetails(...)` 中将异常账户全量数据传入工作簿导出器
|
||||
|
||||
### 8.3 Mapper 查询策略
|
||||
|
||||
查询必须满足以下约束:
|
||||
|
||||
1. 仅查询当前项目:
|
||||
- `tr.project_id = 当前项目`
|
||||
2. 仅查询异常账户模型:
|
||||
- `tr.model_code = 'ABNORMAL_ACCOUNT'`
|
||||
3. 仅查询对象型结果:
|
||||
- `tr.bank_statement_id is null`
|
||||
4. 仅查询员工本人账户:
|
||||
- `account.owner_type = 'EMPLOYEE'`
|
||||
- `account.owner_id = tr.object_key`
|
||||
5. 每条命中结果唯一关联到一条账户记录
|
||||
|
||||
### 8.4 账户唯一关联规则
|
||||
|
||||
由于异常账户对象型结果以“员工身份证号”为主键落库,本次查询必须保证命中结果可稳定回溯到具体账户。
|
||||
|
||||
设计约束如下:
|
||||
|
||||
1. 优先依据异常账户规则 `reason_detail` 中的账号信息匹配 `ccdi_account_info.account_no`
|
||||
2. 仅在账号匹配成功时返回该条结果
|
||||
3. 不允许仅凭员工身份证号将同一员工下全部账户全部展开,避免误导
|
||||
|
||||
这意味着本次实现同时要求异常账户对象型结果具备“可唯一回溯到账号”的查询条件,不使用模糊补数方案。
|
||||
|
||||
### 8.5 导出收口
|
||||
|
||||
`POST /ccdi/project/overview/risk-details/export` 继续保持统一导出入口,不额外新增独立异常账户导出接口。
|
||||
|
||||
服务层导出步骤调整为:
|
||||
|
||||
1. 查询涉疑交易全量数据
|
||||
2. 查询员工负面征信全量数据
|
||||
3. 查询异常账户人员全量数据
|
||||
4. 将三类数据统一传入 `CcdiProjectRiskDetailWorkbookExporter`
|
||||
|
||||
导出文件顺序保持不变:
|
||||
|
||||
1. `涉疑交易明细`
|
||||
2. `员工负面征信信息`
|
||||
3. `异常账户人员信息`
|
||||
|
||||
## 九、前端设计
|
||||
|
||||
### 9.1 页面位置
|
||||
|
||||
本次前端改动集中在:
|
||||
|
||||
- `ruoyi-ui/src/views/ccdiProject/components/detail/RiskDetailSection.vue`
|
||||
|
||||
### 9.2 区块展示
|
||||
|
||||
`异常账户人员信息` 区块调整为真实业务表格,字段顺序固定为:
|
||||
|
||||
1. `账号`
|
||||
2. `开户人`
|
||||
3. `银行`
|
||||
4. `异常类型`
|
||||
5. `异常发生时间`
|
||||
6. `状态`
|
||||
|
||||
副标题保持语义一致,可调整为:
|
||||
|
||||
- `展示异常账户命中人员及账户状态`
|
||||
|
||||
### 9.3 交互规则
|
||||
|
||||
- 不新增区块级导出按钮
|
||||
- 不新增查看详情按钮
|
||||
- 不新增行操作列
|
||||
- 保持独立分页
|
||||
- 保持独立 loading
|
||||
- 查询失败时仅影响该区块,不影响其他两个风险明细区块
|
||||
|
||||
### 9.4 空态文案
|
||||
|
||||
空态文案统一为:
|
||||
|
||||
- `当前项目暂无异常账户人员信息`
|
||||
|
||||
## 十、测试设计
|
||||
|
||||
### 10.1 后端测试
|
||||
|
||||
新增或调整以下验证:
|
||||
|
||||
1. Mapper SQL 测试
|
||||
- 校验异常账户分页查询与导出查询包含 `ABNORMAL_ACCOUNT`、项目过滤、对象型过滤和账户关联条件
|
||||
2. Service 测试
|
||||
- 校验异常账户分页查询 `rows/total` 返回正确
|
||||
- 校验统一导出会将异常账户真实数据传入导出器
|
||||
3. Workbook 导出测试
|
||||
- 校验第 3 个 sheet 存在真实数据行
|
||||
- 校验列顺序为:
|
||||
- `账号`
|
||||
- `开户人`
|
||||
- `银行`
|
||||
- `异常类型`
|
||||
- `异常发生时间`
|
||||
- `状态`
|
||||
|
||||
### 10.2 前端测试
|
||||
|
||||
新增或调整以下验证:
|
||||
|
||||
1. `RiskDetailSection.vue` 异常账户真实字段渲染测试
|
||||
2. 异常账户区块独立分页测试
|
||||
3. 统一导出按钮仍走 `risk-details/export` 的测试
|
||||
4. 移除旧占位“操作 / 查看详情”列的静态断言
|
||||
|
||||
## 十一、边界与异常处理
|
||||
|
||||
### 11.1 空数据场景
|
||||
|
||||
当项目下没有异常账户命中结果时:
|
||||
|
||||
- 页面显示空态
|
||||
- 导出 sheet 仅保留表头,不输出数据行
|
||||
|
||||
### 11.2 查询失败场景
|
||||
|
||||
页面查询失败时:
|
||||
|
||||
- 清空当前异常账户列表
|
||||
- 提示:`加载异常账户人员信息失败`
|
||||
- 不联动清空涉疑交易或员工负面征信区块
|
||||
|
||||
统一导出失败时:
|
||||
|
||||
- 沿用当前服务层异常提示:`导出风险明细失败`
|
||||
|
||||
### 11.3 非本次范围约束
|
||||
|
||||
本次明确不做以下扩展:
|
||||
|
||||
- 不新增详情弹窗
|
||||
- 不增加筛选条件
|
||||
- 不补关系人账户
|
||||
- 不增加异步导出任务
|
||||
- 不在导出中追加页面外字段
|
||||
|
||||
## 十二、后续文档规划
|
||||
|
||||
设计确认并完成文档复核后,继续补充两份实施计划:
|
||||
|
||||
1. 后端实施计划:`docs/plans/backend/`
|
||||
2. 前端实施计划:`docs/plans/frontend/`
|
||||
|
||||
随后按实际改动沉淀实施记录。
|
||||
574
docs/design/2026-04-17-intermediary-library-refactor-design.md
Normal file
574
docs/design/2026-04-17-intermediary-library-refactor-design.md
Normal file
@@ -0,0 +1,574 @@
|
||||
# 中介库主从结构改造设计文档
|
||||
|
||||
**模块**: 中介库管理
|
||||
**日期**: 2026-04-17
|
||||
**作者**: Codex
|
||||
**状态**: 已确认
|
||||
|
||||
## 一、背景
|
||||
|
||||
当前中介库模块采用“个人中介 / 机构中介”并列维护方式:
|
||||
|
||||
1. 个人中介数据存放于 `ccdi_biz_intermediary`
|
||||
2. 机构中介数据直接复用 `ccdi_enterprise_base_info`
|
||||
3. 前端新增弹窗在“个人 / 机构”之间二选一录入
|
||||
4. 首页列表通过联合查询同时展示个人中介与机构中介
|
||||
|
||||
现业务需求已变更为新的主从维护模式:
|
||||
|
||||
1. 用户先录入中介个人信息
|
||||
2. 再在该中介名下录入亲属个人信息
|
||||
3. 再在该中介名下录入关联机构关系信息
|
||||
4. 关联机构信息只维护“中介与机构的关系”,实体主档仍由实体信息维护功能负责
|
||||
5. 首页列表需要统一查询“中介本人 / 中介亲属 / 中介关联机构”三类记录
|
||||
|
||||
本次设计目标是在尽量复用现有中介库模块基础上,将原并列建模改造为“中介本人主记录 + 亲属子记录 + 机构关系子记录”的结构。
|
||||
|
||||
## 二、目标
|
||||
|
||||
本次设计目标如下:
|
||||
|
||||
1. 中介库主记录统一为“中介本人”
|
||||
2. `ccdi_biz_intermediary` 同时承载中介本人和中介亲属
|
||||
3. 新增 `ccdi_intermediary_enterprise_relation` 维护中介与机构的关系
|
||||
4. 首页统一展示三类记录,并支持统一查询
|
||||
5. 新增流程改为“先新增中介本人,再在详情中维护亲属和关联机构”
|
||||
6. 保持最短路径实现,不引入额外通用关系模型或平行模块
|
||||
|
||||
## 三、范围
|
||||
|
||||
### 3.1 本次范围
|
||||
|
||||
- 调整中介库数据模型
|
||||
- 新增中介关联机构关系表
|
||||
- 改造中介库首页联合查询
|
||||
- 改造中介新增、详情、亲属维护、关联机构维护交互
|
||||
- 调整后端中介、亲属、关联机构关系接口
|
||||
- 补充设计文档与实施计划
|
||||
|
||||
### 3.2 不在本次范围
|
||||
|
||||
- 不负责录入或修改实体机构主档
|
||||
- 不新增机构主档补录兜底逻辑
|
||||
- 不做通用人员关系平台化抽象
|
||||
- 不新增独立一级菜单拆分为三个平行模块
|
||||
- 不扩展导入、导出、异步任务链路
|
||||
- 不新增兼容性补丁、降级方案或过度设计
|
||||
|
||||
## 四、现状分析
|
||||
|
||||
### 4.1 前端现状
|
||||
|
||||
当前中介库页面位于:
|
||||
|
||||
- `ruoyi-ui/src/views/ccdiIntermediary/index.vue`
|
||||
- `ruoyi-ui/src/views/ccdiIntermediary/components/EditDialog.vue`
|
||||
|
||||
现有特点:
|
||||
|
||||
1. 首页列表面向“个人中介 / 机构中介”两类并列记录
|
||||
2. 新增弹窗先选择类型,再进入不同表单
|
||||
3. 个人中介与机构中介使用不同详情接口
|
||||
4. 机构中介可直接在中介库模块中新增与修改
|
||||
|
||||
这与当前需求存在三个核心偏差:
|
||||
|
||||
1. 主记录不是“中介本人”,而是“个人 / 机构”并列
|
||||
2. 没有中介详情下的亲属子列表与机构关系子列表
|
||||
3. 首页列表未按“本人 / 亲属 / 机构关系”统一口径展示
|
||||
|
||||
### 4.2 后端现状
|
||||
|
||||
当前中介控制器位于:
|
||||
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiIntermediaryController.java`
|
||||
|
||||
现有后端特点:
|
||||
|
||||
1. `POST /ccdi/intermediary/person` 新增个人中介
|
||||
2. `POST /ccdi/intermediary/entity` 新增机构中介
|
||||
3. `GET /ccdi/intermediary/list` 联合查询个人中介与机构中介
|
||||
4. 机构中介直接写入 `ccdi_enterprise_base_info`
|
||||
|
||||
这与新需求的偏差在于:
|
||||
|
||||
1. 机构不应再作为中介库主记录新增
|
||||
2. 亲属未形成独立子资源模型
|
||||
3. 中介与机构之间缺少独立关系表
|
||||
|
||||
### 4.3 现有可复用基础
|
||||
|
||||
仓库中已存在以下可参考实现:
|
||||
|
||||
1. `ccdi_staff_fmy_relation` 员工亲属关系维护
|
||||
2. `ccdi_staff_enterprise_relation` 员工关联企业维护
|
||||
3. `ccdi_cust_enterprise_relation` 客户关联企业维护
|
||||
|
||||
可复用的思路主要是:
|
||||
|
||||
1. 子资源独立 CRUD
|
||||
2. 统一列表查询 + 独立编辑弹窗
|
||||
3. 关系表联查企业主档展示企业名称
|
||||
|
||||
但本次不直接照搬上述模块,而是在中介库现有主页面中完成收口。
|
||||
|
||||
## 五、方案对比
|
||||
|
||||
### 5.1 方案 A:中介本人为主记录,亲属与机构关系作为子记录
|
||||
|
||||
做法:
|
||||
|
||||
1. `ccdi_biz_intermediary` 统一存中介本人和中介亲属
|
||||
2. 新增 `ccdi_intermediary_enterprise_relation`
|
||||
3. 首页列表联合查询三类记录
|
||||
4. 首页新增只允许新增中介本人
|
||||
5. 详情页维护亲属与关联机构
|
||||
|
||||
优点:
|
||||
|
||||
1. 最贴合当前业务口径
|
||||
2. 最大程度复用现有中介库菜单、权限与主页面
|
||||
3. 后端改造边界清晰
|
||||
4. 前端操作路径与“先建中介,再维护子信息”一致
|
||||
|
||||
缺点:
|
||||
|
||||
1. 需要重写现有“个人 / 机构并列”的列表语义
|
||||
2. 需要补一张新的机构关系表
|
||||
|
||||
### 5.2 方案 B:拆成中介本人、亲属、机构关系三个平行页面
|
||||
|
||||
做法:
|
||||
|
||||
1. 中介本人独立页面
|
||||
2. 中介亲属独立页面
|
||||
3. 中介关联机构独立页面
|
||||
|
||||
问题:
|
||||
|
||||
1. 与“先建中介,再在下面维护”不一致
|
||||
2. 页面、菜单、权限改动面更大
|
||||
3. 首页聚合查询与跳转链路更复杂
|
||||
|
||||
### 5.3 方案 C:抽象统一关系模型
|
||||
|
||||
做法:
|
||||
|
||||
1. 抽象统一的人员关系与机构关系模型
|
||||
2. 中介、员工、客户共用一套关系平台
|
||||
|
||||
问题:
|
||||
|
||||
1. 抽象过重
|
||||
2. 明显超出当前需求
|
||||
3. 会引入额外改造面,不符合最短路径原则
|
||||
|
||||
### 5.4 结论
|
||||
|
||||
采用 **方案 A:中介本人为主记录,亲属与机构关系作为子记录**。
|
||||
|
||||
## 六、总体设计
|
||||
|
||||
### 6.1 设计原则
|
||||
|
||||
本次设计遵循以下原则:
|
||||
|
||||
1. 中介库只维护中介本人、亲属和中介机构关系
|
||||
2. 机构主档继续归实体信息维护功能负责
|
||||
3. 不新增无必要的身份字段,优先复用现有字段表达业务含义
|
||||
4. 首页统一查询,编辑入口按记录类型分流
|
||||
5. 删除中介本人时同时清理亲属与机构关系
|
||||
|
||||
### 6.2 数据流
|
||||
|
||||
新增中介链路:
|
||||
|
||||
1. 首页点击新增
|
||||
2. 打开中介本人新增弹窗
|
||||
3. 保存中介本人到 `ccdi_biz_intermediary`
|
||||
4. 自动进入中介详情
|
||||
5. 在详情中新增亲属与关联机构关系
|
||||
|
||||
首页查询链路:
|
||||
|
||||
1. 前端调用 `GET /ccdi/intermediary/list`
|
||||
2. 后端联合查询:
|
||||
- `ccdi_biz_intermediary` 中 `person_sub_type = 本人`
|
||||
- `ccdi_biz_intermediary` 中 `person_sub_type != 本人`
|
||||
- `ccdi_intermediary_enterprise_relation` 联查 `ccdi_enterprise_base_info`
|
||||
3. 统一返回前端展示字段与 `recordType`
|
||||
|
||||
删除中介链路:
|
||||
|
||||
1. 删除中介本人
|
||||
2. 删除本人记录
|
||||
3. 删除名下亲属记录
|
||||
4. 删除名下关联机构关系记录
|
||||
5. 不删除企业主档
|
||||
|
||||
## 七、数据模型设计
|
||||
|
||||
### 7.1 `ccdi_biz_intermediary` 使用方式调整
|
||||
|
||||
`ccdi_biz_intermediary` 继续作为人员表,不新增新的身份字段,直接复用:
|
||||
|
||||
- `person_sub_type`
|
||||
- `related_num_id`
|
||||
|
||||
字段口径调整如下:
|
||||
|
||||
1. 中介本人
|
||||
- `person_sub_type = 本人`
|
||||
- `related_num_id` 为空
|
||||
|
||||
2. 中介亲属
|
||||
- `person_sub_type = 配偶 / 子女 / 父母 / 兄弟姐妹 / 其他`
|
||||
- `related_num_id = 所属中介本人的 biz_id`
|
||||
|
||||
因此:
|
||||
|
||||
- `person_sub_type` 负责表达“本人 / 亲属关系”
|
||||
- `related_num_id` 负责表达“归属到哪个中介本人”
|
||||
|
||||
### 7.2 新增 `ccdi_intermediary_enterprise_relation`
|
||||
|
||||
新增表:`ccdi_intermediary_enterprise_relation`
|
||||
|
||||
建议字段:
|
||||
|
||||
1. `id` BIGINT 主键
|
||||
2. `intermediary_biz_id` VARCHAR(64)
|
||||
关联中介本人 `biz_id`
|
||||
3. `social_credit_code` VARCHAR(18)
|
||||
关联机构统一社会信用代码
|
||||
4. `relation_person_post` VARCHAR(100)
|
||||
中介在该机构的关联角色/职务
|
||||
5. `remark` VARCHAR(500)
|
||||
6. `created_by`
|
||||
7. `create_time`
|
||||
8. `updated_by`
|
||||
9. `update_time`
|
||||
|
||||
唯一性约束建议:
|
||||
|
||||
- `uk_intermediary_enterprise (intermediary_biz_id, social_credit_code)`
|
||||
|
||||
说明:
|
||||
|
||||
1. 只维护关系,不维护企业主档
|
||||
2. 企业名称通过联查 `ccdi_enterprise_base_info.enterprise_name` 获取
|
||||
3. 删除关系时只删该表记录
|
||||
|
||||
### 7.3 删除规则
|
||||
|
||||
删除中介本人时:
|
||||
|
||||
1. 删除 `ccdi_biz_intermediary` 中本人记录
|
||||
2. 删除 `ccdi_biz_intermediary` 中 `related_num_id = 本人 biz_id` 的亲属记录
|
||||
3. 删除 `ccdi_intermediary_enterprise_relation` 中 `intermediary_biz_id = 本人 biz_id` 的全部关系记录
|
||||
4. 不删除 `ccdi_enterprise_base_info`
|
||||
|
||||
## 八、首页列表与查询口径
|
||||
|
||||
### 8.1 列表展示字段
|
||||
|
||||
首页列表统一展示以下字段:
|
||||
|
||||
1. 名称
|
||||
2. 证件号
|
||||
3. 关联中介姓名
|
||||
4. 关联关系
|
||||
5. 创建时间
|
||||
|
||||
### 8.2 三类记录展示映射
|
||||
|
||||
1. 中介本人
|
||||
- 名称:`name`
|
||||
- 证件号:`person_id`
|
||||
- 关联中介姓名:本人姓名
|
||||
- 关联关系:`本人`
|
||||
|
||||
2. 中介亲属
|
||||
- 名称:`name`
|
||||
- 证件号:`person_id`
|
||||
- 关联中介姓名:所属中介本人姓名
|
||||
- 关联关系:`person_sub_type`
|
||||
|
||||
3. 关联机构
|
||||
- 名称:机构名称
|
||||
- 证件号:统一社会信用代码
|
||||
- 关联中介姓名:所属中介本人姓名
|
||||
- 关联关系:`relation_person_post`
|
||||
|
||||
### 8.3 搜索字段
|
||||
|
||||
首页搜索字段固定为:
|
||||
|
||||
1. 名称
|
||||
2. 证件号
|
||||
3. 记录类型
|
||||
4. 关联中介信息
|
||||
|
||||
其中“关联中介信息”为一个合并输入框,同时支持:
|
||||
|
||||
1. 按关联中介姓名查询
|
||||
2. 按关联中介证件号查询
|
||||
|
||||
### 8.4 记录类型
|
||||
|
||||
联合查询结果中增加派生字段 `recordType`,仅用于前端分流,不落库:
|
||||
|
||||
1. `INTERMEDIARY`
|
||||
2. `RELATIVE`
|
||||
3. `ENTERPRISE_RELATION`
|
||||
|
||||
## 九、接口设计
|
||||
|
||||
### 9.1 中介本人接口
|
||||
|
||||
保留现有中介主线接口,语义调整为“中介本人”:
|
||||
|
||||
1. `POST /ccdi/intermediary/person`
|
||||
- 新增中介本人
|
||||
- 固定写入:
|
||||
- `person_sub_type = 本人`
|
||||
- `related_num_id = null`
|
||||
|
||||
2. `PUT /ccdi/intermediary/person`
|
||||
- 修改中介本人
|
||||
|
||||
3. `GET /ccdi/intermediary/person/{bizId}`
|
||||
- 查询中介本人详情
|
||||
|
||||
### 9.2 中介亲属接口
|
||||
|
||||
新增中介名下亲属接口,仍然落 `ccdi_biz_intermediary`:
|
||||
|
||||
1. `GET /ccdi/intermediary/{bizId}/relatives`
|
||||
- 查询某中介名下亲属列表
|
||||
|
||||
2. `POST /ccdi/intermediary/{bizId}/relative`
|
||||
- 新增亲属
|
||||
- 固定写入:
|
||||
- `related_num_id = bizId`
|
||||
- 校验 `person_sub_type` 不得为 `本人`(此项为入参校验规则而非固定写入值,口径与 11.2 节保持一致)
|
||||
|
||||
3. `GET /ccdi/intermediary/relative/{relativeBizId}`
|
||||
- 查询亲属详情
|
||||
|
||||
4. `PUT /ccdi/intermediary/relative`
|
||||
- 修改亲属
|
||||
|
||||
5. `DELETE /ccdi/intermediary/relative/{relativeBizId}`
|
||||
- 删除亲属
|
||||
|
||||
### 9.3 中介关联机构关系接口
|
||||
|
||||
新增关系表对应接口:
|
||||
|
||||
1. `GET /ccdi/intermediary/{bizId}/enterprise-relations`
|
||||
- 查询某中介名下关联机构列表
|
||||
|
||||
2. `POST /ccdi/intermediary/{bizId}/enterprise-relation`
|
||||
- 新增关联机构关系
|
||||
|
||||
3. `GET /ccdi/intermediary/enterprise-relation/{id}`
|
||||
- 查询单条机构关系详情
|
||||
|
||||
4. `PUT /ccdi/intermediary/enterprise-relation`
|
||||
- 修改机构关系
|
||||
|
||||
5. `DELETE /ccdi/intermediary/enterprise-relation/{id}`
|
||||
- 删除机构关系
|
||||
|
||||
### 9.4 首页联合查询接口
|
||||
|
||||
保留:
|
||||
|
||||
- `GET /ccdi/intermediary/list`
|
||||
|
||||
返回统一字段:
|
||||
|
||||
1. `recordType`
|
||||
2. `recordId`
|
||||
3. `name`
|
||||
4. `certificateNo`
|
||||
5. `relatedIntermediaryName`
|
||||
6. `relationText`
|
||||
7. `createTime`
|
||||
|
||||
查询条件:
|
||||
|
||||
1. `name`
|
||||
2. `certificateNo`
|
||||
3. `recordType`
|
||||
4. `relatedIntermediaryKeyword`
|
||||
|
||||
其中 `relatedIntermediaryKeyword` 同时匹配:
|
||||
|
||||
1. 关联中介姓名
|
||||
2. 关联中介证件号
|
||||
|
||||
### 9.5 删除接口行为
|
||||
|
||||
1. 删除中介本人:
|
||||
- 继续使用中介主删除接口
|
||||
- 服务层执行本人、亲属、机构关系级联清理
|
||||
|
||||
2. 删除亲属:
|
||||
- 走亲属删除接口
|
||||
|
||||
3. 删除关联机构:
|
||||
- 走关联机构关系删除接口
|
||||
|
||||
## 十、前端页面设计
|
||||
|
||||
### 10.1 首页页面
|
||||
|
||||
保留页面:
|
||||
|
||||
- `ruoyi-ui/src/views/ccdiIntermediary/index.vue`
|
||||
|
||||
页面语义调整为“中介综合库”。
|
||||
|
||||
改造内容:
|
||||
|
||||
1. 搜索区改为新搜索字段
|
||||
2. 列表改为三类记录混合展示
|
||||
3. 新增按钮只允许新增中介本人
|
||||
|
||||
### 10.2 列表操作行为
|
||||
|
||||
1. `recordType = INTERMEDIARY`
|
||||
- 详情
|
||||
- 修改
|
||||
- 删除
|
||||
|
||||
2. `recordType = RELATIVE`
|
||||
- 详情 / 编辑亲属
|
||||
- 删除亲属
|
||||
|
||||
3. `recordType = ENTERPRISE_RELATION`
|
||||
- 详情 / 编辑关联机构关系
|
||||
- 删除关联关系
|
||||
|
||||
### 10.3 新增流程
|
||||
|
||||
首页新增流程固定为:
|
||||
|
||||
1. 点击新增
|
||||
2. 打开中介本人新增弹窗
|
||||
3. 保存成功
|
||||
4. 自动进入该中介详情
|
||||
5. 在详情中继续维护亲属与关联机构
|
||||
|
||||
### 10.4 中介详情页
|
||||
|
||||
中介本人详情建议改为“大弹窗详情维护页”,包含三个区域:
|
||||
|
||||
1. 中介基本信息
|
||||
2. 亲属信息列表
|
||||
3. 关联机构信息列表
|
||||
|
||||
支持在详情中:
|
||||
|
||||
1. 编辑中介本人
|
||||
2. 新增 / 编辑 / 删除亲属
|
||||
3. 新增 / 编辑 / 删除关联机构关系
|
||||
|
||||
### 10.5 子记录编辑
|
||||
|
||||
首页点到亲属或关联机构时:
|
||||
|
||||
1. 直接打开对应编辑弹窗
|
||||
2. 弹窗中展示所属中介姓名,只读
|
||||
3. 保存成功后刷新首页列表
|
||||
4. 如当前已打开中介详情,同时刷新详情内子列表
|
||||
|
||||
### 10.6 弹窗拆分建议
|
||||
|
||||
当前 `EditDialog.vue` 同时承担个人和机构两套表单,本次建议拆分为:
|
||||
|
||||
1. 中介本人编辑弹窗
|
||||
2. 亲属编辑弹窗
|
||||
3. 关联机构关系编辑弹窗
|
||||
|
||||
如果实现时考虑最短路径,也允许在现有组件上拆成三个分支表单,但从设计上仍以三类用途明确的编辑组件为目标。
|
||||
|
||||
## 十一、校验与业务规则
|
||||
|
||||
### 11.1 中介本人
|
||||
|
||||
1. 新增时固定 `person_sub_type = 本人`
|
||||
2. `related_num_id` 必须为空
|
||||
3. 证件号仍需保持唯一性校验
|
||||
|
||||
### 11.2 中介亲属
|
||||
|
||||
1. `person_sub_type` 禁止保存为 `本人`
|
||||
2. `related_num_id` 必须指向所属中介本人
|
||||
3. 首页展示关联关系时直接取 `person_sub_type`
|
||||
|
||||
### 11.3 关联机构关系
|
||||
|
||||
1. `social_credit_code` 必须存在于实体信息库
|
||||
2. 同一中介下不允许重复关联同一统一社会信用代码
|
||||
3. 不允许通过中介模块新增机构主档
|
||||
|
||||
## 十二、数据库与代码改动清单
|
||||
|
||||
### 12.1 后端
|
||||
|
||||
预计改动:
|
||||
|
||||
1. `CcdiIntermediaryController`
|
||||
2. `ICcdiIntermediaryService`
|
||||
3. `CcdiIntermediaryServiceImpl`
|
||||
4. 中介相关 DTO / VO
|
||||
5. 新增中介关联机构关系 domain / DTO / VO / mapper / service / controller
|
||||
6. `CcdiIntermediaryMapper.xml`
|
||||
7. 新增机构关系 Mapper XML
|
||||
|
||||
### 12.2 前端
|
||||
|
||||
预计改动:
|
||||
|
||||
1. `ruoyi-ui/src/views/ccdiIntermediary/index.vue`
|
||||
2. `SearchForm.vue`
|
||||
3. `DataTable.vue`
|
||||
4. `DetailDialog.vue`
|
||||
5. `EditDialog.vue` 或拆分后的三个编辑组件
|
||||
6. `ruoyi-ui/src/api/ccdiIntermediary.js`
|
||||
7. 新增关联机构关系 API
|
||||
|
||||
### 12.3 SQL
|
||||
|
||||
预计新增:
|
||||
|
||||
1. `ccdi_intermediary_enterprise_relation` 建表脚本
|
||||
2. 如需字典补充,再新增对应迁移脚本
|
||||
|
||||
## 十三、测试要点
|
||||
|
||||
1. 新增中介本人成功,自动进入详情
|
||||
2. 在详情中新增亲属成功,首页能查到亲属记录
|
||||
3. 在详情中新增关联机构关系成功,首页能查到关联机构记录
|
||||
4. 首页支持按名称、证件号、记录类型、关联中介信息查询
|
||||
5. 首页点击亲属记录可直接编辑
|
||||
6. 首页点击关联机构记录可直接编辑
|
||||
7. 删除中介本人后,亲属记录一并删除
|
||||
8. 删除中介本人后,关联机构关系一并删除
|
||||
9. 删除中介本人后,机构主档仍保留
|
||||
10. 同一中介重复关联同一机构时正确拦截
|
||||
|
||||
## 十四、结论
|
||||
|
||||
本次中介库改造采用“中介本人主记录 + 亲属子记录 + 关联机构关系子记录”的主从结构:
|
||||
|
||||
1. `ccdi_biz_intermediary` 负责维护中介本人和亲属
|
||||
2. `ccdi_intermediary_enterprise_relation` 负责维护中介与机构的关系
|
||||
3. 首页以统一列表展示三类记录
|
||||
4. 新增流程固定为“先建中介本人,再维护子信息”
|
||||
5. 机构主档继续由实体信息维护功能独立负责
|
||||
|
||||
该方案满足当前需求,并保持了最短路径实现与清晰的前后端边界。
|
||||
@@ -0,0 +1,494 @@
|
||||
# 异常账户模型接入银行流水打标后端 Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
>
|
||||
> 仓库约束:当前仓库明确禁止开启 subagent,执行时统一使用 `superpowers:executing-plans`。
|
||||
|
||||
**Goal:** 在后端正式接入异常账户模型,新增 `ccdi_account_info`、两条对象型打标规则、最小可命中测试数据,并通过 Java 测试和 MySQL MCP SQL 校验确认命中口径正确。
|
||||
|
||||
**Architecture:** 复用现有 `CcdiBankTagServiceImpl -> CcdiBankTagAnalysisMapper.xml -> ccdi_bank_statement_tag_result` 主链路,不新增并行模块。两条规则统一作为 `OBJECT` 规则落到员工身份证号维度,`reason_detail` 保留账户级异常快照;测试层同时保留 Java 自动化测试与 MySQL MCP 真实 SQL 验证。
|
||||
|
||||
**Tech Stack:** Java 21, Spring Boot 3, MyBatis XML, MyBatis Plus, MySQL, JUnit 5, Mockito, MySQL MCP
|
||||
|
||||
---
|
||||
|
||||
## File Map
|
||||
|
||||
**Create:**
|
||||
|
||||
- `sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql`
|
||||
- 建表 `ccdi_account_info`,补模型与规则元数据
|
||||
- `sql/migration/2026-03-31-add-abnormal-account-rule-test-data.sql`
|
||||
- 插入最小命中与反样本测试数据
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiAbnormalAccountRuleSqlMetadataTest.java`
|
||||
- 校验模型、规则编码、结果类型与业务口径文本
|
||||
- `docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md`
|
||||
- 记录后端实施与验证结果
|
||||
|
||||
**Modify:**
|
||||
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java`
|
||||
- 新增两个对象型规则分发分支
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java`
|
||||
- 新增两个 Mapper 方法签名
|
||||
- `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml`
|
||||
- 新增两条对象型真实 SQL
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImplTest.java`
|
||||
- 覆盖新规则分发与结果入库
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilderTest.java`
|
||||
- 校验新增规则能进入员工风险聚合快照
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java`
|
||||
- 补模型/规则元数据断言
|
||||
|
||||
**No Change Expected:**
|
||||
|
||||
- `ruoyi-ui/`
|
||||
- 本轮前端不新增接口和页面改动
|
||||
- `lsfx-mock-server/`
|
||||
- 本轮不扩展 Mock 样本
|
||||
|
||||
## Task 1: 锁定模型、规则与落库契约
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiAbnormalAccountRuleSqlMetadataTest.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImplTest.java`
|
||||
|
||||
- [ ] **Step 1: 先写 SQL 元数据测试**
|
||||
|
||||
新增 `CcdiAbnormalAccountRuleSqlMetadataTest`,至少覆盖:
|
||||
|
||||
```java
|
||||
assertTrue(sql.contains("ABNORMAL_ACCOUNT"));
|
||||
assertTrue(sql.contains("SUDDEN_ACCOUNT_CLOSURE"));
|
||||
assertTrue(sql.contains("DORMANT_ACCOUNT_LARGE_ACTIVATION"));
|
||||
assertTrue(sql.contains("'OBJECT'"));
|
||||
```
|
||||
|
||||
- [ ] **Step 2: 再写 Service 分发失败测试**
|
||||
|
||||
在 `CcdiBankTagServiceImplTest` 中新增两个断言,约束新规则走对象型 Mapper:
|
||||
|
||||
```java
|
||||
verify(analysisMapper).selectSuddenAccountClosureObjects(40L);
|
||||
verify(analysisMapper).selectDormantAccountLargeActivationObjects(40L);
|
||||
```
|
||||
|
||||
- [ ] **Step 3: 运行定向测试,确认失败点正确**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiAbnormalAccountRuleSqlMetadataTest,CcdiBankTagServiceImplTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示缺少规则元数据脚本内容或缺少 Mapper / Service 分发
|
||||
|
||||
- [ ] **Step 4: 最小化补充规则元数据和方法签名**
|
||||
|
||||
按以下顺序补代码:
|
||||
|
||||
1. 新增规则元数据脚本文件骨架
|
||||
2. 在 `CcdiBankTagAnalysisMapper.java` 增加两个方法:
|
||||
|
||||
```java
|
||||
List<BankTagObjectHitVO> selectSuddenAccountClosureObjects(@Param("projectId") Long projectId);
|
||||
List<BankTagObjectHitVO> selectDormantAccountLargeActivationObjects(@Param("projectId") Long projectId);
|
||||
```
|
||||
|
||||
3. 在 `CcdiBankTagServiceImpl.java` 中新增两个 `case`
|
||||
|
||||
- [ ] **Step 5: 重新运行定向测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiAbnormalAccountRuleSqlMetadataTest,CcdiBankTagServiceImplTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS 或仅剩 SQL 实现相关失败
|
||||
|
||||
- [ ] **Step 6: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiAbnormalAccountRuleSqlMetadataTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImplTest.java \
|
||||
sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql
|
||||
git commit -m "补充异常账户模型规则骨架"
|
||||
```
|
||||
|
||||
## Task 2: 落地 `ccdi_account_info` 建表与规则元数据脚本
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiAbnormalAccountRuleSqlMetadataTest.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java`
|
||||
|
||||
- [ ] **Step 1: 先补脚本结构测试**
|
||||
|
||||
为建表 SQL 增加断言,至少包含:
|
||||
|
||||
```java
|
||||
assertTrue(sql.contains("create table if not exists `ccdi_account_info`"));
|
||||
assertTrue(sql.contains("`account_no`"));
|
||||
assertTrue(sql.contains("`owner_type`"));
|
||||
assertTrue(sql.contains("`effective_date`"));
|
||||
assertTrue(sql.contains("`invalid_date`"));
|
||||
```
|
||||
|
||||
- [ ] **Step 2: 运行测试确认失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiAbnormalAccountRuleSqlMetadataTest,CcdiBankTagRuleSqlMetadataTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示脚本字段或规则元数据缺失
|
||||
|
||||
- [ ] **Step 3: 写最小建表与元数据脚本**
|
||||
|
||||
在 `2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql` 中按以下顺序补内容:
|
||||
|
||||
1. 建表语句字面量写成 ``create table if not exists `ccdi_account_info` ``(小写并带反引号,与 Step 1 中 `sql.contains(...)` 的断言字符串逐字一致;`String.contains` 区分大小写,大写写法会导致元数据测试失败)
|
||||
2. 插入模型 `ABNORMAL_ACCOUNT`
|
||||
3. 插入两条规则元数据
|
||||
4. 业务口径文本与设计文档保持一致
|
||||
|
||||
- [ ] **Step 4: 重新运行元数据测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiAbnormalAccountRuleSqlMetadataTest,CcdiBankTagRuleSqlMetadataTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 5: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiAbnormalAccountRuleSqlMetadataTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java
|
||||
git commit -m "补充异常账户模型建表和规则元数据"
|
||||
```
|
||||
|
||||
## Task 3: 先写两条规则 SQL 的失败测试
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImplTest.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilderTest.java`
|
||||
- Modify: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml`
|
||||
|
||||
- [ ] **Step 1: 写对象型结果断言**
|
||||
|
||||
在 `CcdiBankTagServiceImplTest` 中新增两个测试,断言对象型结果内容:
|
||||
|
||||
```java
|
||||
verify(resultMapper).insertBatch(argThat(results -> results.stream().anyMatch(item ->
|
||||
"ABNORMAL_ACCOUNT".equals(item.getModelCode())
|
||||
&& "SUDDEN_ACCOUNT_CLOSURE".equals(item.getRuleCode())
|
||||
&& "OBJECT".equals(item.getResultType())
|
||||
&& "STAFF_ID_CARD".equals(item.getObjectType())
|
||||
)));
|
||||
```
|
||||
|
||||
- [ ] **Step 2: 写聚合层断言**
|
||||
|
||||
在 `CcdiProjectOverviewEmployeeResultBuilderTest` 中补断言,确保新增规则能进入 `hitRulesJson`:
|
||||
|
||||
```java
|
||||
assertTrue(result.getHitRulesJson().contains("SUDDEN_ACCOUNT_CLOSURE"));
|
||||
assertTrue(result.getHitRulesJson().contains("DORMANT_ACCOUNT_LARGE_ACTIVATION"));
|
||||
```
|
||||
|
||||
- [ ] **Step 3: 运行测试确认失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiBankTagServiceImplTest,CcdiProjectOverviewEmployeeResultBuilderTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示 SQL 尚未返回结果或规则未写入正确字段
|
||||
|
||||
- [ ] **Step 4: 暂不修实现,先保存失败断言**
|
||||
|
||||
保持测试失败状态,进入下一任务补最小 SQL 实现。
|
||||
|
||||
## Task 4: 实现 `SUDDEN_ACCOUNT_CLOSURE` 最小闭环
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java`
|
||||
|
||||
- [ ] **Step 1: 在 XML 中新增查询骨架**
|
||||
|
||||
新增 `selectSuddenAccountClosureObjects`,结果列固定为:
|
||||
|
||||
```sql
|
||||
select
|
||||
'STAFF_ID_CARD' as objectType,
|
||||
staff.id_card as objectKey,
|
||||
concat(...) as reasonDetail
|
||||
```
|
||||
|
||||
- [ ] **Step 2: 按设计补完整过滤条件**
|
||||
|
||||
SQL 需要覆盖:
|
||||
|
||||
- `owner_type = 'EMPLOYEE'`
|
||||
- `status = 2`
|
||||
- `invalid_date is not null`
|
||||
- 统计窗口 `[invalid_date - 30天, invalid_date)`
|
||||
- 按员工身份证号 + 账号聚合
|
||||
|
||||
- [ ] **Step 3: 运行定向测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiBankTagServiceImplTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 至少 `SUDDEN_ACCOUNT_CLOSURE` 相关断言 PASS
|
||||
|
||||
- [ ] **Step 4: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java
|
||||
git commit -m "实现突然销户打标规则"
|
||||
```
|
||||
|
||||
## Task 5: 实现 `DORMANT_ACCOUNT_LARGE_ACTIVATION` 最小闭环
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java`
|
||||
|
||||
- [ ] **Step 1: 在 XML 中新增查询骨架**
|
||||
|
||||
新增 `selectDormantAccountLargeActivationObjects`,结果列同样返回:
|
||||
|
||||
```sql
|
||||
'STAFF_ID_CARD' as objectType,
|
||||
staff.id_card as objectKey,
|
||||
concat(...) as reasonDetail
|
||||
```
|
||||
|
||||
- [ ] **Step 2: 补完整命中条件**
|
||||
|
||||
SQL 需要覆盖:
|
||||
|
||||
- `owner_type = 'EMPLOYEE'`
|
||||
- `status = 1`
|
||||
- `effective_date is not null`
|
||||
- `first_tx_date >= effective_date + 6个月`
|
||||
- `total_amount >= 500000 or max_single_amount >= 100000`
|
||||
|
||||
- [ ] **Step 3: 运行定向测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiBankTagServiceImplTest,CcdiProjectOverviewEmployeeResultBuilderTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 4: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilderTest.java
|
||||
git commit -m "实现休眠账户大额启用打标规则"
|
||||
```
|
||||
|
||||
## Task 6: 补最小测试数据 SQL
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `sql/migration/2026-03-31-add-abnormal-account-rule-test-data.sql`
|
||||
- Modify: `docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md`
|
||||
|
||||
- [ ] **Step 1: 先写测试数据注释骨架**
|
||||
|
||||
在 SQL 文件中先划分 4 组样本块:
|
||||
|
||||
- 员工 A:命中 `SUDDEN_ACCOUNT_CLOSURE`
|
||||
- 员工 B:命中 `DORMANT_ACCOUNT_LARGE_ACTIVATION`
|
||||
- 员工 C:休眠不足 6 个月,不命中
|
||||
- 员工 D:销户前 30 天无流水,不命中
|
||||
|
||||
- [ ] **Step 2: 写最小测试数据**
|
||||
|
||||
按顺序补数据:
|
||||
|
||||
1. 员工基础数据
|
||||
2. 项目内流水数据
|
||||
3. `ccdi_account_info` 账户数据
|
||||
4. 必要的清理 SQL
|
||||
|
||||
- [ ] **Step 3: 记录导入命令**
|
||||
|
||||
在实施记录中先预填导入方式:
|
||||
|
||||
```bash
|
||||
bin/mysql_utf8_exec.sh sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql
|
||||
bin/mysql_utf8_exec.sh sql/migration/2026-03-31-add-abnormal-account-rule-test-data.sql
|
||||
```
|
||||
|
||||
- [ ] **Step 4: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add sql/migration/2026-03-31-add-abnormal-account-rule-test-data.sql \
|
||||
docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md
|
||||
git commit -m "补充异常账户规则测试数据"
|
||||
```
|
||||
|
||||
## Task 7: 用 MySQL MCP 执行真实 SQL 校验口径
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md`
|
||||
|
||||
- [ ] **Step 1: 导入建表和测试数据脚本**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
bin/mysql_utf8_exec.sh sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql
|
||||
bin/mysql_utf8_exec.sh sql/migration/2026-03-31-add-abnormal-account-rule-test-data.sql
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS,脚本执行成功且无乱码
|
||||
|
||||
- [ ] **Step 2: 使用 MySQL MCP 执行 `SUDDEN_ACCOUNT_CLOSURE` 对应 SQL**
|
||||
|
||||
要求:
|
||||
|
||||
- 直接执行 Mapper 中的真实查询等价 SQL
|
||||
- 校验仅返回员工 A
|
||||
- 校验 `reasonDetail` 中包含销户日期、最后交易日、累计金额和单笔最大金额
|
||||
|
||||
- [ ] **Step 3: 使用 MySQL MCP 执行 `DORMANT_ACCOUNT_LARGE_ACTIVATION` 对应 SQL**
|
||||
|
||||
要求:
|
||||
|
||||
- 直接执行 Mapper 中的真实查询等价 SQL
|
||||
- 校验仅返回员工 B
|
||||
- 校验员工 C 未命中
|
||||
- 校验 `reasonDetail` 中包含开户日期、首次交易日期、累计金额或单笔最大金额快照
|
||||
|
||||
- [ ] **Step 4: 将 SQL 验证结果写入实施记录**
|
||||
|
||||
记录:
|
||||
|
||||
- 实际执行 SQL 摘要
|
||||
- 命中对象
|
||||
- 未命中对象
|
||||
- 与业务口径的对照结论
|
||||
|
||||
- [ ] **Step 5: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md
|
||||
git commit -m "补充异常账户规则SQL校验记录"
|
||||
```
|
||||
|
||||
## Task 8: 跑完整后端验证并收尾
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md`
|
||||
|
||||
- [ ] **Step 1: 运行后端定向测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiAbnormalAccountRuleSqlMetadataTest,CcdiBankTagRuleSqlMetadataTest,CcdiBankTagServiceImplTest,CcdiProjectOverviewEmployeeResultBuilderTest test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 2: 如需联调打标主链路,启动后端并验证后主动关闭**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ruoyi-admin -am package -DskipTests
|
||||
cd ruoyi-admin/target && java -jar ruoyi-admin.jar
|
||||
```
|
||||
|
||||
验证完成后关闭进程。
|
||||
|
||||
- [ ] **Step 3: 完善实施记录**
|
||||
|
||||
记录:
|
||||
|
||||
- 最终改动文件
|
||||
- 测试结果
|
||||
- MySQL MCP 校验结论
|
||||
- 若启动过进程,明确已关闭
|
||||
|
||||
- [ ] **Step 4: 最终提交**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiBankTagAnalysisMapper.java \
|
||||
ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiAbnormalAccountRuleSqlMetadataTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImplTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilderTest.java \
|
||||
sql/migration/2026-03-31-create-ccdi-account-info-and-abnormal-account-rules.sql \
|
||||
sql/migration/2026-03-31-add-abnormal-account-rule-test-data.sql \
|
||||
docs/reports/implementation/2026-03-31-abnormal-account-bank-tag-backend-implementation.md
|
||||
git commit -m "完成异常账户模型后端接入"
|
||||
```
|
||||
|
||||
## Final Verification
|
||||
|
||||
- [ ] 运行:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -Dtest=CcdiAbnormalAccountRuleSqlMetadataTest,CcdiBankTagRuleSqlMetadataTest,CcdiBankTagServiceImplTest,CcdiProjectOverviewEmployeeResultBuilderTest test
|
||||
```
|
||||
|
||||
- [ ] 使用 MySQL MCP 执行两条规则真实 SQL,确认正样本命中、反样本不命中
|
||||
- [ ] 确认结果写入 `ccdi_bank_statement_tag_result`
|
||||
- [ ] 确认新增编码全为大写
|
||||
- [ ] 如启动过后端进程,验证结束后主动关闭
|
||||
@@ -0,0 +1,383 @@
|
||||
# LSFX Mock Server 异常账户命中流水后端 Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
>
|
||||
> 仓库约束:当前仓库明确禁止开启 subagent,执行时统一使用 `superpowers:executing-plans`。
|
||||
|
||||
**Goal:** 在现有 `lsfx-mock-server` 中补齐异常账户命中计划、最小账户事实和可命中后端 SQL 的流水样本,让同一个 `logId` 下稳定命中 `SUDDEN_ACCOUNT_CLOSURE` 与 `DORMANT_ACCOUNT_LARGE_ACTIVATION`。
|
||||
|
||||
**Architecture:** 复用现有 `FileService -> FileRecord -> StatementService -> build_seed_statements_for_rule_plan(...)` 主链路,不新增独立接口或平行造数模块。异常账户能力拆成三层:`FileRecord` 持有命中计划与账户事实、`statement_rule_samples.py` 生成异常账户样本、`StatementService` 统一混入种子流水并继续补噪声与分页。
|
||||
|
||||
**Tech Stack:** Python 3, FastAPI, pytest, dataclasses, Markdown docs
|
||||
|
||||
---
|
||||
|
||||
## File Structure
|
||||
|
||||
- `lsfx-mock-server/services/file_service.py`: 扩展 `FileRecord`,增加异常账户规则池、命中计划和最小账户事实生成逻辑。
|
||||
- `lsfx-mock-server/services/statement_rule_samples.py`: 定义异常账户事实结构,并新增两类异常账户样本生成器及统一接入点。
|
||||
- `lsfx-mock-server/services/statement_service.py`: 消费 `FileRecord` 中新增的异常账户计划,让种子流水能混入异常账户命中样本。
|
||||
- `lsfx-mock-server/tests/test_file_service.py`: 锁定异常账户命中计划与账户事实生成行为。
|
||||
- `lsfx-mock-server/tests/test_statement_service.py`: 锁定异常账户样本日期窗口、金额阈值与服务层混入行为。
|
||||
- `docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-backend-implementation.md`: 记录本次 Mock 服务后端实施结果。
|
||||
- `docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-backend-verification.md`: 记录 pytest 验证命令、结果与进程清理结论。
|
||||
|
||||
## Task 1: 扩展 `FileRecord` 并锁定异常账户命中计划
|
||||
|
||||
**Files:**
|
||||
- Modify: `lsfx-mock-server/services/file_service.py`
|
||||
- Modify: `lsfx-mock-server/tests/test_file_service.py`
|
||||
- Reference: `docs/design/2026-03-31-lsfx-mock-server-abnormal-account-design.md`
|
||||
|
||||
- [ ] **Step 1: Write the failing test**
|
||||
|
||||
先在 `lsfx-mock-server/tests/test_file_service.py` 中新增失败用例,锁定 `fetch_inner_flow(...)` 生成的 `FileRecord` 会携带异常账户命中计划和账户事实:
|
||||
|
||||
```python
|
||||
def test_fetch_inner_flow_should_attach_abnormal_account_rule_plan():
|
||||
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
|
||||
|
||||
response = service.fetch_inner_flow(
|
||||
{
|
||||
"groupId": 1001,
|
||||
"customerNo": "customer_abnormal_account",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
)
|
||||
log_id = response["data"][0]
|
||||
record = service.file_records[log_id]
|
||||
|
||||
assert hasattr(record, "abnormal_account_hit_rules")
|
||||
assert hasattr(record, "abnormal_accounts")
|
||||
assert isinstance(record.abnormal_account_hit_rules, list)
|
||||
assert isinstance(record.abnormal_accounts, list)
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_file_service.py::test_fetch_inner_flow_should_attach_abnormal_account_rule_plan -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `FAIL`
|
||||
- 原因是 `FileRecord` 还没有异常账户相关字段
|
||||
|
||||
- [ ] **Step 3: Write minimal implementation**
|
||||
|
||||
在 `lsfx-mock-server/services/file_service.py` 中按最小路径补齐:
|
||||
|
||||
1. 新增规则池常量:
|
||||
|
||||
```python
|
||||
ABNORMAL_ACCOUNT_RULE_CODES = [
|
||||
"SUDDEN_ACCOUNT_CLOSURE",
|
||||
"DORMANT_ACCOUNT_LARGE_ACTIVATION",
|
||||
]
|
||||
```
|
||||
|
||||
2. 扩展 `FileRecord` 字段:
|
||||
|
||||
```python
|
||||
abnormal_account_hit_rules: List[str] = field(default_factory=list)
|
||||
abnormal_accounts: List[dict] = field(default_factory=list)
|
||||
```
|
||||
|
||||
3. 在 `_build_subset_rule_hit_plan(...)`、`_build_all_compatible_rule_hit_plan(...)`、`_apply_rule_hit_plan_to_record(...)` 中纳入 `abnormal_account_hit_rules`
|
||||
4. 新增最小账户事实生成方法,并在 `fetch_inner_flow(...)` / 上传链路创建记录时写入 `abnormal_accounts`
|
||||
|
||||
- [ ] **Step 4: Run test to verify it passes**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_file_service.py::test_fetch_inner_flow_should_attach_abnormal_account_rule_plan -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- `FileRecord` 已稳定保存异常账户命中计划和账户事实列表
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/services/file_service.py lsfx-mock-server/tests/test_file_service.py
|
||||
git commit -m "补充异常账户命中计划与账户事实"
|
||||
```
|
||||
|
||||
## Task 2: 先写异常账户样本生成器的失败测试
|
||||
|
||||
**Files:**
|
||||
- Modify: `lsfx-mock-server/services/statement_rule_samples.py`
|
||||
- Modify: `lsfx-mock-server/tests/test_statement_service.py`
|
||||
- Reference: `docs/design/2026-03-31-lsfx-mock-server-abnormal-account-design.md`
|
||||
|
||||
- [ ] **Step 1: Write the failing tests**
|
||||
|
||||
在 `lsfx-mock-server/tests/test_statement_service.py` 中新增两条失败测试,分别锁定两条规则的样本口径:
|
||||
|
||||
```python
|
||||
def test_sudden_account_closure_samples_should_stay_within_30_days_before_invalid_date():
|
||||
statements = build_sudden_account_closure_samples(
|
||||
group_id=1000,
|
||||
log_id=20001,
|
||||
account_fact={
|
||||
"account_no": "6222000000000001",
|
||||
"owner_id_card": "320101199001010030",
|
||||
"account_name": "测试员工工资卡",
|
||||
"status": 2,
|
||||
"effective_date": "2024-01-01",
|
||||
"invalid_date": "2026-03-20",
|
||||
},
|
||||
le_name="测试主体",
|
||||
)
|
||||
|
||||
assert statements
|
||||
assert all("6222000000000001" == item["accountMaskNo"] for item in statements)
|
||||
assert all("2026-02-18" <= item["trxDate"][:10] < "2026-03-20" for item in statements)
|
||||
|
||||
|
||||
def test_dormant_account_large_activation_samples_should_exceed_threshold_after_6_months():
|
||||
statements = build_dormant_account_large_activation_samples(
|
||||
group_id=1000,
|
||||
log_id=20001,
|
||||
account_fact={
|
||||
"account_no": "6222000000000002",
|
||||
"owner_id_card": "320101199001010030",
|
||||
"account_name": "测试员工工资卡",
|
||||
"status": 1,
|
||||
"effective_date": "2025-01-01",
|
||||
"invalid_date": None,
|
||||
},
|
||||
le_name="测试主体",
|
||||
)
|
||||
|
||||
assert statements
|
||||
assert min(item["trxDate"][:10] for item in statements) >= "2025-07-01"
|
||||
assert sum(item["drAmount"] + item["crAmount"] for item in statements) >= 500000
|
||||
assert max(item["drAmount"] + item["crAmount"] for item in statements) >= 100000
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py -k "sudden_account_closure or dormant_account_large_activation" -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `FAIL`
|
||||
- 原因是异常账户样本生成器尚不存在
|
||||
|
||||
- [ ] **Step 3: Write minimal implementation**
|
||||
|
||||
在 `lsfx-mock-server/services/statement_rule_samples.py` 中补齐最小实现:
|
||||
|
||||
1. 定义异常账户事实结构或约定字典字段
|
||||
2. 新增:
|
||||
|
||||
```python
|
||||
def build_sudden_account_closure_samples(...): ...
|
||||
def build_dormant_account_large_activation_samples(...): ...
|
||||
```
|
||||
|
||||
3. 造数要求:
|
||||
- `SUDDEN_ACCOUNT_CLOSURE` 所有流水落在销户前 30 天窗口内
|
||||
- `DORMANT_ACCOUNT_LARGE_ACTIVATION` 首笔流水晚于开户满 6 个月
|
||||
- 休眠账户样本同时满足累计金额和单笔最大金额阈值
|
||||
|
||||
- [ ] **Step 4: Run test to verify it passes**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py -k "sudden_account_closure or dormant_account_large_activation" -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 两类样本的日期和金额口径与设计一致
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/services/statement_rule_samples.py lsfx-mock-server/tests/test_statement_service.py
|
||||
git commit -m "补充异常账户规则样本生成器"
|
||||
```
|
||||
|
||||
## Task 3: 将异常账户样本接入统一种子流水构造
|
||||
|
||||
**Files:**
|
||||
- Modify: `lsfx-mock-server/services/statement_rule_samples.py`
|
||||
- Modify: `lsfx-mock-server/services/statement_service.py`
|
||||
- Modify: `lsfx-mock-server/tests/test_statement_service.py`
|
||||
|
||||
- [ ] **Step 1: Write the failing service-level test**
|
||||
|
||||
在 `lsfx-mock-server/tests/test_statement_service.py` 中新增失败测试,锁定服务层会按命中计划混入异常账户样本:
|
||||
|
||||
```python
|
||||
def test_generate_statements_should_follow_abnormal_account_rule_plan_from_file_record():
|
||||
file_service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
|
||||
statement_service = StatementService(file_service=file_service)
|
||||
|
||||
response = file_service.fetch_inner_flow(
|
||||
{
|
||||
"groupId": 1001,
|
||||
"customerNo": "customer_abnormal_rule_plan",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
)
|
||||
log_id = response["data"][0]
|
||||
record = file_service.file_records[log_id]
|
||||
record.abnormal_account_hit_rules = ["SUDDEN_ACCOUNT_CLOSURE"]
|
||||
record.abnormal_accounts = [
|
||||
{
|
||||
"account_no": "6222000000000001",
|
||||
"owner_id_card": record.staff_id_card,
|
||||
"account_name": "测试员工工资卡",
|
||||
"status": 2,
|
||||
"effective_date": "2024-01-01",
|
||||
"invalid_date": "2026-03-20",
|
||||
}
|
||||
]
|
||||
|
||||
statements = statement_service._generate_statements(group_id=1001, log_id=log_id, count=80)
|
||||
|
||||
assert any(item["accountMaskNo"] == "6222000000000001" for item in statements)
|
||||
assert any("销户" in item["userMemo"] or "异常账户" in item["userMemo"] for item in statements)
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py::test_generate_statements_should_follow_abnormal_account_rule_plan_from_file_record -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `FAIL`
|
||||
- 原因是种子流水构造入口尚未消费异常账户命中计划
|
||||
|
||||
- [ ] **Step 3: Write minimal implementation**
|
||||
|
||||
实现顺序:
|
||||
|
||||
1. 在 `build_seed_statements_for_rule_plan(...)` 中增加 `abnormal_account_hit_rules` 入参消费
|
||||
2. 根据 `abnormal_accounts` 逐条匹配调用对应样本生成器
|
||||
3. 在 `StatementService._generate_statements(...)` 中把 `record.abnormal_account_hit_rules` 和 `record.abnormal_accounts` 传给样本构造入口
|
||||
|
||||
- [ ] **Step 4: Run targeted test to verify it passes**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py::test_generate_statements_should_follow_abnormal_account_rule_plan_from_file_record -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 服务层已能按 `FileRecord` 中的异常账户计划稳定混入命中样本
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/services/statement_rule_samples.py lsfx-mock-server/services/statement_service.py lsfx-mock-server/tests/test_statement_service.py
|
||||
git commit -m "接入异常账户命中流水主链路"
|
||||
```
|
||||
|
||||
## Task 4: 跑回归测试并补实施记录
|
||||
|
||||
**Files:**
|
||||
- Create: `docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-backend-implementation.md`
|
||||
- Create: `docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-backend-verification.md`
|
||||
- Modify: `lsfx-mock-server/README.md`
|
||||
|
||||
- [ ] **Step 1: Run focused backend regression**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_file_service.py tests/test_statement_service.py -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 异常账户相关新增测试和既有流水造数测试均通过
|
||||
|
||||
- [ ] **Step 2: Run full mock-server regression**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/ -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 无异常账户改造引入的回归失败
|
||||
|
||||
- [ ] **Step 3: Update docs**
|
||||
|
||||
在实施记录中至少写明:
|
||||
|
||||
- 新增的异常账户命中计划字段
|
||||
- 两类异常账户样本生成器
|
||||
- 服务层如何接入现有种子流水主链路
|
||||
- 验证命令与结果
|
||||
|
||||
在验证记录中至少写明:
|
||||
|
||||
- 执行过的 pytest 命令
|
||||
- 结果摘要
|
||||
- 本轮未启动额外前后端进程,因此无需清理残留进程
|
||||
|
||||
如果 README 已描述规则计划结构,同步补充 `abnormal_account_hit_rules` 说明;否则可只补最小联调说明。
|
||||
|
||||
- [ ] **Step 4: Re-run docs-relevant verification if README changed**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/README.md docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-backend-implementation.md docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-backend-verification.md
|
||||
git commit -m "完成异常账户Mock服务后端实施记录"
|
||||
```
|
||||
@@ -0,0 +1,486 @@
|
||||
# LSFX Mock Server 异常账户基线同步后端 Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
>
|
||||
> 仓库约束:当前仓库明确禁止开启 subagent,执行时统一使用 `superpowers:executing-plans`。
|
||||
|
||||
**Goal:** 在 `lsfx-mock-server` 创建 `logId` 时一次性把异常账户事实同步到 `ccdi_account_info`,让同一个 `logId` 下的异常账户事实、命中流水和真实打标前提稳定闭环。
|
||||
|
||||
**Architecture:** 继续复用现有 `FileService -> FileRecord -> StatementService` 主链路,不新增接口,也不把写库副作用混进 `getBSByLogId`。新增一个很小的 `AbnormalAccountBaselineService` 负责按 `account_no` 幂等 upsert `ccdi_account_info`,由 `FileService` 在保存 `file_records[log_id]` 前调用;`StatementService` 仍只读取 `FileRecord` 生成异常账户流水样本。
|
||||
|
||||
**Tech Stack:** Python 3, FastAPI, PyMySQL, pytest, dataclasses, Markdown docs
|
||||
|
||||
---
|
||||
|
||||
## File Structure
|
||||
|
||||
- `lsfx-mock-server/services/abnormal_account_baseline_service.py`: 新增异常账户基线写库服务,封装数据库连接、输入校验和按账号幂等 upsert。
|
||||
- `lsfx-mock-server/services/file_service.py`: 注入异常账户基线服务,并在 `fetch_inner_flow(...)` / 上传建档链路中于保存 `FileRecord` 前触发基线同步。
|
||||
- `lsfx-mock-server/services/statement_service.py`: 只补链路一致性断言,不新增写库逻辑。
|
||||
- `lsfx-mock-server/tests/test_abnormal_account_baseline_service.py`: 新增服务层单测,覆盖空输入、异常输入、插入和更新。
|
||||
- `lsfx-mock-server/tests/test_file_service.py`: 锁定 `fetch_inner_flow(...)` 调用基线同步服务及失败回滚语义。
|
||||
- `lsfx-mock-server/tests/test_statement_service.py`: 锁定异常账户样本流水与 `record.abnormal_accounts` 账号一致。
|
||||
- `docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-implementation.md`: 记录本次后端实施结果。
|
||||
- `docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-verification.md`: 记录 pytest 验证命令、结果和进程清理结论。
|
||||
|
||||
## Task 1: 先锁定 `FileService` 的基线同步触发点
|
||||
|
||||
**Files:**
|
||||
- Modify: `lsfx-mock-server/tests/test_file_service.py`
|
||||
- Modify: `lsfx-mock-server/services/file_service.py`
|
||||
- Reference: `docs/design/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-design.md`
|
||||
|
||||
- [ ] **Step 1: Write the failing test**
|
||||
|
||||
在 `lsfx-mock-server/tests/test_file_service.py` 中新增 fake baseline service 和两条失败测试:
|
||||
|
||||
```python
|
||||
class FakeAbnormalAccountBaselineService:
|
||||
def __init__(self, should_fail=False):
|
||||
self.should_fail = should_fail
|
||||
self.calls = []
|
||||
|
||||
def apply(self, staff_id_card, abnormal_accounts):
|
||||
self.calls.append(
|
||||
{
|
||||
"staff_id_card": staff_id_card,
|
||||
"abnormal_accounts": [dict(item) for item in abnormal_accounts],
|
||||
}
|
||||
)
|
||||
if self.should_fail:
|
||||
raise RuntimeError("baseline sync failed")
|
||||
|
||||
|
||||
def test_fetch_inner_flow_should_sync_abnormal_account_baselines_before_caching():
|
||||
baseline_service = FakeAbnormalAccountBaselineService()
|
||||
service = FileService(
|
||||
staff_identity_repository=FakeStaffIdentityRepository(),
|
||||
abnormal_account_baseline_service=baseline_service,
|
||||
)
|
||||
|
||||
response = service.fetch_inner_flow(
|
||||
{
|
||||
"groupId": 1001,
|
||||
"customerNo": "customer_abnormal_baseline",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
)
|
||||
|
||||
log_id = response["data"][0]
|
||||
record = service.file_records[log_id]
|
||||
|
||||
assert baseline_service.calls
|
||||
assert baseline_service.calls[0]["staff_id_card"] == record.staff_id_card
|
||||
assert baseline_service.calls[0]["abnormal_accounts"] == record.abnormal_accounts
|
||||
|
||||
|
||||
def test_fetch_inner_flow_should_not_cache_log_id_when_abnormal_account_baseline_sync_fails():
|
||||
baseline_service = FakeAbnormalAccountBaselineService(should_fail=True)
|
||||
service = FileService(
|
||||
staff_identity_repository=FakeStaffIdentityRepository(),
|
||||
abnormal_account_baseline_service=baseline_service,
|
||||
)
|
||||
|
||||
with pytest.raises(RuntimeError, match="baseline sync failed"):
|
||||
service.fetch_inner_flow(
|
||||
{
|
||||
"groupId": 1001,
|
||||
"customerNo": "customer_abnormal_baseline_fail",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
)
|
||||
|
||||
assert service.file_records == {}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_file_service.py -k "abnormal_account_baseline" -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `FAIL`
|
||||
- 原因是 `FileService` 还不接受 `abnormal_account_baseline_service` 注入,也没有在建档阶段触发写库
|
||||
|
||||
- [ ] **Step 3: Write minimal implementation**
|
||||
|
||||
在 `lsfx-mock-server/services/file_service.py` 中按最小路径补齐:
|
||||
|
||||
1. 新增构造参数:
|
||||
|
||||
```python
|
||||
def __init__(..., abnormal_account_baseline_service=None):
|
||||
self.abnormal_account_baseline_service = (
|
||||
abnormal_account_baseline_service or AbnormalAccountBaselineService()
|
||||
)
|
||||
```
|
||||
|
||||
2. 新增内部封装:
|
||||
|
||||
```python
|
||||
def _apply_abnormal_account_baselines(self, file_record: FileRecord) -> None:
|
||||
if not file_record.abnormal_account_hit_rules:
|
||||
return
|
||||
if not file_record.abnormal_accounts:
|
||||
raise RuntimeError("异常账户命中计划存在,但未生成账户事实")
|
||||
self.abnormal_account_baseline_service.apply(
|
||||
staff_id_card=file_record.staff_id_card,
|
||||
abnormal_accounts=file_record.abnormal_accounts,
|
||||
)
|
||||
```
|
||||
|
||||
3. 在 `fetch_inner_flow(...)` 和上传链路中,将:
|
||||
|
||||
```python
|
||||
self.file_records[log_id] = file_record
|
||||
```
|
||||
|
||||
调整为:
|
||||
|
||||
```python
|
||||
self._apply_abnormal_account_baselines(file_record)
|
||||
self.file_records[log_id] = file_record
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Run test to verify it passes**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_file_service.py -k "abnormal_account_baseline" -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 成功路径会先调用 baseline service
|
||||
- 失败路径不会把半成品 `logId` 写入 `file_records`
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/services/file_service.py lsfx-mock-server/tests/test_file_service.py
|
||||
git commit -m "接入异常账户基线同步触发点"
|
||||
```
|
||||
|
||||
## Task 2: 实现异常账户基线写库服务
|
||||
|
||||
**Files:**
|
||||
- Create: `lsfx-mock-server/services/abnormal_account_baseline_service.py`
|
||||
- Create: `lsfx-mock-server/tests/test_abnormal_account_baseline_service.py`
|
||||
- Reference: `docs/design/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-design.md`
|
||||
|
||||
- [ ] **Step 1: Write the failing tests**
|
||||
|
||||
新建 `lsfx-mock-server/tests/test_abnormal_account_baseline_service.py`,先锁定四类行为:
|
||||
|
||||
```python
|
||||
def test_apply_should_skip_when_abnormal_accounts_is_empty():
|
||||
service = AbnormalAccountBaselineService()
|
||||
fake_connection = FakeConnection()
|
||||
service._connect = lambda: fake_connection
|
||||
|
||||
service.apply("330101199001010001", [])
|
||||
|
||||
assert fake_connection.executed_sql == []
|
||||
|
||||
|
||||
def test_apply_should_raise_when_fact_owner_mismatches_staff():
|
||||
service = AbnormalAccountBaselineService()
|
||||
|
||||
with pytest.raises(RuntimeError, match="owner_id_card"):
|
||||
service.apply(
|
||||
"330101199001010001",
|
||||
[
|
||||
{
|
||||
"account_no": "6222000000000001",
|
||||
"owner_id_card": "330101199001010099",
|
||||
"account_name": "测试员工工资卡",
|
||||
"status": 2,
|
||||
"effective_date": "2024-01-01",
|
||||
"invalid_date": "2026-03-20",
|
||||
"rule_code": "SUDDEN_ACCOUNT_CLOSURE",
|
||||
}
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def test_apply_should_insert_new_account_fact_by_account_no():
|
||||
...
|
||||
|
||||
|
||||
def test_apply_should_update_existing_account_fact_by_account_no():
|
||||
...
|
||||
```
|
||||
|
||||
`FakeConnection` / `FakeCursor` 只需记录 `execute(...)` 调用和提交次数,不需要真实数据库。
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_abnormal_account_baseline_service.py -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `FAIL`
|
||||
- 原因是服务文件尚不存在
|
||||
|
||||
- [ ] **Step 3: Write minimal implementation**
|
||||
|
||||
在 `lsfx-mock-server/services/abnormal_account_baseline_service.py` 中实现:
|
||||
|
||||
1. `__init__` 直接复用现有 `settings.CCDI_DB_*`
|
||||
2. `_connect()` 使用 `pymysql.connect(..., charset="utf8mb4", autocommit=False)`
|
||||
3. `apply(staff_id_card, abnormal_accounts)` 内部规则:
|
||||
- 空列表直接返回
|
||||
- 若任一 `owner_id_card` 与 `staff_id_card` 不一致,直接抛错
|
||||
- 对每条 fact 执行单条 upsert
|
||||
- 成功后统一 `commit()`,失败则 `rollback()`
|
||||
|
||||
建议 upsert 语句形态:
|
||||
|
||||
```sql
|
||||
INSERT INTO ccdi_account_info (
|
||||
account_no,
|
||||
account_type,
|
||||
account_name,
|
||||
owner_type,
|
||||
owner_id,
|
||||
bank,
|
||||
bank_code,
|
||||
currency,
|
||||
is_self_account,
|
||||
trans_risk_level,
|
||||
status,
|
||||
effective_date,
|
||||
invalid_date,
|
||||
create_by,
|
||||
update_by
|
||||
)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
account_name = VALUES(account_name),
|
||||
owner_type = VALUES(owner_type),
|
||||
owner_id = VALUES(owner_id),
|
||||
bank = VALUES(bank),
|
||||
bank_code = VALUES(bank_code),
|
||||
currency = VALUES(currency),
|
||||
is_self_account = VALUES(is_self_account),
|
||||
trans_risk_level = VALUES(trans_risk_level),
|
||||
status = VALUES(status),
|
||||
effective_date = VALUES(effective_date),
|
||||
invalid_date = VALUES(invalid_date),
|
||||
update_by = VALUES(update_by),
|
||||
update_time = NOW()
|
||||
```
|
||||
|
||||
固定值约束:
|
||||
|
||||
- `account_type = 'DEBIT'`
|
||||
- `owner_type = 'EMPLOYEE'`
|
||||
- `bank = '兰溪农商银行'`
|
||||
- `bank_code = 'LXNCSY'`
|
||||
- `currency = 'CNY'`
|
||||
- `is_self_account = 1`
|
||||
- `trans_risk_level = 'HIGH'`
|
||||
- `create_by/update_by = 'lsfx-mock-server'`
|
||||
|
||||
- [ ] **Step 4: Run test to verify it passes**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_abnormal_account_baseline_service.py -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 覆盖空输入、校验失败、插入和更新四类行为
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/services/abnormal_account_baseline_service.py lsfx-mock-server/tests/test_abnormal_account_baseline_service.py
|
||||
git commit -m "新增异常账户基线写库服务"
|
||||
```
|
||||
|
||||
## Task 3: 锁定异常账户事实与返回流水的一致性
|
||||
|
||||
**Files:**
|
||||
- Modify: `lsfx-mock-server/tests/test_statement_service.py`
|
||||
- Reference: `lsfx-mock-server/services/statement_service.py`
|
||||
- Reference: `lsfx-mock-server/services/statement_rule_samples.py`
|
||||
|
||||
- [ ] **Step 1: Write the failing test**
|
||||
|
||||
在 `lsfx-mock-server/tests/test_statement_service.py` 中新增一条只校验一致性的用例:
|
||||
|
||||
```python
|
||||
def test_get_bank_statement_should_only_use_abnormal_account_numbers_from_file_record():
|
||||
file_service = FileService(
|
||||
staff_identity_repository=FakeStaffIdentityRepository(),
|
||||
abnormal_account_baseline_service=FakeAbnormalAccountBaselineService(),
|
||||
)
|
||||
statement_service = StatementService(file_service=file_service)
|
||||
|
||||
response = file_service.fetch_inner_flow(
|
||||
{
|
||||
"groupId": 1001,
|
||||
"customerNo": "customer_abnormal_statement_consistency",
|
||||
"dataChannelCode": "test_code",
|
||||
"requestDateId": 20240101,
|
||||
"dataStartDateId": 20240101,
|
||||
"dataEndDateId": 20240131,
|
||||
"uploadUserId": 902001,
|
||||
}
|
||||
)
|
||||
log_id = response["data"][0]
|
||||
record = file_service.file_records[log_id]
|
||||
record.abnormal_account_hit_rules = ["SUDDEN_ACCOUNT_CLOSURE"]
|
||||
record.abnormal_accounts = [
|
||||
{
|
||||
"account_no": "6222000000000099",
|
||||
"owner_id_card": record.staff_id_card,
|
||||
"account_name": "测试员工工资卡",
|
||||
"status": 2,
|
||||
"effective_date": "2024-01-01",
|
||||
"invalid_date": "2026-03-20",
|
||||
"rule_code": "SUDDEN_ACCOUNT_CLOSURE",
|
||||
}
|
||||
]
|
||||
|
||||
result = statement_service.get_bank_statement(
|
||||
{"groupId": 1001, "logId": log_id, "pageNow": 1, "pageSize": 500}
|
||||
)
|
||||
abnormal_numbers = {
|
||||
item["accountMaskNo"]
|
||||
for item in result["data"]["bankStatementList"]
|
||||
if "销户" in item["userMemo"] or "异常账户" in item["userMemo"]
|
||||
}
|
||||
|
||||
assert abnormal_numbers == {"6222000000000099"}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails when chain drifts**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py::test_get_bank_statement_should_only_use_abnormal_account_numbers_from_file_record -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 若当前实现已满足,可直接 `PASS`
|
||||
- 若失败,只允许修正账号回填链路,禁止引入写库逻辑到 `StatementService`
|
||||
|
||||
- [ ] **Step 3: Keep implementation minimal**
|
||||
|
||||
若失败,仅允许在 `lsfx-mock-server/services/statement_service.py` 中做最小修正:
|
||||
|
||||
- `_apply_primary_binding(...)` 继续只兜底空账号
|
||||
- 不覆盖异常账户样本已有 `accountMaskNo`
|
||||
- 不新增数据库连接或写库逻辑
|
||||
|
||||
- [ ] **Step 4: Run focused statement tests**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_statement_service.py -k "abnormal_account" -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 既有异常账户样本日期/金额测试与新增一致性测试同时通过
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add lsfx-mock-server/services/statement_service.py lsfx-mock-server/tests/test_statement_service.py
|
||||
git commit -m "锁定异常账户流水与账户事实一致性"
|
||||
```
|
||||
|
||||
## Task 4: 完成回归验证并补实施记录
|
||||
|
||||
**Files:**
|
||||
- Create: `docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-implementation.md`
|
||||
- Create: `docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-verification.md`
|
||||
|
||||
- [ ] **Step 1: Run full targeted backend tests**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest \
|
||||
tests/test_abnormal_account_baseline_service.py \
|
||||
tests/test_file_service.py \
|
||||
tests/test_statement_service.py -k "abnormal_account or abnormal_account_baseline" -v
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- `PASS`
|
||||
- 无新增异常账户相关失败
|
||||
|
||||
- [ ] **Step 2: Write implementation record**
|
||||
|
||||
在 `docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-implementation.md` 中记录:
|
||||
|
||||
- 新增 `AbnormalAccountBaselineService`
|
||||
- `FileService` 在建 `logId` 时同步异常账户基线
|
||||
- 失败回滚语义
|
||||
- 异常账户事实与返回流水的一致性约束
|
||||
|
||||
- [ ] **Step 3: Write verification record**
|
||||
|
||||
在 `docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-verification.md` 中记录:
|
||||
|
||||
- 执行过的 pytest 命令
|
||||
- 关键通过点
|
||||
- 本次未启动前后端长驻进程,因此无需额外杀进程
|
||||
|
||||
- [ ] **Step 4: Verify final diff scope**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
git diff --name-only HEAD~3..HEAD
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- 仅包含本次异常账户基线同步相关服务、测试和文档
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add \
|
||||
docs/reports/implementation/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-implementation.md \
|
||||
docs/tests/records/2026-03-31-lsfx-mock-server-abnormal-account-baseline-sync-backend-verification.md
|
||||
git commit -m "记录异常账户基线同步后端实施"
|
||||
```
|
||||
@@ -0,0 +1,523 @@
|
||||
# 项目详情风险明细异常账户人员信息后端 Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
>
|
||||
> 仓库约束:当前仓库明确禁止开启 subagent,执行时统一使用 `superpowers:executing-plans`。
|
||||
|
||||
**Goal:** 为项目详情风险明细补齐“异常账户人员信息”的真实后端查询与统一导出能力,让页面展示和第 3 个 Excel sheet 共用同一套异常账户明细口径。
|
||||
|
||||
**Architecture:** 在结果总览域内新增异常账户分页查询接口和非分页导出查询,数据源直接使用 `ccdi_bank_statement_tag_result + ccdi_account_info`,按“一条命中结果一行”返回。统一导出继续复用 `POST /ccdi/project/overview/risk-details/export`,仅将第 3 个 sheet 从空表头改为真实数据写出,不新增平行导出接口。
|
||||
|
||||
**Tech Stack:** Java 21, Spring Boot 3, MyBatis Plus Page, MyBatis XML, Apache POI, JUnit 5, Mockito
|
||||
|
||||
---
|
||||
|
||||
## File Map
|
||||
|
||||
**Create:**
|
||||
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/dto/CcdiProjectAbnormalAccountQueryDTO.java`
|
||||
- 结果总览异常账户分页查询入参,仅承载 `projectId/pageNum/pageSize`
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectAbnormalAccountItemVO.java`
|
||||
- 异常账户单行展示对象,字段与页面列一致
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectAbnormalAccountPageVO.java`
|
||||
- 异常账户分页返回对象,统一承载 `rows/total`
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/excel/CcdiProjectAbnormalAccountExcel.java`
|
||||
- `异常账户人员信息` sheet 行对象
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceAbnormalAccountTest.java`
|
||||
- 单独覆盖异常账户分页与导出映射
|
||||
|
||||
**Modify:**
|
||||
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewController.java`
|
||||
- 新增异常账户分页查询接口
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java`
|
||||
- 新增异常账户分页与导出方法定义
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java`
|
||||
- 实现异常账户分页查询、导出映射,并接入统一导出流程
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapper.java`
|
||||
- 新增异常账户分页与导出查询声明
|
||||
- `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml`
|
||||
- 新增异常账户基础 SQL、分页 SQL、导出 SQL
|
||||
- `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectRiskDetailWorkbookExporter.java`
|
||||
- 第 3 个 sheet 改为写出真实异常账户数据
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewControllerTest.java`
|
||||
- 覆盖新分页接口委托行为
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewControllerContractTest.java`
|
||||
- 覆盖新接口路径、注解与参数签名
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java`
|
||||
- 更新统一导出测试,断言第 3 个 sheet 已传入真实数据
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectRiskDetailWorkbookExporterTest.java`
|
||||
- 更新工作簿导出断言,校验异常账户真实数据行
|
||||
- `ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperSqlTest.java`
|
||||
- 覆盖异常账户分页与导出 SQL 口径
|
||||
- `docs/reports/implementation/2026-03-31-project-detail-risk-details-abnormal-account-backend-implementation.md`
|
||||
- 记录后端实施细节与验证结果
|
||||
|
||||
## Task 1: 锁定结果总览异常账户分页接口契约
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/dto/CcdiProjectAbnormalAccountQueryDTO.java`
|
||||
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectAbnormalAccountPageVO.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewController.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewControllerTest.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewControllerContractTest.java`
|
||||
|
||||
- [ ] **Step 1: 先写控制器契约测试**
|
||||
|
||||
在 `CcdiProjectOverviewControllerContractTest` 中新增对新方法的反射断言:
|
||||
|
||||
```java
|
||||
Method method = controllerClass.getMethod(
|
||||
"getAbnormalAccountPeople",
|
||||
CcdiProjectAbnormalAccountQueryDTO.class
|
||||
);
|
||||
GetMapping getMapping = method.getAnnotation(GetMapping.class);
|
||||
assertEquals("/abnormal-account-people", getMapping.value()[0]);
|
||||
```
|
||||
|
||||
同时断言:
|
||||
|
||||
- 存在 `@Operation(summary = "查询异常账户人员信息")`
|
||||
- 存在 `@PreAuthorize("@ss.hasPermi('ccdi:project:query')")`
|
||||
|
||||
- [ ] **Step 2: 再写控制器委托单测,先让它失败**
|
||||
|
||||
在 `CcdiProjectOverviewControllerTest` 中新增测试:
|
||||
|
||||
```java
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO = new CcdiProjectAbnormalAccountQueryDTO();
|
||||
queryDTO.setProjectId(40L);
|
||||
when(overviewService.getAbnormalAccountPeople(queryDTO)).thenReturn(pageVO);
|
||||
|
||||
AjaxResult result = controller.getAbnormalAccountPeople(queryDTO);
|
||||
|
||||
verify(overviewService).getAbnormalAccountPeople(same(queryDTO));
|
||||
assertSame(pageVO, result.get("data"));
|
||||
```
|
||||
|
||||
- [ ] **Step 3: 运行控制器定向测试确认失败点正确**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewControllerContractTest,CcdiProjectOverviewControllerTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示缺少 `getAbnormalAccountPeople` 方法、DTO 或服务接口方法
|
||||
|
||||
- [ ] **Step 4: 最小化补齐控制器与接口骨架**
|
||||
|
||||
1. 创建 `CcdiProjectAbnormalAccountQueryDTO`
|
||||
2. 创建 `CcdiProjectAbnormalAccountPageVO`
|
||||
3. 在 `ICcdiProjectOverviewService` 增加:
|
||||
|
||||
```java
|
||||
default CcdiProjectAbnormalAccountPageVO getAbnormalAccountPeople(
|
||||
CcdiProjectAbnormalAccountQueryDTO queryDTO
|
||||
) {
|
||||
return new CcdiProjectAbnormalAccountPageVO();
|
||||
}
|
||||
```
|
||||
|
||||
4. 在控制器中增加:
|
||||
|
||||
```java
@GetMapping("/abnormal-account-people")
@Operation(summary = "查询异常账户人员信息")
@PreAuthorize("@ss.hasPermi('ccdi:project:query')")
public AjaxResult getAbnormalAccountPeople(CcdiProjectAbnormalAccountQueryDTO queryDTO) {
    CcdiProjectAbnormalAccountPageVO pageVO = overviewService.getAbnormalAccountPeople(queryDTO);
    return AjaxResult.success(pageVO);
}
```
|
||||
|
||||
- [ ] **Step 5: 重新运行控制器测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewControllerContractTest,CcdiProjectOverviewControllerTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 6: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/dto/CcdiProjectAbnormalAccountQueryDTO.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectAbnormalAccountPageVO.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewController.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewControllerContractTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/controller/CcdiProjectOverviewControllerTest.java
|
||||
git commit -m "补充异常账户人员查询接口契约"
|
||||
```
|
||||
|
||||
## Task 2: 补齐异常账户分页与导出 SQL 口径
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectAbnormalAccountItemVO.java`
|
||||
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/excel/CcdiProjectAbnormalAccountExcel.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapper.java`
|
||||
- Modify: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperSqlTest.java`
|
||||
|
||||
- [ ] **Step 1: 先写 Mapper SQL 测试**
|
||||
|
||||
在 `CcdiProjectOverviewMapperSqlTest` 中新增两个 select 断言:
|
||||
|
||||
```java
String abnormalPageSql = extractSelect(xml, "selectAbnormalAccountPage");
String abnormalExportSql = extractSelect(xml, "selectAbnormalAccountList");

assertTrue(abnormalPageSql.contains("tr.model_code = 'ABNORMAL_ACCOUNT'"), abnormalPageSql);
assertTrue(abnormalPageSql.contains("tr.bank_statement_id is null"), abnormalPageSql);
assertTrue(abnormalPageSql.contains("account.owner_type = 'EMPLOYEE'"), abnormalPageSql);
assertTrue(abnormalPageSql.contains("tr.reason_detail"), abnormalPageSql);
assertTrue(abnormalExportSql.contains("order by abnormal_time desc"), abnormalExportSql);
```
|
||||
|
||||
同时补充对状态映射和规则时间字段的静态断言:
|
||||
|
||||
- `when account.status = 1 then '正常'`
|
||||
- `when account.status = 2 then '已销户'`
|
||||
- `when tr.rule_code = 'SUDDEN_ACCOUNT_CLOSURE'`
|
||||
- `when tr.rule_code = 'DORMANT_ACCOUNT_LARGE_ACTIVATION'`
|
||||
|
||||
- [ ] **Step 2: 运行 SQL 测试确认失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewMapperSqlTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示缺少 `selectAbnormalAccountPage` 或 `selectAbnormalAccountList`
|
||||
|
||||
- [ ] **Step 3: 设计基础 SQL 片段,再补 Mapper 方法签名**
|
||||
|
||||
在 `CcdiProjectOverviewMapper.java` 中新增:
|
||||
|
||||
```java
Page<CcdiProjectAbnormalAccountItemVO> selectAbnormalAccountPage(
    Page<CcdiProjectAbnormalAccountItemVO> page,
    @Param("query") CcdiProjectAbnormalAccountQueryDTO query
);

List<CcdiProjectAbnormalAccountItemVO> selectAbnormalAccountList(@Param("projectId") Long projectId);
```
|
||||
|
||||
在 XML 中先抽出基础 SQL 片段:
|
||||
|
||||
- `abnormalAccountBaseSql`
|
||||
- 统一负责:
|
||||
- 项目过滤
|
||||
- `ABNORMAL_ACCOUNT` 模型过滤
|
||||
- 对象型结果过滤
|
||||
- `owner_type = 'EMPLOYEE'`
|
||||
- 账号唯一关联
|
||||
- `账号 / 开户人 / 银行 / 异常类型 / 异常发生时间 / 状态` 映射
|
||||
|
||||
- [ ] **Step 4: 实现分页 SQL 与导出 SQL**
|
||||
|
||||
分页 SQL:
|
||||
|
||||
- 使用 `#{query.projectId}`
|
||||
- 按 `abnormal_time desc, account.account_no asc, tr.rule_code asc`
|
||||
|
||||
导出 SQL:
|
||||
|
||||
- 使用 `#{projectId}`
|
||||
- 与分页 SQL 保持同一列集合与同一排序规则
|
||||
|
||||
账号唯一关联要求:
|
||||
|
||||
- 优先通过 `tr.reason_detail` 中包含的账号匹配 `account.account_no`
|
||||
- 没有账号匹配条件时不要把员工名下全部账户笛卡尔展开
|
||||
|
||||
- [ ] **Step 5: 重新运行 SQL 测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewMapperSqlTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 6: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectAbnormalAccountItemVO.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/excel/CcdiProjectAbnormalAccountExcel.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapper.java \
|
||||
ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperSqlTest.java
|
||||
git commit -m "补充异常账户人员查询SQL"
|
||||
```
|
||||
|
||||
## Task 3: 完成服务层分页映射与项目校验
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java`
|
||||
- Create: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceAbnormalAccountTest.java`
|
||||
|
||||
- [ ] **Step 1: 先写服务层失败测试**
|
||||
|
||||
在 `CcdiProjectOverviewServiceAbnormalAccountTest` 中新增 4 个测试:
|
||||
|
||||
1. 分页查询返回 `rows/total`
|
||||
2. 分页查询默认页码为 `1`、分页大小为 `5`
|
||||
3. 导出查询返回 `List<CcdiProjectAbnormalAccountExcel>`
|
||||
4. 项目不存在时,分页与导出都抛 `ServiceException`
|
||||
|
||||
核心断言示例:
|
||||
|
||||
```java
CcdiProjectAbnormalAccountPageVO result = service.getAbnormalAccountPeople(queryDTO);
assertEquals(1, result.getRows().size());
assertEquals("突然销户", result.getRows().getFirst().getAbnormalType());
verify(overviewMapper).selectAbnormalAccountPage(any(Page.class), any(CcdiProjectAbnormalAccountQueryDTO.class));
```
|
||||
|
||||
- [ ] **Step 2: 跑服务层测试确认失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewServiceAbnormalAccountTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示缺少服务方法、Mapper 调用或 Excel 映射
|
||||
|
||||
- [ ] **Step 3: 实现最小服务层逻辑**
|
||||
|
||||
在 `ICcdiProjectOverviewService` 中新增:
|
||||
|
||||
```java
default List<CcdiProjectAbnormalAccountExcel> exportAbnormalAccountPeople(Long projectId) {
    return List.of();
}
```
|
||||
|
||||
在 `CcdiProjectOverviewServiceImpl` 中实现:
|
||||
|
||||
1. `getAbnormalAccountPeople(queryDTO)`
|
||||
2. `exportAbnormalAccountPeople(projectId)`
|
||||
3. `buildAbnormalAccountExcelRow(...)`
|
||||
|
||||
实现要求:
|
||||
|
||||
- 先 `ensureProjectExists(...)`
|
||||
- 分页默认值沿用现有结果总览风格
|
||||
- 页面 VO 和 Excel 行对象字段完全同构
|
||||
|
||||
- [ ] **Step 4: 重新运行服务层测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewServiceAbnormalAccountTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 5: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceAbnormalAccountTest.java
|
||||
git commit -m "补充异常账户人员服务映射"
|
||||
```
|
||||
|
||||
## Task 4: 将异常账户真实数据接入统一导出工作簿
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java`
|
||||
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectRiskDetailWorkbookExporter.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java`
|
||||
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectRiskDetailWorkbookExporterTest.java`
|
||||
|
||||
- [ ] **Step 1: 先改统一导出测试,让它要求第 3 个 sheet 有真实数据**
|
||||
|
||||
在 `CcdiProjectRiskDetailWorkbookExporterTest` 中把原先“只有表头”改成:
|
||||
|
||||
```java
CcdiProjectAbnormalAccountExcel abnormalRow = new CcdiProjectAbnormalAccountExcel();
abnormalRow.setAccountNo("6222000000000001");
abnormalRow.setAccountName("李四");
abnormalRow.setBankName("中国农业银行");
abnormalRow.setAbnormalType("突然销户");
abnormalRow.setAbnormalTime("2026-03-20");
abnormalRow.setStatus("已销户");
```
|
||||
|
||||
断言:
|
||||
|
||||
- sheet 名仍为 `异常账户人员信息`
|
||||
- 第 1 行写出真实数据
|
||||
- 列顺序依次为:
|
||||
- `账号`
|
||||
- `开户人`
|
||||
- `银行`
|
||||
- `异常类型`
|
||||
- `异常发生时间`
|
||||
- `状态`
|
||||
|
||||
- [ ] **Step 2: 再改服务层统一导出测试**
|
||||
|
||||
在 `CcdiProjectOverviewServiceImplTest.shouldExportRiskDetailsWorkbook` 中增加异常账户 stub:
|
||||
|
||||
```java
when(overviewMapper.selectAbnormalAccountList(40L)).thenReturn(List.of(abnormalItem));
```
|
||||
|
||||
并把 `verify(workbookExporter).export(...)` 扩展为包含第 3 个参数列表断言。
|
||||
|
||||
- [ ] **Step 3: 运行导出相关测试确认失败**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewServiceImplTest,CcdiProjectRiskDetailWorkbookExporterTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- FAIL,提示导出器方法签名或第 3 个 sheet 断言不匹配
|
||||
|
||||
- [ ] **Step 4: 最小化修改导出器与服务**
|
||||
|
||||
1. 将 `CcdiProjectRiskDetailWorkbookExporter.export(...)` 方法签名扩为接收异常账户列表
|
||||
2. `writeAbnormalAccountSheet(...)` 从“只写表头”改成“表头 + 数据行”
|
||||
3. `CcdiProjectOverviewServiceImpl.exportRiskDetails(...)` 中查询 `exportAbnormalAccountPeople(projectId)`
|
||||
4. 调用导出器时一并传入异常账户列表
|
||||
|
||||
- [ ] **Step 5: 重新运行导出相关测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewServiceImplTest,CcdiProjectRiskDetailWorkbookExporterTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 6: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java \
|
||||
ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectRiskDetailWorkbookExporter.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java \
|
||||
ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectRiskDetailWorkbookExporterTest.java
|
||||
git commit -m "补充风险明细异常账户统一导出"
|
||||
```
|
||||
|
||||
## Task 5: 记录实施结果并做最终回归
|
||||
|
||||
**Files:**
|
||||
|
||||
- Modify: `docs/reports/implementation/2026-03-31-project-detail-risk-details-abnormal-account-backend-implementation.md`
|
||||
|
||||
- [ ] **Step 1: 运行后端最终回归测试**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
mvn -pl ccdi-project -am \
|
||||
-Dsurefire.failIfNoSpecifiedTests=false \
|
||||
-Dtest=CcdiProjectOverviewControllerContractTest,CcdiProjectOverviewControllerTest,CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewServiceAbnormalAccountTest,CcdiProjectOverviewServiceImplTest,CcdiProjectRiskDetailWorkbookExporterTest \
|
||||
test
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 2: 如需手工联调,启动后端并验证后立即关闭**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
mvn -pl ruoyi-admin -am package -DskipTests
cd ruoyi-admin/target && java -jar ruoyi-admin.jar
```
|
||||
|
||||
至少验证:
|
||||
|
||||
1. `GET /ccdi/project/overview/abnormal-account-people` 可返回 `rows/total`
|
||||
2. `POST /ccdi/project/overview/risk-details/export` 第 3 个 sheet 含真实异常账户数据
|
||||
|
||||
验证结束后必须关闭 `java -jar ruoyi-admin.jar` 进程。
|
||||
|
||||
- [ ] **Step 3: 编写后端实施记录**
|
||||
|
||||
在实施记录中写清:
|
||||
|
||||
- 新增接口路径
|
||||
- 新增 DTO/VO/Excel 对象
|
||||
- Mapper SQL 口径
|
||||
- 统一导出第 3 个 sheet 的真实化改动
|
||||
- 自动化测试命令与结果
|
||||
- 如有手工联调,记录启动与关闭进程情况
|
||||
|
||||
- [ ] **Step 4: 提交本任务**
|
||||
|
||||
```bash
|
||||
git add docs/reports/implementation/2026-03-31-project-detail-risk-details-abnormal-account-backend-implementation.md
|
||||
git commit -m "记录异常账户人员信息后端实施"
|
||||
```
|
||||
|
||||
## Final Verification
|
||||
|
||||
- [ ] `GET /ccdi/project/overview/abnormal-account-people` 返回字段完整:`accountNo/accountName/bankName/abnormalType/abnormalTime/status`
|
||||
- [ ] 页面查询与导出查询都只取 `ABNORMAL_ACCOUNT` 对象型结果
|
||||
- [ ] 第 3 个 sheet 不再是空白模板
|
||||
- [ ] 同一账号命中多条规则时保留多行
|
||||
- [ ] 如启动了后端进程,验证结束后已手动关闭
|
||||
@@ -0,0 +1,94 @@
|
||||
# 2026-04-14 后端运行与打包约定实施记录
|
||||
|
||||
## 1. 改动目标
|
||||
|
||||
- 固化本地后端继续走 `ruoyi-admin.jar + 内嵌 Tomcat` 启动链路
|
||||
- 固化 `mvn -pl ruoyi-admin -am package -DskipTests` 同时产出 `jar` 与 `war`
|
||||
- 固化部署脚本统一消费 `ruoyi-admin.war`
|
||||
- 固化 `bin/restart_java_backend.sh` 默认跟随后端日志,并支持 `FOLLOW_LOGS=false`
|
||||
|
||||
## 2. 实施内容
|
||||
|
||||
### 2.1 Maven 打包链路
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `ruoyi-admin/pom.xml`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 保持 `ruoyi-admin` 的 `<packaging>jar</packaging>` 不变,确保本地运行仍使用可执行 `jar`
|
||||
- 为 `maven-war-plugin` 增加 `package` 阶段显式执行 `war` 目标,确保执行 `mvn -pl ruoyi-admin -am package -DskipTests` 时额外生成 `ruoyi-admin.war`
|
||||
- 保留 `spring-boot-maven-plugin repackage`,继续生成可执行 `ruoyi-admin.jar`
|
||||
|
||||
### 2.2 本地后端重启脚本
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `bin/restart_java_backend.sh`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 新增 `FOLLOW_LOGS="${FOLLOW_LOGS:-true}"` 默认开关
|
||||
- `start`、`restart` 成功后默认执行 `tail -F` 持续输出后端日志
|
||||
- 当外部传入 `FOLLOW_LOGS=false` 时,仅启动后端,不进入日志跟随
|
||||
|
||||
### 2.3 部署产物切换
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `deploy/deploy-to-nas.sh`
|
||||
- `deploy/deploy.ps1`
|
||||
- `docker/backend/Dockerfile`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 部署目录组装时由复制 `ruoyi-admin.jar` 改为复制 `ruoyi-admin.war`
|
||||
- Docker 后端镜像改为消费 `ruoyi-admin.war`
|
||||
- 保证部署脚本不再把 `ruoyi-admin.jar` 当作生产部署产物
|
||||
|
||||
### 2.4 项目约定同步
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `AGENTS.md`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 补充本地运行、双产物打包、部署使用 `war`、`FOLLOW_LOGS` 开关等仓库级约定
|
||||
- 在 Build / Run / Test Commands 中补充主应用定向打包命令
|
||||
|
||||
## 3. 验证记录
|
||||
|
||||
### 3.1 脚本检查
|
||||
|
||||
执行:
|
||||
|
||||
```bash
sh docs/tests/scripts/test-restart-java-backend.sh
sh docs/tests/scripts/test-backend-package-and-deploy-conventions.sh
```
|
||||
|
||||
结果:
|
||||
|
||||
- 两个脚本均通过
|
||||
|
||||
### 3.2 Maven 双产物验证
|
||||
|
||||
执行:
|
||||
|
||||
```bash
|
||||
mvn -pl ruoyi-admin -am package -DskipTests
|
||||
```
|
||||
|
||||
结果:
|
||||
|
||||
- 构建成功
|
||||
- 生成 `ruoyi-admin/target/ruoyi-admin.jar`
|
||||
- 生成 `ruoyi-admin/target/ruoyi-admin.war`
|
||||
|
||||
## 4. 结论
|
||||
|
||||
- 本地开发链路继续保持 `jar + 内嵌 Tomcat`
|
||||
- 部署链路统一切换为 `war`
|
||||
- 后端重启脚本默认跟日志,且支持显式关闭
|
||||
@@ -0,0 +1,62 @@
|
||||
# 2026-04-14 NAS TongWeb 部署脚本实施记录
|
||||
|
||||
## 1. 目标
|
||||
|
||||
- 新增一套独立于 Docker 的 NAS 部署脚本
|
||||
- 部署链路固定使用 `ruoyi-admin.war`
|
||||
- 远端通过 `TongWeb` 自动部署目录发布应用,并使用 `stopserver.sh` / `startservernohup.sh` 重启服务
|
||||
|
||||
## 2. 实施内容
|
||||
|
||||
### 2.1 新增 TongWeb NAS 部署入口
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `deploy/deploy-to-nas-tongweb.sh`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 提供与现有 NAS 脚本一致的 SSH 连接参数风格
|
||||
- 默认执行 `mvn -pl ruoyi-admin -am package -DskipTests`
|
||||
- 本地仅校验并上传 `ruoyi-admin/target/ruoyi-admin.war`
|
||||
- 支持 `--dry-run` 预览参数
|
||||
|
||||
### 2.2 新增 TongWeb 远端执行器
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `deploy/remote-deploy-tongweb.py`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 通过 SFTP 将 `war` 上传到 NAS 临时目录 `${remoteRoot}/backend/`
|
||||
- 远端复制 `war` 到 `${TONGWEB_HOME}/autodeploy/${appName}.war`
|
||||
- 清理 `${TONGWEB_HOME}/autodeploy/${appName}` 旧解压目录
|
||||
- 依次执行 `stopserver.sh`、`startservernohup.sh`
|
||||
|
||||
### 2.3 新增脚本回归测试
|
||||
|
||||
涉及文件:
|
||||
|
||||
- `tests/deploy/test_deploy_to_nas_tongweb.py`
|
||||
|
||||
实施内容:
|
||||
|
||||
- 覆盖默认参数 `dry-run`
|
||||
- 覆盖自定义参数 `dry-run`
|
||||
- 校验部署入口已调用 `remote-deploy-tongweb.py`
|
||||
- 校验远端执行器包含 `autodeploy`、`stopserver.sh`、`startservernohup.sh`
|
||||
|
||||
## 3. 验证命令
|
||||
|
||||
```bash
python3 -m pytest tests/deploy/test_deploy_to_nas_tongweb.py -q
bash -n deploy/deploy-to-nas-tongweb.sh
bash deploy/deploy-to-nas-tongweb.sh --dry-run
```
|
||||
|
||||
## 4. 说明
|
||||
|
||||
- 默认 `TongWebHome` 取 `/opt/TongWeb`,可通过第 6 个位置参数或环境变量 `TONGWEB_HOME` 覆盖
|
||||
- 默认应用名为 `ruoyi-admin`,可通过第 7 个位置参数或环境变量 `APP_NAME` 覆盖
|
||||
- 本次只新增 `TongWeb` 后端部署链路,不改动现有 Docker NAS 部署脚本
|
||||
@@ -0,0 +1,26 @@
|
||||
# LSFX Mock Server `ccdi_account_info` 异常账户字段补迁移后端实施文档
|
||||
|
||||
## 背景
|
||||
|
||||
- `lsfx-mock-server` 上传接口 `/watson/api/project/remoteUploadSplitFile` 在写入 `ccdi_account_info` 时使用了 `is_self_account`、`trans_risk_level` 字段。
|
||||
- 当前开发库中的 `ccdi_account_info` 为历史表结构,不包含这两列,导致 `AbnormalAccountBaselineService.apply(...)` 执行 upsert 时依次抛出 `Unknown column 'is_self_account' in 'field list'`、`Unknown column 'trans_risk_level' in 'field list'`,上传接口直接返回 500。
|
||||
|
||||
## 本次修改
|
||||
|
||||
- 新增增量脚本 `sql/migration/2026-04-15-sync-ccdi-account-info-abnormal-account-columns.sql`。
|
||||
- 脚本以最短路径为已有 `ccdi_account_info` 表补齐异常账户同步当前必需的字段,并保持可重复执行:
|
||||
- 使用 `information_schema.columns` 判断字段是否已存在
|
||||
- 通过 `PREPARE / EXECUTE` 仅在缺列时执行 `ALTER TABLE`
|
||||
- 补齐 `is_self_account` 与 `trans_risk_level`
|
||||
- 列位置与当前写库 SQL 保持一致
|
||||
- 新增回归测试 `lsfx-mock-server/tests/test_schema_migration_scripts.py`,锁定该增量脚本必须存在且包含两条补列语句。
|
||||
|
||||
## 验证
|
||||
|
||||
- `python3 -m pytest /Users/wkc/Desktop/ccdi/ccdi/lsfx-mock-server/tests/test_schema_migration_scripts.py -q`
|
||||
- 使用 `bin/mysql_utf8_exec.sh` 执行增量脚本后,复查 `SHOW COLUMNS FROM ccdi_account_info`,确认存在 `is_self_account`、`trans_risk_level` 字段。
|
||||
|
||||
## 影响范围
|
||||
|
||||
- 仅影响 `lsfx-mock-server` 依赖的 `ccdi_account_info` 历史表结构补齐。
|
||||
- 不修改接口协议,不改动前端。
|
||||
@@ -0,0 +1,53 @@
|
||||
# 账户库双表合单表后端实施计划
|
||||
|
||||
## 1. 目标
|
||||
|
||||
将账户库由 `ccdi_account_info` + `ccdi_account_result` 双表结构收敛为单表 `ccdi_account_info`,迁移完成后删除旧表,同时保持现有账户库接口、字段名和前端交互不变。
|
||||
|
||||
## 2. 实施范围
|
||||
|
||||
- 数据库增量迁移脚本
|
||||
- 账户库后端实体、Mapper XML、服务层
|
||||
- 外部场景种子脚本
|
||||
- 账户库相关回归测试
|
||||
|
||||
本次不调整前端页面、接口路径和接口字段名。
|
||||
|
||||
## 3. 实施步骤
|
||||
|
||||
### 3.1 数据库迁移
|
||||
|
||||
1. 新增 `sql/migration/2026-04-16-merge-ccdi-account-result-into-info.sql`
|
||||
2. 在脚本中先校验 `ccdi_account_info.account_no` 无重复
|
||||
3. 为 `ccdi_account_info` 补齐分析字段
|
||||
4. 按 `account_no` 从 `ccdi_account_result` 回填数据
|
||||
5. 回填完成后删除 `ccdi_account_result`
|
||||
|
||||
### 3.2 后端代码调整
|
||||
|
||||
1. `CcdiAccountInfo` 实体吸收分析字段映射
|
||||
2. 删除 `CcdiAccountResult` 实体与 `CcdiAccountResultMapper`
|
||||
3. `CcdiAccountInfoMapper.xml` 去掉对 `ccdi_account_result` 的联表
|
||||
4. `CcdiAccountInfoServiceImpl` 去掉结果表双写逻辑
|
||||
5. 保持原有业务语义:
|
||||
- `bankScope = EXTERNAL` 时补齐默认分析字段
|
||||
- `bankScope != EXTERNAL` 时清空分析字段,避免误写
|
||||
|
||||
### 3.3 配套脚本与测试
|
||||
|
||||
1. 将 `2026-04-13` 外部账户场景种子脚本改为单表写入
|
||||
2. 新增 SQL 脚本文本断言测试
|
||||
3. 新增账户库服务层与 Mapper SQL 结构测试
|
||||
|
||||
## 4. 验证要点
|
||||
|
||||
- 迁移脚本包含“补字段、回填、删旧表”三步
|
||||
- 账户库列表/详情/导出查询均只读 `ccdi_account_info`
|
||||
- 行外账户保存分析字段
|
||||
- 行内账户清空分析字段
|
||||
- 外部场景种子脚本不再写入 `ccdi_account_result`
|
||||
|
||||
## 5. 风险说明
|
||||
|
||||
- 仓库当前 `ccdi-info-collection` 模块存在既有依赖缺失问题,可能影响常规 Maven 全量编译与测试执行
|
||||
- 本次需要将“账户库改动验证结果”和“仓库原有构建阻塞”分开记录
|
||||
@@ -0,0 +1,83 @@
|
||||
# 员工基础信息新增是否党员字段后端实施计划
|
||||
|
||||
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** 在员工基础信息后端链路中新增“是否党员”字段,保证数据库、实体、接口、导入导出与最小测试契约保持一致。
|
||||
|
||||
**Architecture:** 继续沿用 `ccdi_base_staff` 现有维护链路,在表上增加 `is_party_member` 字段,并在 `CcdiBaseStaff` 的实体、DTO、VO、Excel、Mapper XML 与服务校验中同步补齐。实现保持最短路径,不新增新的接口层或旁路转换逻辑,只在现有员工维护链路上扩字段。
|
||||
|
||||
**Tech Stack:** MySQL, Java 21, Spring Boot 3, MyBatis Plus, EasyExcel, JUnit 5, Markdown
|
||||
|
||||
---
|
||||
|
||||
## 文件结构与职责
|
||||
|
||||
**后端源码**
|
||||
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/CcdiBaseStaff.java`
|
||||
新增 `partyMember` 字段并映射 `is_party_member`。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiBaseStaffAddDTO.java`
|
||||
新增新增接口入参字段与非空校验。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiBaseStaffEditDTO.java`
|
||||
新增编辑接口入参字段与非空校验。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/CcdiBaseStaffVO.java`
|
||||
新增详情/列表返回字段。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiBaseStaffExcel.java`
|
||||
新增 Excel 导入导出列,并挂接“是/否”字典下拉。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/ImportFailureVO.java`
|
||||
新增导入失败记录字段回显。
|
||||
- `ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiBaseStaffMapper.xml`
|
||||
在列表查询、批量新增、批量更新 SQL 中补 `is_party_member`。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffServiceImpl.java`
|
||||
补新增/编辑链路校验,约束 `partyMember` 只能为 `0/1`。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffImportServiceImpl.java`
|
||||
补导入链路的必填和枚举值校验。
|
||||
|
||||
**SQL**
|
||||
|
||||
- `sql/migration/2026-04-17-add-base-staff-party-member.sql`
|
||||
以幂等方式为 `ccdi_base_staff` 增加字段,并补充 `ccdi_yes_no_flag` 字典数据。
|
||||
- `sql/ccdi_yes_no_flag_dict.sql`
|
||||
提供“是/否标记”字典初始化脚本,供新环境或独立初始化使用。
|
||||
|
||||
**测试**
|
||||
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiBaseStaffServiceImplTest.java`
|
||||
验证服务层新增/修改/详情查询会透传 `partyMember`。
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiBaseStaffImportServiceImplTest.java`
|
||||
验证导入场景仅允许 `0/1`。
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/mapper/CcdiBaseStaffMapperTest.java`
|
||||
验证 Mapper XML 已包含 `is_party_member` 与 `#{item.partyMember}`。
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/utils/EasyExcelUtilTemplateTest.java`
|
||||
验证员工模板已补“是否党员”下拉列。
|
||||
|
||||
## 实施步骤
|
||||
|
||||
- [ ] 在 `ccdi_base_staff` 表增加 `is_party_member`,默认值为 `0`,避免历史数据为空。
|
||||
- [ ] 在员工基础信息实体、DTO、VO、Excel 对象中补齐 `partyMember`。
|
||||
- [ ] 在 `CcdiBaseStaffMapper.xml` 的列表、批量新增、批量更新 SQL 中补 `is_party_member`。
|
||||
- [ ] 在 `CcdiBaseStaffServiceImpl` 与 `CcdiBaseStaffImportServiceImpl` 中增加 `0/1` 值域校验。
|
||||
- [ ] 新增 `ccdi_yes_no_flag` 字典脚本,保证导入模板下拉可用。
|
||||
- [ ] 补充并执行后端定向测试;若执行受现有依赖问题阻塞,需要在记录中明确注明阻塞原因。
|
||||
|
||||
## 验证记录
|
||||
|
||||
- 已尝试执行:
|
||||
|
||||
```bash
mvn -pl ccdi-info-collection -Dtest=CcdiBaseStaffServiceImplTest,CcdiBaseStaffImportServiceImplTest,CcdiBaseStaffMapperTest,EasyExcelUtilTemplateTest test
mvn -pl ccdi-info-collection -DskipTests compile
```
|
||||
|
||||
- 当前结果:
|
||||
- `test` 在进入本次新增断言前,被模块内既有测试编译问题拦截,表现为缺少 `org.springframework.data.redis.core.*` 类型。
|
||||
- `compile` 被模块当前既有依赖缺失拦截,表现为缺少 `com.ruoyi.common.annotation.*`、`org.springframework.data.redis.core.*`、`IdCardUtil` 等类型。
|
||||
- 上述阻塞不是本次“是否党员”字段新增引入的新问题,但会影响自动化验证结论,需要后续先修复模块依赖基线。
|
||||
|
||||
## 完成标准
|
||||
|
||||
- 员工基础信息接口可读写 `partyMember`
|
||||
- 员工列表、详情、导入导出链路都包含 `partyMember`
|
||||
- 数据库字段与字典 SQL 已补齐
|
||||
- 后端测试契约已同步更新
|
||||
- 已明确记录当前自动化验证阻塞点
|
||||
@@ -0,0 +1,22 @@
|
||||
# 员工信息仅展示本人资产后端实施计划
|
||||
|
||||
## 变更目标
|
||||
|
||||
- 员工详情接口返回的 `assetInfoList` 仅包含员工本人资产
|
||||
- 员工信息页不再通过详情接口混入亲属资产数据
|
||||
|
||||
## 变更范围
|
||||
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffServiceImpl.java`
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiBaseStaffServiceImplTest.java`
|
||||
|
||||
## 实施步骤
|
||||
|
||||
1. 将员工详情聚合资产的查询口径从 `family_id = 员工身份证号` 调整为 `family_id = 员工身份证号 and person_id = 员工身份证号`
|
||||
2. 保持员工新增、编辑时的 `replaceByFamilyId` 逻辑不变,继续由后端写入本人资产
|
||||
3. 调整单元测试,验证员工详情仅返回本人资产
|
||||
|
||||
## 验证要点
|
||||
|
||||
- 查询员工详情时,`assetInfoList` 不再返回亲属资产
|
||||
- 现有员工新增、编辑、删除链路不受影响
|
||||
@@ -0,0 +1,188 @@
|
||||
# 实体库管理后端实施计划
|
||||
|
||||
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** 新增独立的实体库管理后端链路,基于 `ccdi_enterprise_base_info` 支持分页查询、详情、新增、编辑、删除、异步导入、导入状态查询和失败记录查询。
|
||||
|
||||
**Architecture:** 复用现有 `CcdiEnterpriseBaseInfo` 实体与 Mapper,新增独立的 Controller、Service、DTO、VO、Excel 与导入服务,接口风格与员工信息维护保持一致。`riskLevel`、`entSource`、`dataSource` 统一通过枚举接口对外提供选项,导入采用严格新增策略,数据库重复和 Excel 内重复统一记为失败。
|
||||
|
||||
**Tech Stack:** Java 21, Spring Boot 3, MyBatis Plus, Redis, EasyExcel, JUnit 5, MySQL, Markdown
|
||||
|
||||
---
|
||||
|
||||
## 文件结构与职责
|
||||
|
||||
**后端源码**
|
||||
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiEnterpriseBaseInfoController.java`
|
||||
实体库管理对外接口入口,提供 CRUD、导入模板、导入任务状态和失败记录查询。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/ICcdiEnterpriseBaseInfoService.java`
|
||||
定义实体库管理服务接口。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/ICcdiEnterpriseBaseInfoImportService.java`
|
||||
定义异步导入状态与失败记录查询接口。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiEnterpriseBaseInfoServiceImpl.java`
|
||||
承接分页查询、详情、增删改和导入任务提交。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiEnterpriseBaseInfoImportServiceImpl.java`
|
||||
处理异步导入、校验、失败记录落 Redis 与状态回写。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiEnterpriseBaseInfoQueryDTO.java`
|
||||
定义查询条件。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiEnterpriseBaseInfoAddDTO.java`
|
||||
定义新增入参和校验规则。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiEnterpriseBaseInfoEditDTO.java`
|
||||
定义编辑入参与主键不可变约束。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/CcdiEnterpriseBaseInfoVO.java`
|
||||
承接列表和详情返回。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/EnterpriseBaseInfoImportFailureVO.java`
|
||||
承接导入失败记录回显。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiEnterpriseBaseInfoExcel.java`
|
||||
定义导入模板列、导入字段和字典下拉。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/enums/EnterpriseRiskLevel.java`
|
||||
新增实体风险等级枚举。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/enums/EnterpriseSource.java`
|
||||
新增企业来源枚举。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiEnumController.java`
|
||||
新增风险等级与企业来源选项接口。
|
||||
- `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/mapper/CcdiEnterpriseBaseInfoMapper.java`
|
||||
补充分页查询与批量导入方法声明。
|
||||
- `ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiEnterpriseBaseInfoMapper.xml`
|
||||
补充分页查询、结果映射、批量插入 SQL。
|
||||
|
||||
**SQL**
|
||||
|
||||
- `sql/migration/2026-04-17-add-enterprise-base-info-menu.sql`
|
||||
新增“实体库管理”菜单和功能权限。
|
||||
- `sql/migration/2026-04-17-add-enterprise-base-info-dict-or-enum-seed.sql`
|
||||
如需初始化与导入模板一致的固定值说明,可在脚本中补充注释性或字典性数据;若最终走纯枚举接口,则只保留菜单 SQL。
|
||||
|
||||
**测试**
|
||||
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiEnterpriseBaseInfoServiceImplTest.java`
|
||||
校验新增、编辑、删除、详情和枚举值校验。
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiEnterpriseBaseInfoImportServiceImplTest.java`
|
||||
校验导入重复失败、Excel 内重复失败和状态回写。
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/mapper/CcdiEnterpriseBaseInfoMapperTest.java`
|
||||
校验分页 SQL 和结果映射关键片段。
|
||||
- `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/controller/CcdiEnumControllerTest.java`
|
||||
校验新增的枚举选项接口。
|
||||
|
||||
## 实施任务
|
||||
|
||||
### Task 1: 搭建实体库管理 DTO / VO / Excel 契约
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiEnterpriseBaseInfoQueryDTO.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiEnterpriseBaseInfoAddDTO.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/dto/CcdiEnterpriseBaseInfoEditDTO.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/CcdiEnterpriseBaseInfoVO.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/vo/EnterpriseBaseInfoImportFailureVO.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiEnterpriseBaseInfoExcel.java`
|
||||
- Reference: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/CcdiEnterpriseBaseInfo.java`
|
||||
- Reference: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/domain/excel/CcdiIntermediaryEntityExcel.java`
|
||||
|
||||
- [x] 定义 QueryDTO,包含 `enterpriseName`、`socialCreditCode`、`enterpriseType`、`enterpriseNature`、`industryClass`、`status`、`riskLevel`、`entSource`。
|
||||
- [x] 定义 AddDTO / EditDTO,完整覆盖单表维护字段,并给 `socialCreditCode`、`enterpriseName`、`status`、`riskLevel`、`entSource`、`dataSource` 加基础校验。
|
||||
- [x] 在 EditDTO 中保持主键为必填,不新增改主键语义字段。
|
||||
- [x] 定义 VO,补齐列表和详情所需字段,并预留 `createTime` 供前端表格展示。
|
||||
- [x] 定义 Excel 对象,列顺序与页面表单一致,并为 `enterpriseType`、`enterpriseNature`、`legalCertType` 使用现有字典下拉;`riskLevel`、`entSource`、`dataSource` 保持文本列,后续由导入服务做枚举校验。
|
||||
|
||||
### Task 2: 补齐风险等级与企业来源枚举出口
|
||||
|
||||
**Files:**
|
||||
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/enums/EnterpriseRiskLevel.java`
|
||||
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/enums/EnterpriseSource.java`
|
||||
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiEnumController.java`
|
||||
|
||||
- [x] 新增 `EnterpriseRiskLevel` 枚举,口径固定为 `1/2/3` 对应高/中/低风险,并提供 `getCode()`、`getDesc()`、`getDescByCode()`、`contains()`。
|
||||
- [x] 新增 `EnterpriseSource` 枚举,口径固定为 `GENERAL`、`EMP_RELATION`、`CREDIT_CUSTOMER`、`INTERMEDIARY`、`BOTH`,并提供与现有 `DataSource` 一致的方法。
|
||||
- [x] 在 `CcdiEnumController` 中新增 `/enterpriseRiskLevel` 与 `/enterpriseSource` 两个接口,返回 `EnumOptionVO` 列表。
|
||||
- [x] 保持现有 `/dataSource` 不变,避免前端重复造轮子。
|
||||
|
||||
### Task 3: 实现分页查询与 CRUD 服务链路

**Files:**

- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/mapper/CcdiEnterpriseBaseInfoMapper.java`
- Modify: `ccdi-info-collection/src/main/resources/mapper/info/collection/CcdiEnterpriseBaseInfoMapper.xml`
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/ICcdiEnterpriseBaseInfoService.java`
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiEnterpriseBaseInfoServiceImpl.java`
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiEnterpriseBaseInfoController.java`
- Reference: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiBaseStaffController.java`
- Reference: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffServiceImpl.java`

- [x] 在 Mapper 中新增分页查询方法,按 QueryDTO 动态拼装筛选条件。
- [x] 在 Mapper XML 中新增结果映射和 `selectEnterpriseBaseInfoPage` SQL,输出 `create_time`、`risk_level`、`ent_source`、`data_source` 等字段。
- [x] 在 Service 接口中定义分页、详情、新增、编辑、删除、导出列表、导入任务提交方法。
- [x] 在 ServiceImpl 中实现主键唯一校验、编辑存在性校验、枚举值校验和删除批量处理。
- [x] 新建 Controller,接口路径统一使用 `/ccdi/enterpriseBaseInfo`,返回风格完全对齐员工信息维护。
- [x] 导入模板下载复用 `EasyExcelUtil.importTemplateWithDictDropdown`。
### Task 4: 实现异步导入与失败记录查询

**Files:**

- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/ICcdiEnterpriseBaseInfoImportService.java`
- Create: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiEnterpriseBaseInfoImportServiceImpl.java`
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiEnterpriseBaseInfoServiceImpl.java`
- Modify: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/controller/CcdiEnterpriseBaseInfoController.java`
- Reference: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiBaseStaffImportServiceImpl.java`
- Reference: `ccdi-info-collection/src/main/java/com/ruoyi/info/collection/service/impl/CcdiIntermediaryEntityImportServiceImpl.java`

- [x] 在 ServiceImpl 中提交导入任务,Redis key 建议使用 `import:enterpriseBaseInfo:{taskId}`。
- [x] 在导入服务中实现 Excel 行校验,数据库重复与 Excel 内重复统一生成失败记录。
- [x] 新增 `riskLevel`、`entSource`、`dataSource` 枚举校验,拒绝非法值。
- [x] 成功记录分批批量插入;导入不支持更新,不提供 `updateSupport` 分支逻辑。
- [x] 失败记录写入 Redis,状态统一支持 `PROCESSING`、`SUCCESS`、`PARTIAL_SUCCESS`。
- [x] Controller 补 `/importData`、`/importStatus/{taskId}`、`/importFailures/{taskId}` 三个接口,分页失败记录方式与员工信息维护一致。
### Task 5: 补菜单 SQL 和权限口径

**Files:**

- Create: `sql/migration/2026-04-17-add-enterprise-base-info-menu.sql`
- Reference: `sql/ccdi_staff_fmy_relation_menu.sql`
- Reference: `sql/migration/2026-04-13-add-ccdi-account-info-menu.sql`

- [x] 在“信息维护”目录下新增“实体库管理”菜单。
- [x] 菜单 path 固定为 `enterpriseBaseInfo`,component 固定为 `ccdiEnterpriseBaseInfo/index`。
- [x] 功能权限至少包含 `list`、`query`、`add`、`edit`、`remove`、`import`。
- [x] SQL 保持幂等写法,避免重复插入菜单。
### Task 6: 补后端测试与验证命令

**Files:**

- Create: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiEnterpriseBaseInfoServiceImplTest.java`
- Create: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/service/CcdiEnterpriseBaseInfoImportServiceImplTest.java`
- Create: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/mapper/CcdiEnterpriseBaseInfoMapperTest.java`
- Create: `ccdi-info-collection/src/test/java/com/ruoyi/info/collection/controller/CcdiEnumControllerTest.java`

- [x] 为新增、编辑、删除、详情和枚举值校验补服务测试。
- [x] 为导入数据库重复失败、Excel 内重复失败补导入服务测试。
- [x] 为 Mapper XML 的分页查询关键 SQL 补测试或最小断言。
- [x] 为新增枚举接口补 Controller 测试。
- [x] 执行后端验证命令并记录结果。
## 验证命令

```bash
mvn -pl ccdi-info-collection -Dtest=CcdiEnterpriseBaseInfoServiceImplTest,CcdiEnterpriseBaseInfoImportServiceImplTest,CcdiEnterpriseBaseInfoMapperTest,CcdiEnumControllerTest test
mvn -pl ccdi-info-collection -DskipTests compile
```
## 执行结果

- 实际测试命令:`mvn -pl ccdi-info-collection -am -Dsurefire.failIfNoSpecifiedTests=false -Dtest=CcdiEnterpriseBaseInfoServiceImplTest,CcdiEnterpriseBaseInfoImportServiceImplTest,CcdiEnterpriseBaseInfoMapperTest,CcdiEnumControllerTest test`
- 测试结果:`BUILD SUCCESS`,共执行 11 个测试,`Failures: 0, Errors: 0, Skipped: 0`
- 实际编译命令:`mvn -pl ccdi-info-collection -am -DskipTests compile`
- 编译结果:`BUILD SUCCESS`
## 完成标准

- 后端新增独立 `/ccdi/enterpriseBaseInfo` 管理接口
- 列表、详情、新增、编辑、删除链路可用
- 导入严格新增,数据库重复与 Excel 内重复都进入失败记录
- `riskLevel`、`entSource`、`dataSource` 均有统一选项口径
- 菜单 SQL 与权限标识已补齐
- 后端定向测试与编译验证已执行并记录结果
@@ -0,0 +1,23 @@
# 修复员工资产信息表注释乱码后端实施计划

## 变更目标

- 修复 `ccdi_asset_info` 表中 `family_id`、`person_id` 列注释乱码问题
- 保持表结构、字段类型和业务数据不变,仅修正元数据注释

## 变更范围

- `sql/migration/2026-04-17-fix-ccdi-asset-info-comment-encoding.sql`

## 实施步骤

1. 查询 `information_schema.COLUMNS`,确认 `ccdi_asset_info` 列注释实际存在乱码
2. 新增增量 SQL,使用 `ALTER TABLE ... MODIFY COLUMN ... COMMENT` 修复 `family_id`、`person_id` 注释
3. 通过 `bin/mysql_utf8_exec.sh` 以 `utf8mb4` 会话执行脚本
4. 再次查询 `information_schema.COLUMNS` 验证注释已恢复为中文

## 验证要点

- `family_id` 注释显示为“归属员工证件号”
- `person_id` 注释显示为“资产实际持有人证件号”
- 字段类型仍为 `VARCHAR(100) NOT NULL`
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user