Compare commits

...

31 Commits

Author SHA1 Message Date
wkc
ca591ebd3c 补充全量打标规则验证与未命中追因记录 2026-03-23 09:08:14 +08:00
wkc
e3dcc65780 补充LSFX Mock启动说明文档 2026-03-22 17:04:35 +08:00
wkc
8a18ccc5bf Merge branch 'codex/lsfx-rule-hit-mode-backend' into dev 2026-03-22 16:20:58 +08:00
wkc
26ec386394 补充Mock命中模式后端实施与验证记录 2026-03-22 16:18:53 +08:00
wkc
6bfe7f83f2 补充Mock全部兼容规则命中计划 2026-03-22 16:14:38 +08:00
wkc
e6809c67fe 补充Mock规则命中模式启动参数 2026-03-22 12:59:12 +08:00
wkc
07ed6c76c5 补充 lsfx 命中模式前后端实施计划 2026-03-22 12:42:44 +08:00
wkc
ad88dc4c47 补充 lsfx 命中模式切换设计文档 2026-03-22 12:40:03 +08:00
wkc
dda1da78db 补充结果总览员工结果表前端验证记录 2026-03-22 12:07:46 +08:00
wkc
d619a7a34c 补充结果总览前端展示回归保护 2026-03-22 12:06:42 +08:00
wkc
44c782c1e2 锁定结果总览前端接口契约 2026-03-22 12:06:38 +08:00
wkc
69dfe92ce8 Merge branch 'codex/results-overview-employee-result-table' into dev 2026-03-22 12:01:12 +08:00
wkc
644485c4a2 补充结果总览员工结果表后端实施记录 2026-03-22 11:54:35 +08:00
wkc
ef106169dc 切换结果总览查询到员工结果表 2026-03-22 11:52:09 +08:00
wkc
ef0eb2dc72 Merge branch 'codex/lsfx-mock-phase2-random-hit' into dev 2026-03-22 11:48:23 +08:00
wkc
cc209f04e2 完成LSFX Mock第二期稳定随机命中后端实施 2026-03-22 11:48:22 +08:00
wkc
f539c4ba27 接入结果总览员工结果同步重算 2026-03-22 11:47:37 +08:00
wkc
0a58ac3251 实现结果总览员工结果聚合构建 2026-03-22 11:45:26 +08:00
wkc
ec006f202b 新增结果总览员工结果表结构 2026-03-22 11:40:46 +08:00
wkc
4c9ffb3fde 新增结果总览员工结果表实施计划 2026-03-20 17:14:12 +08:00
wkc
f2d1ad912f 新增结果总览员工结果表设计文档 2026-03-20 17:09:09 +08:00
wkc
1958a00ce6 修复银行流水批量入库MySQL1869异常 2026-03-20 17:06:18 +08:00
wkc
4e4af5d9fb 补充LSFX Mock第二期稳定随机命中实施计划 2026-03-20 17:00:50 +08:00
wkc
782e41c0f0 补充LSFX Mock第二期稳定随机命中设计文档 2026-03-20 16:56:44 +08:00
wkc
1d89e9e77a 合并第二期流水真实规则后端修复 2026-03-20 16:32:14 +08:00
wkc
a0e8057d70 修复第二期流水规则元数据 2026-03-20 16:31:58 +08:00
wkc
085e2e7e35 Merge branch 'dev' into codex/bank-tag-real-rule-phase2-backend 2026-03-20 16:28:47 +08:00
wkc
dcf4ea603a 补充第二期流水模型后端实施记录 2026-03-20 15:00:29 +08:00
wkc
f270d79502 实现第二期对象聚合规则真实SQL 2026-03-20 14:56:15 +08:00
wkc
9451bbcc10 实现第二期资产比对规则真实SQL 2026-03-20 14:49:28 +08:00
wkc
6d0c6c2abf 补齐第二期流水模型参数映射 2026-03-20 14:45:39 +08:00
83 changed files with 6953 additions and 160 deletions

View File

@@ -0,0 +1,64 @@
package com.ruoyi.ccdi.project.domain.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.io.Serial;
import java.io.Serializable;
import java.util.Date;
/**
 * Results-overview employee result entity: one aggregated row per employee,
 * persisted to table {@code ccdi_project_overview_employee_result}.
 */
@Data
@TableName("ccdi_project_overview_employee_result")
public class CcdiProjectOverviewEmployeeResult implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    /** Auto-increment primary key. */
    @TableId(type = IdType.AUTO)
    private Long id;
    /** Project this result belongs to. */
    private Long projectId;
    /** Employee ID-card number — the grouping key of the aggregation. */
    private String staffIdCard;
    /** Employee staff code. */
    private String staffCode;
    /** Employee name. */
    private String staffName;
    /** Department ID. */
    private Long deptId;
    /** Department name. */
    private String deptName;
    /** Number of distinct rules this employee hit. */
    private Integer ruleCount;
    /** Number of distinct models this employee hit. */
    private Integer modelCount;
    /** Total number of raw hit rows for this employee. */
    private Integer hitCount;
    /** Risk level code derived from ruleCount (e.g. HIGH / MEDIUM / LOW). */
    private String riskLevelCode;
    /** Concatenated names of the hit rules (display field). */
    private String riskPoint;
    /** Comma-separated codes of the hit models. */
    private String modelCodesCsv;
    /** JSON array of hit model names. */
    private String modelNamesJson;
    /** JSON array of per-rule hit summaries. */
    private String hitRulesJson;
    /** JSON array of per-model hit summaries. */
    private String modelHitSummaryJson;
    /** Audit: creator. */
    private String createBy;
    /** Audit: creation time. */
    private Date createTime;
    /** Audit: last updater. */
    private String updateBy;
    /** Audit: last update time. */
    private Date updateTime;
    /** Free-form remark. */
    private String remark;
}

View File

@@ -0,0 +1,32 @@
package com.ruoyi.ccdi.project.domain.vo;
import lombok.Data;
/**
 * Results-overview employee hit-detail row: one raw rule hit for one employee,
 * as read back from the hit tables before per-employee aggregation.
 */
@Data
public class CcdiProjectOverviewEmployeeHitRowVO {
    /** Project the hit belongs to. */
    private Long projectId;
    /** Employee ID-card number (join/grouping key). */
    private String staffIdCard;
    /** Employee name. */
    private String staffName;
    /** Employee staff code. */
    private String staffCode;
    /** Department ID. */
    private Long deptId;
    /** Department name. */
    private String deptName;
    /** Code of the model whose rule produced the hit. */
    private String modelCode;
    /** Name of that model. */
    private String modelName;
    /** Code of the rule that produced the hit. */
    private String ruleCode;
    /** Name of that rule. */
    private String ruleName;
    /** Risk level assigned to the rule. */
    private String riskLevel;
}

View File

@@ -0,0 +1,16 @@
package com.ruoyi.ccdi.project.domain.vo;
import lombok.Data;
/**
 * Results-overview per-model hit summary for a single employee.
 */
@Data
public class CcdiProjectOverviewEmployeeModelSummaryVO {
    /** Model code. */
    private String modelCode;
    /** Model name. */
    private String modelName;
    /** Number of hit rows this employee has under the model. */
    private Integer warningCount;
}

View File

@@ -0,0 +1,20 @@
package com.ruoyi.ccdi.project.domain.vo;
import lombok.Data;
/**
 * Results-overview per-rule hit summary for a single employee.
 */
@Data
public class CcdiProjectOverviewEmployeeRuleSummaryVO {
    /** Code of the model owning the rule. */
    private String modelCode;
    /** Rule code. */
    private String ruleCode;
    /** Rule name. */
    private String ruleName;
    /** Risk level assigned to the rule. */
    private String riskLevel;
    /** Number of hit rows this employee has under the rule. */
    private Integer warningCount;
}

View File

@@ -110,9 +110,13 @@ public interface CcdiBankTagAnalysisMapper {
* 疑似赌博交易 * 疑似赌博交易
* *
* @param projectId 项目ID * @param projectId 项目ID
* @param amountMinThreshold 可疑金额下限
* @param amountMaxThreshold 可疑金额上限
* @return 对象命中结果 * @return 对象命中结果
*/ */
List<BankTagObjectHitVO> selectMultiPartyGamblingTransferObjects(@Param("projectId") Long projectId); List<BankTagObjectHitVO> selectMultiPartyGamblingTransferObjects(@Param("projectId") Long projectId,
@Param("amountMinThreshold") BigDecimal amountMinThreshold,
@Param("amountMaxThreshold") BigDecimal amountMaxThreshold);
/** /**
* 疑似敏感交易 * 疑似敏感交易
@@ -134,17 +138,23 @@ public interface CcdiBankTagAnalysisMapper {
* 月度固定收入疑似兼职 * 月度固定收入疑似兼职
* *
* @param projectId 项目ID * @param projectId 项目ID
* @param threshold 月度固定收入阈值
* @return 对象命中结果 * @return 对象命中结果
*/ */
List<BankTagObjectHitVO> selectMonthlyFixedIncomeObjects(@Param("projectId") Long projectId); List<BankTagObjectHitVO> selectMonthlyFixedIncomeObjects(@Param("projectId") Long projectId,
@Param("threshold") BigDecimal threshold);
/** /**
* 固定交易对手转入疑似兼职 * 固定交易对手转入疑似兼职
* *
* @param projectId 项目ID * @param projectId 项目ID
* @param quarterMinThreshold 季度收入下限
* @param quarterMaxThreshold 季度收入上限
* @return 对象命中结果 * @return 对象命中结果
*/ */
List<BankTagObjectHitVO> selectFixedCounterpartyTransferObjects(@Param("projectId") Long projectId); List<BankTagObjectHitVO> selectFixedCounterpartyTransferObjects(@Param("projectId") Long projectId,
@Param("quarterMinThreshold") BigDecimal quarterMinThreshold,
@Param("quarterMaxThreshold") BigDecimal quarterMaxThreshold);
/** /**
* 摘要收入疑似兼职 * 摘要收入疑似兼职

View File

@@ -0,0 +1,46 @@
package com.ruoyi.ccdi.project.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeHitRowVO;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Mapper for the results-overview employee result table.
 */
public interface CcdiProjectOverviewEmployeeResultMapper extends BaseMapper<CcdiProjectOverviewEmployeeResult> {
    /**
     * Deletes all results of a project.
     *
     * @param projectId project ID
     * @return number of deleted rows
     */
    int deleteByProjectId(@Param("projectId") Long projectId);
    /**
     * Inserts results in a single batch.
     *
     * @param list result rows to insert
     * @return number of inserted rows
     */
    int insertBatch(@Param("list") List<CcdiProjectOverviewEmployeeResult> list);
    /**
     * Selects all results of a project.
     *
     * @param projectId project ID
     * @return result rows
     */
    List<CcdiProjectOverviewEmployeeResult> selectByProjectId(@Param("projectId") Long projectId);
    /**
     * Selects the raw per-employee hit detail rows of a project
     * (input of the per-employee aggregation).
     *
     * @param projectId project ID
     * @return hit detail rows
     */
    List<CcdiProjectOverviewEmployeeHitRowVO> selectEmployeeHitRowsByProjectId(@Param("projectId") Long projectId);
}

View File

@@ -56,6 +56,14 @@ public interface ICcdiProjectOverviewService {
return new CcdiProjectRiskModelPeopleVO(); return new CcdiProjectRiskModelPeopleVO();
} }
/**
* 重算结果总览员工结果并同步项目风险人数
*
* @param projectId 项目ID
* @param operator 操作人
*/
void refreshOverviewEmployeeResults(Long projectId, String operator);
/** /**
* 刷新项目风险人数 * 刷新项目风险人数
* *

View File

@@ -35,7 +35,10 @@ public class BankTagRuleConfigResolver {
Map.entry("FOREX_SELL_AMT", Set.of("SINGLE_SETTLEMENT_AMOUNT")), Map.entry("FOREX_SELL_AMT", Set.of("SINGLE_SETTLEMENT_AMOUNT")),
Map.entry("WITHDRAW_CNT", Set.of("WITHDRAW_CNT")), Map.entry("WITHDRAW_CNT", Set.of("WITHDRAW_CNT")),
Map.entry("STOCK_TFR_LARGE", Set.of("STOCK_TFR_LARGE")), Map.entry("STOCK_TFR_LARGE", Set.of("STOCK_TFR_LARGE")),
Map.entry("LARGE_STOCK_TRADING", Set.of("STOCK_TFR_LARGE")) Map.entry("LARGE_STOCK_TRADING", Set.of("STOCK_TFR_LARGE")),
Map.entry("MULTI_PARTY_GAMBLING_TRANSFER", Set.of("MULTI_PARTY_AMT_MIN", "MULTI_PARTY_AMT_MAX")),
Map.entry("MONTHLY_FIXED_INCOME", Set.of("MONTHLY_FIXED_INCOME")),
Map.entry("FIXED_COUNTERPARTY_TRANSFER", Set.of("FIXED_COUNTERPARTY_TRANSFER_MIN", "FIXED_COUNTERPARTY_TRANSFER_MAX"))
); );
@Resource @Resource

View File

@@ -129,7 +129,7 @@ public class CcdiBankTagServiceImpl implements ICcdiBankTagService {
resultMapper.insertBatch(allResults); resultMapper.insertBatch(allResults);
} }
projectOverviewService.refreshProjectRiskCounts(projectId, operator); projectOverviewService.refreshOverviewEmployeeResults(projectId, operator);
task.setStatus(STATUS_SUCCESS); task.setStatus(STATUS_SUCCESS);
task.setSuccessRuleCount(rules.size()); task.setSuccessRuleCount(rules.size());
@@ -267,9 +267,19 @@ public class CcdiBankTagServiceImpl implements ICcdiBankTagService {
toInteger(config.getThresholdValue("FREQUENT_CASH_DEPOSIT")) toInteger(config.getThresholdValue("FREQUENT_CASH_DEPOSIT"))
); );
case "LOW_INCOME_RELATIVE_LARGE_TRANSACTION" -> analysisMapper.selectLowIncomeRelativeLargeTransactionObjects(projectId); case "LOW_INCOME_RELATIVE_LARGE_TRANSACTION" -> analysisMapper.selectLowIncomeRelativeLargeTransactionObjects(projectId);
case "MULTI_PARTY_GAMBLING_TRANSFER" -> analysisMapper.selectMultiPartyGamblingTransferObjects(projectId); case "MULTI_PARTY_GAMBLING_TRANSFER" -> analysisMapper.selectMultiPartyGamblingTransferObjects(
case "MONTHLY_FIXED_INCOME" -> analysisMapper.selectMonthlyFixedIncomeObjects(projectId); projectId,
case "FIXED_COUNTERPARTY_TRANSFER" -> analysisMapper.selectFixedCounterpartyTransferObjects(projectId); toBigDecimal(config.getThresholdValue("MULTI_PARTY_AMT_MIN")),
toBigDecimal(config.getThresholdValue("MULTI_PARTY_AMT_MAX"))
);
case "MONTHLY_FIXED_INCOME" -> analysisMapper.selectMonthlyFixedIncomeObjects(
projectId, toBigDecimal(config.getThresholdValue("MONTHLY_FIXED_INCOME"))
);
case "FIXED_COUNTERPARTY_TRANSFER" -> analysisMapper.selectFixedCounterpartyTransferObjects(
projectId,
toBigDecimal(config.getThresholdValue("FIXED_COUNTERPARTY_TRANSFER_MIN")),
toBigDecimal(config.getThresholdValue("FIXED_COUNTERPARTY_TRANSFER_MAX"))
);
case "INTEREST_PAYMENT_BY_OTHERS" -> analysisMapper.selectInterestPaymentByOthersObjects(projectId); case "INTEREST_PAYMENT_BY_OTHERS" -> analysisMapper.selectInterestPaymentByOthersObjects(projectId);
case "SUPPLIER_CONCENTRATION" -> analysisMapper.selectSupplierConcentrationObjects(projectId); case "SUPPLIER_CONCENTRATION" -> analysisMapper.selectSupplierConcentrationObjects(projectId);
case "WITHDRAW_CNT" -> analysisMapper.selectWithdrawCntObjects( case "WITHDRAW_CNT" -> analysisMapper.selectWithdrawCntObjects(

View File

@@ -0,0 +1,172 @@
package com.ruoyi.ccdi.project.service.impl;
import com.alibaba.fastjson2.JSON;
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeHitRowVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeModelSummaryVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeRuleSummaryVO;
import org.springframework.stereotype.Component;
import java.util.Comparator;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Results-overview employee result builder: merges raw per-rule hit rows into
 * one aggregated result entity per employee (grouped by ID-card number).
 */
@Component
public class CcdiProjectOverviewEmployeeResultBuilder {
    /**
     * Groups hit rows by employee ID card and builds one result entity per employee.
     *
     * @param projectId project ID
     * @param hitRows   raw hit rows (one per rule hit); rows with a blank ID card are skipped
     * @param operator  operator recorded into the create/update audit fields
     * @return result entities ordered by ID card; empty list when there are no usable rows
     */
    public List<CcdiProjectOverviewEmployeeResult> build(Long projectId,
                                                         List<CcdiProjectOverviewEmployeeHitRowVO> hitRows,
                                                         String operator) {
        if (hitRows == null || hitRows.isEmpty()) {
            return List.of();
        }
        Date now = new Date();
        return hitRows.stream()
                .filter(item -> isNotBlank(item.getStaffIdCard()))
                .collect(Collectors.groupingBy(
                        CcdiProjectOverviewEmployeeHitRowVO::getStaffIdCard,
                        LinkedHashMap::new,
                        Collectors.toList()
                ))
                .entrySet()
                .stream()
                // deterministic output order: sort groups by ID card
                .sorted(Map.Entry.comparingByKey())
                .map(entry -> buildSingleResult(projectId, entry.getKey(), entry.getValue(), operator, now))
                .toList();
    }
    /** Builds the aggregated entity for one employee from that employee's hit rows. */
    private CcdiProjectOverviewEmployeeResult buildSingleResult(Long projectId,
                                                                String staffIdCard,
                                                                List<CcdiProjectOverviewEmployeeHitRowVO> staffRows,
                                                                String operator,
                                                                Date now) {
        List<CcdiProjectOverviewEmployeeRuleSummaryVO> ruleSummaries = buildRuleSummaries(staffRows);
        List<CcdiProjectOverviewEmployeeModelSummaryVO> modelSummaries = buildModelSummaries(staffRows);
        CcdiProjectOverviewEmployeeResult result = new CcdiProjectOverviewEmployeeResult();
        result.setProjectId(projectId);
        result.setStaffIdCard(staffIdCard);
        // identity fields: take the first non-blank/non-null value seen in the rows
        result.setStaffName(firstNonBlank(staffRows, CcdiProjectOverviewEmployeeHitRowVO::getStaffName));
        result.setStaffCode(firstNonBlank(staffRows, CcdiProjectOverviewEmployeeHitRowVO::getStaffCode));
        result.setDeptId(firstNonNull(staffRows, CcdiProjectOverviewEmployeeHitRowVO::getDeptId));
        result.setDeptName(firstNonBlank(staffRows, CcdiProjectOverviewEmployeeHitRowVO::getDeptName));
        result.setRuleCount(ruleSummaries.size());
        result.setModelCount(modelSummaries.size());
        result.setHitCount(staffRows.size());
        result.setRiskLevelCode(resolveRiskLevelCode(ruleSummaries.size()));
        // FIX: was joining("") which ran the rule names together with no separator;
        // use "、" to match the GROUP_CONCAT separator used by the SQL layer.
        result.setRiskPoint(ruleSummaries.stream()
                .map(CcdiProjectOverviewEmployeeRuleSummaryVO::getRuleName)
                .filter(this::isNotBlank)
                .collect(Collectors.joining("、")));
        result.setModelCodesCsv(modelSummaries.stream()
                .map(CcdiProjectOverviewEmployeeModelSummaryVO::getModelCode)
                .collect(Collectors.joining(",")));
        result.setModelNamesJson(JSON.toJSONString(modelSummaries.stream()
                .map(CcdiProjectOverviewEmployeeModelSummaryVO::getModelName)
                .toList()));
        result.setHitRulesJson(JSON.toJSONString(ruleSummaries));
        result.setModelHitSummaryJson(JSON.toJSONString(modelSummaries));
        result.setCreateBy(operator);
        result.setCreateTime(now);
        result.setUpdateBy(operator);
        result.setUpdateTime(now);
        return result;
    }
    /**
     * Aggregates one employee's rows per rule code; each summary takes its metadata
     * from the first row of the group and counts the group's rows.
     * Ordered by warning count desc, then rule code.
     */
    private List<CcdiProjectOverviewEmployeeRuleSummaryVO> buildRuleSummaries(
            List<CcdiProjectOverviewEmployeeHitRowVO> staffRows
    ) {
        return staffRows.stream()
                .collect(Collectors.groupingBy(
                        CcdiProjectOverviewEmployeeHitRowVO::getRuleCode,
                        LinkedHashMap::new,
                        Collectors.toList()
                ))
                .values()
                .stream()
                .map(rows -> {
                    CcdiProjectOverviewEmployeeRuleSummaryVO summary = new CcdiProjectOverviewEmployeeRuleSummaryVO();
                    CcdiProjectOverviewEmployeeHitRowVO first = rows.getFirst();
                    summary.setModelCode(first.getModelCode());
                    summary.setRuleCode(first.getRuleCode());
                    summary.setRuleName(first.getRuleName());
                    summary.setRiskLevel(first.getRiskLevel());
                    summary.setWarningCount(rows.size());
                    return summary;
                })
                .sorted(Comparator.comparing(CcdiProjectOverviewEmployeeRuleSummaryVO::getWarningCount).reversed()
                        .thenComparing(CcdiProjectOverviewEmployeeRuleSummaryVO::getRuleCode))
                .toList();
    }
    /**
     * Aggregates one employee's rows per model code; ordered by model code.
     */
    private List<CcdiProjectOverviewEmployeeModelSummaryVO> buildModelSummaries(
            List<CcdiProjectOverviewEmployeeHitRowVO> staffRows
    ) {
        return staffRows.stream()
                .collect(Collectors.groupingBy(
                        CcdiProjectOverviewEmployeeHitRowVO::getModelCode,
                        LinkedHashMap::new,
                        Collectors.toList()
                ))
                .values()
                .stream()
                .map(rows -> {
                    CcdiProjectOverviewEmployeeModelSummaryVO summary = new CcdiProjectOverviewEmployeeModelSummaryVO();
                    CcdiProjectOverviewEmployeeHitRowVO first = rows.getFirst();
                    summary.setModelCode(first.getModelCode());
                    summary.setModelName(first.getModelName());
                    summary.setWarningCount(rows.size());
                    return summary;
                })
                .sorted(Comparator.comparing(CcdiProjectOverviewEmployeeModelSummaryVO::getModelCode))
                .toList();
    }
    /** Maps the number of distinct rules hit to a risk level code: >=5 HIGH, >=2 MEDIUM, else LOW. */
    private String resolveRiskLevelCode(int ruleCount) {
        if (ruleCount >= 5) {
            return "HIGH";
        }
        if (ruleCount >= 2) {
            return "MEDIUM";
        }
        return "LOW";
    }
    /** Returns the first non-blank value produced by {@code getter}, or null. */
    private String firstNonBlank(List<CcdiProjectOverviewEmployeeHitRowVO> staffRows,
                                 java.util.function.Function<CcdiProjectOverviewEmployeeHitRowVO, String> getter) {
        return staffRows.stream()
                .map(getter)
                .filter(this::isNotBlank)
                .findFirst()
                .orElse(null);
    }
    /** Returns the first non-null value produced by {@code getter}, or null. */
    private <T> T firstNonNull(List<CcdiProjectOverviewEmployeeHitRowVO> staffRows,
                               java.util.function.Function<CcdiProjectOverviewEmployeeHitRowVO, T> getter) {
        return staffRows.stream()
                .map(getter)
                .filter(Objects::nonNull)
                .findFirst()
                .orElse(null);
    }
    /** True when the value is neither null nor blank. */
    private boolean isNotBlank(String value) {
        return value != null && !value.isBlank();
    }
}

View File

@@ -3,8 +3,10 @@ package com.ruoyi.ccdi.project.service.impl;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoyi.ccdi.project.domain.CcdiProject; import com.ruoyi.ccdi.project.domain.CcdiProject;
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO; import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeRiskAggregateVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeRiskAggregateVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewDashboardVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewDashboardVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeHitRowVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewStatVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewStatVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelCardsVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelCardsVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelPeopleItemVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelPeopleItemVO;
@@ -14,6 +16,7 @@ import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskPeopleOverviewVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectTopRiskPeopleItemVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectTopRiskPeopleItemVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectTopRiskPeopleVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectTopRiskPeopleVO;
import com.ruoyi.ccdi.project.mapper.CcdiProjectMapper; import com.ruoyi.ccdi.project.mapper.CcdiProjectMapper;
import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewEmployeeResultMapper;
import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewMapper; import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewMapper;
import com.ruoyi.ccdi.project.service.ICcdiProjectOverviewService; import com.ruoyi.ccdi.project.service.ICcdiProjectOverviewService;
import com.ruoyi.common.exception.ServiceException; import com.ruoyi.common.exception.ServiceException;
@@ -37,6 +40,12 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
@Resource @Resource
private CcdiProjectMapper projectMapper; private CcdiProjectMapper projectMapper;
@Resource
private CcdiProjectOverviewEmployeeResultMapper overviewEmployeeResultMapper;
@Resource
private CcdiProjectOverviewEmployeeResultBuilder overviewEmployeeResultBuilder;
@Override @Override
public CcdiProjectOverviewDashboardVO getDashboard(Long projectId) { public CcdiProjectOverviewDashboardVO getDashboard(Long projectId) {
CcdiProject project = overviewMapper.selectDashboardBaseByProjectId(projectId); CcdiProject project = overviewMapper.selectDashboardBaseByProjectId(projectId);
@@ -122,6 +131,29 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
return people; return people;
} }
@Override
@Transactional(rollbackFor = Exception.class)
public void refreshOverviewEmployeeResults(Long projectId, String operator) {
getRequiredProject(projectId);
overviewEmployeeResultMapper.deleteByProjectId(projectId);
List<CcdiProjectOverviewEmployeeHitRowVO> hitRows =
overviewEmployeeResultMapper.selectEmployeeHitRowsByProjectId(projectId);
List<CcdiProjectOverviewEmployeeResult> results =
overviewEmployeeResultBuilder.build(projectId, hitRows, operator);
if (!results.isEmpty()) {
overviewEmployeeResultMapper.insertBatch(results);
}
projectMapper.updateRiskCountsByProjectId(
projectId,
countRiskLevel(results, "HIGH"),
countRiskLevel(results, "MEDIUM"),
countRiskLevel(results, "LOW"),
operator
);
}
@Override @Override
@Transactional(rollbackFor = Exception.class) @Transactional(rollbackFor = Exception.class)
public void refreshProjectRiskCounts(Long projectId, String operator) { public void refreshProjectRiskCounts(Long projectId, String operator) {
@@ -196,6 +228,12 @@ public class CcdiProjectOverviewServiceImpl implements ICcdiProjectOverviewServi
throw new ServiceException("项目风险人数统计结果类型异常"); throw new ServiceException("项目风险人数统计结果类型异常");
} }
private Integer countRiskLevel(List<CcdiProjectOverviewEmployeeResult> results, String riskLevelCode) {
return Math.toIntExact(results.stream()
.filter(item -> riskLevelCode.equals(item.getRiskLevelCode()))
.count());
}
private Integer defaultZero(Integer value) { private Integer defaultZero(Integer value) {
return value == null ? 0 : value; return value == null ? 0 : value;
} }

View File

@@ -403,7 +403,7 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
) )
</foreach> </foreach>
on duplicate key update on duplicate key update
bank_statement_id = bank_statement_id batch_id = batch_id
</insert> </insert>
<delete id="deleteByProjectIdAndBatchId"> <delete id="deleteByProjectIdAndBatchId">

View File

@@ -124,6 +124,22 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
) )
</sql> </sql>
<sql id="salaryIncomePredicate">
bs.CUSTOMER_ACCOUNT_NAME = '浙江兰溪农村商业银行股份有限公司'
and (
IFNULL(bs.USER_MEMO, '') REGEXP '代发|工资|奖金|薪酬|薪金|补贴|薪|年终奖|年金|加班费|劳务费|劳务外包|提成|劳务派遣|绩效|酬劳|PAYROLL|SALA|CPF|directors.*fees'
or IFNULL(bs.CASH_TYPE, '') REGEXP '代发|工资|劳务费'
)
</sql>
<sql id="salaryDeductionPredicate">
(
IFNULL(bs.USER_MEMO, '') REGEXP '代扣|个税|社保|公积金|水费|电费|燃气|话费|党费|医保'
or IFNULL(bs.CASH_TYPE, '') REGEXP '代扣|个税|社保|公积金'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '税务|社保|公积金'
)
</sql>
<select id="selectHouseOrCarExpenseStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectHouseOrCarExpenseStatements" resultMap="BankTagStatementHitResultMap">
select select
bs.bank_statement_id AS bankStatementId, bs.bank_statement_id AS bankStatementId,
@@ -376,19 +392,89 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<select id="selectLowIncomeRelativeLargeTransactionObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectLowIncomeRelativeLargeTransactionObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '低收入关系人累计交易 ', CAST(t.totalAmount AS CHAR),
where 1 = 0 ' 元,命中关系人数 ', CAST(t.relationCount AS CHAR), ' 人'
) AS reasonDetail
from (
select
relation.person_id AS objectKey,
ROUND(SUM(IFNULL(bs.AMOUNT_DR, 0) + IFNULL(bs.AMOUNT_CR, 0)), 2) AS totalAmount,
COUNT(DISTINCT relation.relation_cert_no) AS relationCount
from ccdi_staff_fmy_relation relation
inner join ccdi_bank_statement bs on relation.relation_cert_no = bs.cret_no
where relation.status = 1
and (
relation.annual_income is null
or relation.annual_income = 0
or relation.annual_income / 12 &lt; 3000
)
and bs.project_id = #{projectId}
and IFNULL(bs.LE_ACCOUNT_NAME, '') &lt;&gt; IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '')
group by relation.person_id
having SUM(IFNULL(bs.AMOUNT_DR, 0) + IFNULL(bs.AMOUNT_CR, 0)) > 100000
) t
</select> </select>
<select id="selectMultiPartyGamblingTransferObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectMultiPartyGamblingTransferObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '交易日 ', MAX(t.tradeDate),
where 1 = 0 ' 发生 ', CAST(MAX(t.hitCount) AS CHAR),
' 笔疑似赌博交易,涉及 ', CAST(MAX(t.partyCount) AS CHAR),
' 个对手方,金额合计 ', CAST(MAX(t.totalAmount) AS CHAR), ' 元'
) AS reasonDetail
from (
select
source.objectKey AS objectKey,
source.tradeDate AS tradeDate,
COUNT(1) AS hitCount,
COUNT(DISTINCT source.customerAccountName) AS partyCount,
ROUND(SUM(source.tradeAmount), 2) AS totalAmount
from (
select
staff.id_card AS objectKey,
LEFT(TRIM(bs.TRX_DATE), 10) AS tradeDate,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName,
GREATEST(IFNULL(bs.AMOUNT_DR, 0), IFNULL(bs.AMOUNT_CR, 0)) AS tradeAmount
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and GREATEST(IFNULL(bs.AMOUNT_DR, 0), IFNULL(bs.AMOUNT_CR, 0)) between #{amountMinThreshold} and #{amountMaxThreshold}
and IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') &lt;&gt; ''
and (
IFNULL(bs.USER_MEMO, '') REGEXP '微信|wechat|WeChat|财付通|Tenpay|支付宝|Alipay|转账|红包'
or IFNULL(bs.CASH_TYPE, '') REGEXP '微信|wechat|WeChat|财付通|Tenpay|支付宝|Alipay|转账|红包'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '微信|wechat|WeChat|财付通|Tenpay|支付宝|Alipay'
)
union all
select
relation.person_id AS objectKey,
LEFT(TRIM(bs.TRX_DATE), 10) AS tradeDate,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName,
GREATEST(IFNULL(bs.AMOUNT_DR, 0), IFNULL(bs.AMOUNT_CR, 0)) AS tradeAmount
from ccdi_bank_statement bs
inner join ccdi_staff_fmy_relation relation on relation.relation_cert_no = bs.cret_no
where relation.status = 1
and bs.project_id = #{projectId}
and GREATEST(IFNULL(bs.AMOUNT_DR, 0), IFNULL(bs.AMOUNT_CR, 0)) between #{amountMinThreshold} and #{amountMaxThreshold}
and IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') &lt;&gt; ''
and (
IFNULL(bs.USER_MEMO, '') REGEXP '微信|wechat|WeChat|财付通|Tenpay|支付宝|Alipay|转账|红包'
or IFNULL(bs.CASH_TYPE, '') REGEXP '微信|wechat|WeChat|财付通|Tenpay|支付宝|Alipay|转账|红包'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '微信|wechat|WeChat|财付通|Tenpay|支付宝|Alipay'
)
) source
group by source.objectKey, source.tradeDate
having COUNT(1) > 2
and COUNT(DISTINCT source.customerAccountName) >= 2
) t
group by t.objectKey
</select> </select>
<select id="selectGamblingSensitiveKeywordStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectGamblingSensitiveKeywordStatements" resultMap="BankTagStatementHitResultMap">
@@ -440,19 +526,110 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<select id="selectMonthlyFixedIncomeObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectMonthlyFixedIncomeObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '近12个月有 ', CAST(t.monthCount AS CHAR),
where 1 = 0 ' 个月固定收入超过阈值,月均收入 ', CAST(t.avgAmount AS CHAR), ' 元'
) AS reasonDetail
from (
select
monthly_income.idCard AS objectKey,
COUNT(DISTINCT monthly_income.incomeMonth) AS monthCount,
ROUND(AVG(monthly_income.monthAmount), 2) AS avgAmount
from (
select
staff.id_card AS idCard,
LEFT(TRIM(bs.TRX_DATE), 7) AS incomeMonth,
ROUND(SUM(IFNULL(bs.AMOUNT_CR, 0)), 2) AS monthAmount
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_CR, 0) > 0
and IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') &lt;&gt; ''
and IFNULL(bs.LE_ACCOUNT_NAME, '') &lt;&gt; IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '')
and <include refid="salaryExclusionPredicate"/>
and COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
group by staff.id_card, LEFT(TRIM(bs.TRX_DATE), 7)
having SUM(IFNULL(bs.AMOUNT_CR, 0)) > #{threshold}
) monthly_income
group by monthly_income.idCard
having COUNT(DISTINCT monthly_income.incomeMonth) >= 6
and STDDEV(monthly_income.monthAmount) / NULLIF(AVG(monthly_income.monthAmount), 0) &lt;= 0.3
) t
</select> </select>
<select id="selectFixedCounterpartyTransferObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectFixedCounterpartyTransferObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '固定对手“', t.customerAccountNames,
where 1 = 0 '”在 ', CAST(t.quarterCount AS CHAR),
' 个季度累计转入位于区间 [', CAST(#{quarterMinThreshold} AS CHAR),
', ', CAST(#{quarterMaxThreshold} AS CHAR), '] 元'
) AS reasonDetail
from (
select
stable_income.idCard AS objectKey,
GROUP_CONCAT(DISTINCT stable_income.customerAccountName ORDER BY stable_income.customerAccountName SEPARATOR '、') AS customerAccountNames,
MAX(stable_income.quarterCount) AS quarterCount
from (
select
quarter_income.idCard AS idCard,
quarter_income.customerAccountName AS customerAccountName,
COUNT(DISTINCT quarter_income.transQuarter) AS quarterCount
from (
select
staff.id_card AS idCard,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName,
CONCAT(
YEAR(COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
)),
'-Q',
QUARTER(COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
))
) AS transQuarter,
ROUND(SUM(IFNULL(bs.AMOUNT_CR, 0)), 2) AS quarterAmount
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_CR, 0) > 0
and IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') &lt;&gt; ''
and IFNULL(bs.LE_ACCOUNT_NAME, '') &lt;&gt; IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '')
and <include refid="salaryExclusionPredicate"/>
and COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
group by
staff.id_card,
bs.CUSTOMER_ACCOUNT_NAME,
CONCAT(
YEAR(COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
)),
'-Q',
QUARTER(COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
))
)
having SUM(IFNULL(bs.AMOUNT_CR, 0)) between #{quarterMinThreshold} and #{quarterMaxThreshold}
) quarter_income
group by quarter_income.idCard, quarter_income.customerAccountName
having COUNT(DISTINCT quarter_income.transQuarter) >= 2
) stable_income
group by stable_income.idCard
having COUNT(DISTINCT stable_income.customerAccountName) &lt; 3
) t
</select> </select>
<select id="selectSuspiciousIncomeKeywordStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectSuspiciousIncomeKeywordStatements" resultMap="BankTagStatementHitResultMap">
@@ -478,32 +655,176 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<select id="selectHouseRegistrationMismatchStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectHouseRegistrationMismatchStatements" resultMap="BankTagStatementHitResultMap">
select select
bs.bank_statement_id AS bankStatementId, trade.bankStatementId AS bankStatementId,
bs.group_id AS groupId, trade.groupId AS groupId,
bs.batch_id AS logId, trade.logId AS logId,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '购房交易金额 ', CAST(trade.amountDr AS CHAR),
where 1 = 0 ' 元,对手方“', IFNULL(trade.customerAccountName, ''),
'”,证件号 ', trade.personId, ' 名下无房产登记'
) AS reasonDetail
from (
select
staff.id_card AS personId,
bs.bank_statement_id AS bankStatementId,
bs.group_id AS groupId,
bs.batch_id AS logId,
IFNULL(bs.AMOUNT_DR, 0) AS amountDr,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_DR, 0) > 0
and (
IFNULL(bs.USER_MEMO, '') REGEXP '(购|买).*房|房款|首付'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '房地产|置业|置地|地产|房产|不动产|链家|贝壳|我爱我家|房管局'
)
union all
select
relation.relation_cert_no AS personId,
bs.bank_statement_id AS bankStatementId,
bs.group_id AS groupId,
bs.batch_id AS logId,
IFNULL(bs.AMOUNT_DR, 0) AS amountDr,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName
from ccdi_bank_statement bs
inner join ccdi_staff_fmy_relation relation on relation.relation_cert_no = bs.cret_no
where bs.project_id = #{projectId}
and relation.status = 1
and IFNULL(bs.AMOUNT_DR, 0) > 0
and (
IFNULL(bs.USER_MEMO, '') REGEXP '(购|买).*房|房款|首付'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '房地产|置业|置地|地产|房产|不动产|链家|贝壳|我爱我家|房管局'
)
) trade
left join (
select distinct
asset.person_id AS personId
from ccdi_asset_info asset
where asset.asset_main_type = '房产'
and asset.asset_sub_type = '住宅'
and asset.asset_status = '正常'
) asset
on asset.personId = trade.personId
where asset.personId is null
</select> </select>
<select id="selectPropertyFeeRegistrationMismatchStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectPropertyFeeRegistrationMismatchStatements" resultMap="BankTagStatementHitResultMap">
select select
bs.bank_statement_id AS bankStatementId, trade.bankStatementId AS bankStatementId,
bs.group_id AS groupId, trade.groupId AS groupId,
bs.batch_id AS logId, trade.logId AS logId,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '物业缴费金额 ', CAST(trade.amountDr AS CHAR),
where 1 = 0 ' 元,对手方“', IFNULL(trade.customerAccountName, ''),
'”,证件号 ', trade.personId, ' 名下无房产登记'
) AS reasonDetail
from (
select
staff.id_card AS personId,
bs.bank_statement_id AS bankStatementId,
bs.group_id AS groupId,
bs.batch_id AS logId,
IFNULL(bs.AMOUNT_DR, 0) AS amountDr,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_DR, 0) > 0
and (
IFNULL(bs.USER_MEMO, '') REGEXP '物业|物业费|管理费|物业服务|综合服务'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '物业|小区|花园|苑|中心|大厦|业委会|业主委员会|置业|房地产|服务中心|管理处|社区'
)
union all
select
relation.relation_cert_no AS personId,
bs.bank_statement_id AS bankStatementId,
bs.group_id AS groupId,
bs.batch_id AS logId,
IFNULL(bs.AMOUNT_DR, 0) AS amountDr,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName
from ccdi_bank_statement bs
inner join ccdi_staff_fmy_relation relation on relation.relation_cert_no = bs.cret_no
where bs.project_id = #{projectId}
and relation.status = 1
and IFNULL(bs.AMOUNT_DR, 0) > 0
and (
IFNULL(bs.USER_MEMO, '') REGEXP '物业|物业费|管理费|物业服务|综合服务'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '物业|小区|花园|苑|中心|大厦|业委会|业主委员会|置业|房地产|服务中心|管理处|社区'
)
) trade
left join (
select distinct
asset.person_id AS personId
from ccdi_asset_info asset
where asset.asset_main_type = '房产'
and asset.asset_sub_type = '住宅'
and asset.asset_status = '正常'
) asset
on asset.personId = trade.personId
where asset.personId is null
</select> </select>
<select id="selectTaxAssetRegistrationMismatchStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectTaxAssetRegistrationMismatchStatements" resultMap="BankTagStatementHitResultMap">
select select
bs.bank_statement_id AS bankStatementId, trade.bankStatementId AS bankStatementId,
bs.group_id AS groupId, trade.groupId AS groupId,
bs.batch_id AS logId, trade.logId AS logId,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '税费支出金额 ', CAST(trade.amountDr AS CHAR),
where 1 = 0 ' 元,对手方“', IFNULL(trade.customerAccountName, ''),
'”,证件号 ', trade.personId, ' 名下无房产登记'
) AS reasonDetail
from (
select
staff.id_card AS personId,
bs.bank_statement_id AS bankStatementId,
bs.group_id AS groupId,
bs.batch_id AS logId,
IFNULL(bs.AMOUNT_DR, 0) AS amountDr,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_DR, 0) >= 5000
and (
IFNULL(bs.USER_MEMO, '') REGEXP '税务|缴税|税款'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '税务|税务局|国库|国家金库|财政'
)
union all
select
relation.relation_cert_no AS personId,
bs.bank_statement_id AS bankStatementId,
bs.group_id AS groupId,
bs.batch_id AS logId,
IFNULL(bs.AMOUNT_DR, 0) AS amountDr,
bs.CUSTOMER_ACCOUNT_NAME AS customerAccountName
from ccdi_bank_statement bs
inner join ccdi_staff_fmy_relation relation on relation.relation_cert_no = bs.cret_no
where bs.project_id = #{projectId}
and relation.status = 1
and IFNULL(bs.AMOUNT_DR, 0) >= 5000
and (
IFNULL(bs.USER_MEMO, '') REGEXP '税务|缴税|税款'
or IFNULL(bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '税务|税务局|国库|国家金库|财政'
)
) trade
left join (
select distinct
asset.person_id AS personId
from ccdi_asset_info asset
where asset.asset_main_type = '房产'
and asset.asset_sub_type = '住宅'
and asset.asset_status = '正常'
) asset
on asset.personId = trade.personId
where asset.personId is null
</select> </select>
<select id="selectIncomeAssetMismatchStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectIncomeAssetMismatchStatements" resultMap="BankTagStatementHitResultMap">
@@ -610,10 +931,84 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<select id="selectSupplierConcentrationObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectSupplierConcentrationObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '供应商“', t.supplierName,
where 1 = 0 '”采购金额 ', CAST(t.supplierAmount AS CHAR),
' 元,占总采购金额 ', CAST(t.supplierRatioPct AS CHAR), '%'
) AS reasonDetail
from (
select
supplier_hit.objectKey AS objectKey,
SUBSTRING_INDEX(
GROUP_CONCAT(supplier_hit.supplierName ORDER BY supplier_hit.supplierRatio DESC SEPARATOR ','),
',',
1
) AS supplierName,
MAX(supplier_hit.supplierAmount) AS supplierAmount,
ROUND(MAX(supplier_hit.supplierRatio) * 100, 2) AS supplierRatioPct
from (
select
source.objectKey AS objectKey,
source.supplierName AS supplierName,
ROUND(SUM(source.actualAmount), 2) AS supplierAmount,
SUM(source.actualAmount) / NULLIF(total_amount.totalAmount, 0) AS supplierRatio
from (
select distinct
staff.id_card AS objectKey,
pt.purchase_id AS purchaseId,
pt.supplier_name AS supplierName,
IFNULL(pt.actual_amount, 0) AS actualAmount
from ccdi_purchase_transaction pt
inner join ccdi_base_staff staff on CAST(staff.staff_id AS CHAR) = pt.applicant_id
where IFNULL(pt.actual_amount, 0) > 0
and IFNULL(pt.supplier_name, '') &lt;&gt; ''
union
select distinct
staff.id_card AS objectKey,
pt.purchase_id AS purchaseId,
pt.supplier_name AS supplierName,
IFNULL(pt.actual_amount, 0) AS actualAmount
from ccdi_purchase_transaction pt
inner join ccdi_base_staff staff on CAST(staff.staff_id AS CHAR) = pt.purchase_leader_id
where pt.purchase_leader_id is not null
and IFNULL(pt.actual_amount, 0) > 0
and IFNULL(pt.supplier_name, '') &lt;&gt; ''
) source
inner join (
select
source_total.objectKey AS objectKey,
ROUND(SUM(source_total.actualAmount), 2) AS totalAmount
from (
select distinct
staff.id_card AS objectKey,
pt.purchase_id AS purchaseId,
IFNULL(pt.actual_amount, 0) AS actualAmount
from ccdi_purchase_transaction pt
inner join ccdi_base_staff staff on CAST(staff.staff_id AS CHAR) = pt.applicant_id
where IFNULL(pt.actual_amount, 0) > 0
union
select distinct
staff.id_card AS objectKey,
pt.purchase_id AS purchaseId,
IFNULL(pt.actual_amount, 0) AS actualAmount
from ccdi_purchase_transaction pt
inner join ccdi_base_staff staff on CAST(staff.staff_id AS CHAR) = pt.purchase_leader_id
where pt.purchase_leader_id is not null
and IFNULL(pt.actual_amount, 0) > 0
) source_total
group by source_total.objectKey
) total_amount
on total_amount.objectKey = source.objectKey
group by source.objectKey, source.supplierName, total_amount.totalAmount
having SUM(source.actualAmount) / NULLIF(total_amount.totalAmount, 0) > 0.7
) supplier_hit
group by supplier_hit.objectKey
) t
</select> </select>
<select id="selectStockTfrLargeStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectStockTfrLargeStatements" resultMap="BankTagStatementHitResultMap">
@@ -680,19 +1075,119 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
<select id="selectSalaryQuickTransferObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectSalaryQuickTransferObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '工资入账 ', CAST(t.salaryAmount AS CHAR),
where 1 = 0 ' 元后24小时内转出 ', CAST(t.transferAmount AS CHAR),
' 元,占比 ', CAST(t.transferRatioPct AS CHAR), '%'
) AS reasonDetail
from (
select
salary.objectKey AS objectKey,
MAX(salary.salaryAmount) AS salaryAmount,
MAX(out_trade.transferAmount) AS transferAmount,
ROUND(MAX(out_trade.transferAmount / NULLIF(salary.salaryAmount, 0)) * 100, 2) AS transferRatioPct
from (
select
staff.id_card AS objectKey,
IFNULL(bs.AMOUNT_CR, 0) AS salaryAmount,
COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
) AS salaryTime
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_CR, 0) > 0
and <include refid="salaryIncomePredicate"/>
) salary
inner join (
select
salary_source.objectKey AS objectKey,
salary_source.salaryTime AS salaryTime,
ROUND(SUM(IFNULL(out_bs.AMOUNT_DR, 0)), 2) AS transferAmount
from (
select
staff.id_card AS objectKey,
COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
) AS salaryTime
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_CR, 0) > 0
and <include refid="salaryIncomePredicate"/>
) salary_source
inner join ccdi_bank_statement out_bs
on out_bs.project_id = #{projectId}
and out_bs.cret_no = salary_source.objectKey
and IFNULL(out_bs.AMOUNT_DR, 0) > 0
and COALESCE(
STR_TO_DATE(LEFT(TRIM(out_bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(out_bs.TRX_DATE), 10), '%Y-%m-%d')
) > salary_source.salaryTime
and COALESCE(
STR_TO_DATE(LEFT(TRIM(out_bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(out_bs.TRX_DATE), 10), '%Y-%m-%d')
) &lt;= DATE_ADD(salary_source.salaryTime, INTERVAL 24 HOUR)
group by salary_source.objectKey, salary_source.salaryTime
) out_trade
on out_trade.objectKey = salary.objectKey
and out_trade.salaryTime = salary.salaryTime
where out_trade.transferAmount / NULLIF(salary.salaryAmount, 0) > 0.8
group by salary.objectKey
) t
</select> </select>
<select id="selectSalaryUnusedObjects" resultMap="BankTagObjectHitResultMap"> <select id="selectSalaryUnusedObjects" resultMap="BankTagObjectHitResultMap">
select select
'STAFF_ID_CARD' AS objectType, 'STAFF_ID_CARD' AS objectType,
'' AS objectKey, t.objectKey AS objectKey,
'占位SQL待补充真实规则' AS reasonDetail CONCAT(
from ccdi_bank_statement bs '工资入账 ', CAST(t.salaryAmount AS CHAR),
where 1 = 0 ' 元后30天内无消费或转账支出'
) AS reasonDetail
from (
select
salary.objectKey AS objectKey,
MAX(salary.salaryAmount) AS salaryAmount
from (
select
staff.id_card AS objectKey,
IFNULL(bs.AMOUNT_CR, 0) AS salaryAmount,
COALESCE(
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(bs.TRX_DATE), 10), '%Y-%m-%d')
) AS salaryTime
from ccdi_bank_statement bs
inner join ccdi_base_staff staff on staff.id_card = bs.cret_no
where bs.project_id = #{projectId}
and IFNULL(bs.AMOUNT_CR, 0) > 0
and <include refid="salaryIncomePredicate"/>
) salary
where not exists (
select 1
from ccdi_bank_statement expense_bs
where expense_bs.project_id = #{projectId}
and expense_bs.cret_no = salary.objectKey
and IFNULL(expense_bs.AMOUNT_DR, 0) > 0
and COALESCE(
STR_TO_DATE(LEFT(TRIM(expense_bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(expense_bs.TRX_DATE), 10), '%Y-%m-%d')
) > salary.salaryTime
and COALESCE(
STR_TO_DATE(LEFT(TRIM(expense_bs.TRX_DATE), 19), '%Y-%m-%d %H:%i:%s'),
STR_TO_DATE(LEFT(TRIM(expense_bs.TRX_DATE), 10), '%Y-%m-%d')
) &lt;= DATE_ADD(salary.salaryTime, INTERVAL 30 DAY)
and not (
IFNULL(expense_bs.USER_MEMO, '') REGEXP '代扣|个税|社保|公积金|水费|电费|燃气|话费|党费|医保'
or IFNULL(expense_bs.CASH_TYPE, '') REGEXP '代扣|个税|社保|公积金'
or IFNULL(expense_bs.CUSTOMER_ACCOUNT_NAME, '') REGEXP '税务|社保|公积金'
)
)
group by salary.objectKey
) t
</select> </select>
<select id="selectLargeStockTradingStatements" resultMap="BankTagStatementHitResultMap"> <select id="selectLargeStockTradingStatements" resultMap="BankTagStatementHitResultMap">

View File

@@ -0,0 +1,157 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewEmployeeResultMapper">
<resultMap id="CcdiProjectOverviewEmployeeResultMap"
type="com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult">
<id property="id" column="id"/>
<result property="projectId" column="project_id"/>
<result property="staffIdCard" column="staff_id_card"/>
<result property="staffCode" column="staff_code"/>
<result property="staffName" column="staff_name"/>
<result property="deptId" column="dept_id"/>
<result property="deptName" column="dept_name"/>
<result property="ruleCount" column="rule_count"/>
<result property="modelCount" column="model_count"/>
<result property="hitCount" column="hit_count"/>
<result property="riskLevelCode" column="risk_level_code"/>
<result property="riskPoint" column="risk_point"/>
<result property="modelCodesCsv" column="model_codes_csv"/>
<result property="modelNamesJson" column="model_names_json"/>
<result property="hitRulesJson" column="hit_rules_json"/>
<result property="modelHitSummaryJson" column="model_hit_summary_json"/>
<result property="createBy" column="create_by"/>
<result property="createTime" column="create_time"/>
<result property="updateBy" column="update_by"/>
<result property="updateTime" column="update_time"/>
<result property="remark" column="remark"/>
</resultMap>
<resultMap id="CcdiProjectOverviewEmployeeHitRowMap"
type="com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeHitRowVO">
<result property="projectId" column="project_id"/>
<result property="staffIdCard" column="staff_id_card"/>
<result property="staffName" column="staff_name"/>
<result property="staffCode" column="staff_code"/>
<result property="deptId" column="dept_id"/>
<result property="deptName" column="dept_name"/>
<result property="modelCode" column="model_code"/>
<result property="modelName" column="model_name"/>
<result property="ruleCode" column="rule_code"/>
<result property="ruleName" column="rule_name"/>
<result property="riskLevel" column="risk_level"/>
</resultMap>
<sql id="resolvedEmployeeHitRowsSql">
select distinct
tr.project_id,
coalesce(direct_staff.id_card, statement_staff.id_card, family_staff.id_card) as staff_id_card,
coalesce(direct_staff.name, statement_staff.name, family_staff.name) as staff_name,
cast(coalesce(direct_staff.staff_id, statement_staff.staff_id, family_staff.staff_id) as char) as staff_code,
coalesce(direct_staff.dept_id, statement_staff.dept_id, family_staff.dept_id) as dept_id,
dept.dept_name,
tr.model_code,
tr.model_name,
tr.rule_code,
tr.rule_name,
tr.risk_level
from ccdi_bank_statement_tag_result tr
left join ccdi_base_staff direct_staff
on tr.object_type = 'STAFF_ID_CARD'
and tr.object_key = direct_staff.id_card
left join ccdi_bank_statement bs
on tr.bank_statement_id = bs.bank_statement_id
left join ccdi_base_staff statement_staff
on (tr.object_key is null or tr.object_key = '')
and bs.cret_no = statement_staff.id_card
left join ccdi_staff_fmy_relation relation
on relation.status = 1
and (
((tr.object_key is null or tr.object_key = '') and bs.cret_no = relation.relation_cert_no)
or ((tr.object_key is not null and tr.object_key != '') and tr.object_type != 'STAFF_ID_CARD'
and tr.object_key = relation.relation_cert_no)
)
left join ccdi_base_staff family_staff
on relation.person_id = family_staff.id_card
left join sys_dept dept
on dept.dept_id = coalesce(direct_staff.dept_id, statement_staff.dept_id, family_staff.dept_id)
where tr.project_id = #{projectId}
and coalesce(direct_staff.id_card, statement_staff.id_card, family_staff.id_card) is not null
</sql>
<delete id="deleteByProjectId">
delete from ccdi_project_overview_employee_result
where project_id = #{projectId}
</delete>
<insert id="insertBatch" parameterType="java.util.List">
insert into ccdi_project_overview_employee_result (
project_id, staff_id_card, staff_code, staff_name, dept_id, dept_name,
rule_count, model_count, hit_count, risk_level_code, risk_point,
model_codes_csv, model_names_json, hit_rules_json, model_hit_summary_json,
create_by, create_time, update_by, update_time, remark
) values
<foreach collection="list" item="item" separator=",">
(
#{item.projectId}, #{item.staffIdCard}, #{item.staffCode}, #{item.staffName},
#{item.deptId}, #{item.deptName}, #{item.ruleCount}, #{item.modelCount},
#{item.hitCount}, #{item.riskLevelCode}, #{item.riskPoint},
#{item.modelCodesCsv}, #{item.modelNamesJson}, #{item.hitRulesJson}, #{item.modelHitSummaryJson},
#{item.createBy}, #{item.createTime}, #{item.updateBy}, #{item.updateTime}, #{item.remark}
)
</foreach>
on duplicate key update
staff_code = values(staff_code),
staff_name = values(staff_name),
dept_id = values(dept_id),
dept_name = values(dept_name),
rule_count = values(rule_count),
model_count = values(model_count),
hit_count = values(hit_count),
risk_level_code = values(risk_level_code),
risk_point = values(risk_point),
model_codes_csv = values(model_codes_csv),
model_names_json = values(model_names_json),
hit_rules_json = values(hit_rules_json),
model_hit_summary_json = values(model_hit_summary_json),
update_by = values(update_by),
update_time = values(update_time),
remark = values(remark)
</insert>
<select id="selectByProjectId" resultMap="CcdiProjectOverviewEmployeeResultMap">
select
id,
project_id,
staff_id_card,
staff_code,
staff_name,
dept_id,
dept_name,
rule_count,
model_count,
hit_count,
risk_level_code,
risk_point,
model_codes_csv,
model_names_json,
hit_rules_json,
model_hit_summary_json,
create_by,
create_time,
update_by,
update_time,
remark
from ccdi_project_overview_employee_result
where project_id = #{projectId}
order by id asc
</select>
<select id="selectEmployeeHitRowsByProjectId" resultMap="CcdiProjectOverviewEmployeeHitRowMap">
<include refid="resolvedEmployeeHitRowsSql"/>
order by staff_id_card asc, model_code asc, rule_code asc
</select>
</mapper>

View File

@@ -33,6 +33,29 @@
select="selectRiskHitTagsByScope"/> select="selectRiskHitTagsByScope"/>
</resultMap> </resultMap>
<sql id="digitTableSql">
select 0 as digit
union all select 1
union all select 2
union all select 3
union all select 4
union all select 5
union all select 6
union all select 7
union all select 8
union all select 9
</sql>
<sql id="jsonArrayIndexSql">
select ones.digit + tens.digit * 10 as idx
from (
<include refid="digitTableSql"/>
) ones
cross join (
<include refid="digitTableSql"/>
) tens
</sql>
<sql id="resolvedEmployeeRiskBaseSql"> <sql id="resolvedEmployeeRiskBaseSql">
select distinct select distinct
tr.id, tr.id,
@@ -186,14 +209,60 @@
</select> </select>
<select id="selectRiskPeopleOverviewByProjectId" resultMap="EmployeeRiskAggregateResultMap"> <select id="selectRiskPeopleOverviewByProjectId" resultMap="EmployeeRiskAggregateResultMap">
<include refid="employeeRiskAggregateSql"/> select
order by risk_level_sort asc, model_count desc, rule_count desc, staff_id_card asc result.staff_id_card,
result.staff_name,
result.dept_id,
result.dept_name,
result.rule_count,
result.model_count,
result.hit_count,
null as top_rule_code,
null as top_rule_name,
result.risk_point,
result.risk_level_code,
case
when result.risk_level_code = 'HIGH' then '高风险'
when result.risk_level_code = 'MEDIUM' then '中风险'
else '低风险'
end as risk_level_name,
case
when result.risk_level_code = 'HIGH' then 1
when result.risk_level_code = 'MEDIUM' then 2
else 3
end as risk_level_sort
from ccdi_project_overview_employee_result result
where result.project_id = #{projectId}
order by risk_level_sort asc, result.model_count desc, result.rule_count desc, result.staff_id_card asc
</select> </select>
<select id="selectTopRiskPeopleByProjectId" resultMap="EmployeeRiskAggregateResultMap"> <select id="selectTopRiskPeopleByProjectId" resultMap="EmployeeRiskAggregateResultMap">
<include refid="employeeRiskAggregateSql"/> select
where rule_count >= 2 result.staff_id_card,
order by risk_level_sort asc, model_count desc, rule_count desc, staff_id_card asc result.staff_name,
result.dept_id,
result.dept_name,
result.rule_count,
result.model_count,
result.hit_count,
null as top_rule_code,
null as top_rule_name,
result.risk_point,
result.risk_level_code,
case
when result.risk_level_code = 'HIGH' then '高风险'
when result.risk_level_code = 'MEDIUM' then '中风险'
else '低风险'
end as risk_level_name,
case
when result.risk_level_code = 'HIGH' then 1
when result.risk_level_code = 'MEDIUM' then 2
else 3
end as risk_level_sort
from ccdi_project_overview_employee_result result
where result.project_id = #{projectId}
and result.risk_level_code in ('HIGH', 'MEDIUM')
order by risk_level_sort asc, result.model_count desc, result.rule_count desc, result.staff_id_card asc
limit 10 limit 10
</select> </select>
@@ -213,13 +282,18 @@
) models ) models
left join ( left join (
select select
base.model_code, json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelCode'))) as model_code,
count(1) as warning_count, sum(cast(json_unquote(json_extract(
count(distinct base.staff_id_card) as people_count result.model_hit_summary_json,
from ( concat('$[', idx.idx, '].warningCount')
<include refid="resolvedEmployeeRiskBaseSql"/> )) as unsigned)) as warning_count,
) base count(distinct result.staff_id_card) as people_count
group by base.model_code from ccdi_project_overview_employee_result result
join (
<include refid="jsonArrayIndexSql"/>
) idx on idx.idx &lt; json_length(result.model_hit_summary_json)
where result.project_id = #{projectId}
group by json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelCode')))
) stats on models.model_code = stats.model_code ) stats on models.model_code = stats.model_code
order by warning_count desc, model_code asc order by warning_count desc, model_code asc
</select> </select>
@@ -227,73 +301,88 @@
<select id="selectRiskModelPeoplePage" resultMap="RiskModelPeopleItemResultMap"> <select id="selectRiskModelPeoplePage" resultMap="RiskModelPeopleItemResultMap">
<bind name="projectId" value="query.projectId"/> <bind name="projectId" value="query.projectId"/>
select select
base.project_id, result.project_id,
base.staff_id_card, result.staff_id_card,
max(base.staff_name) as staff_name, result.staff_name,
max(base.staff_code) as staff_code, result.staff_code,
max(dept.dept_name) as department, result.dept_name as department,
#{query.modelCodesCsv} as selected_model_codes #{query.modelCodesCsv} as selected_model_codes
from ( from ccdi_project_overview_employee_result result
<include refid="resolvedEmployeeRiskBaseSql"/>
) base
left join sys_dept dept on base.dept_id = dept.dept_id
where 1 = 1 where 1 = 1
and result.project_id = #{query.projectId}
<if test="query.modelCodes != null and query.modelCodes.size() > 0"> <if test="query.modelCodes != null and query.modelCodes.size() > 0">
and base.model_code in <choose>
<foreach collection="query.modelCodes" item="modelCode" open="(" separator="," close=")"> <when test="query.matchMode == 'ALL'">
#{modelCode} <foreach collection="query.modelCodes" item="modelCode">
</foreach> and find_in_set(#{modelCode}, result.model_codes_csv)
</foreach>
</when>
<otherwise>
and (
<foreach collection="query.modelCodes" item="modelCode" separator=" or ">
find_in_set(#{modelCode}, result.model_codes_csv)
</foreach>
)
</otherwise>
</choose>
</if> </if>
<if test="query.keyword != null and query.keyword != ''"> <if test="query.keyword != null and query.keyword != ''">
and ( and (
base.staff_name like concat('%', trim(#{query.keyword}), '%') result.staff_name like concat('%', trim(#{query.keyword}), '%')
or cast(base.staff_code as char) like concat('%', trim(#{query.keyword}), '%') or result.staff_code like concat('%', trim(#{query.keyword}), '%')
) )
</if> </if>
<if test="query.deptId != null"> <if test="query.deptId != null">
and base.dept_id = #{query.deptId} and result.dept_id = #{query.deptId}
</if> </if>
group by base.project_id, base.staff_id_card order by result.staff_name asc, result.staff_id_card asc
<if test="query.modelCodes != null and query.modelCodes.size() > 0 and query.matchMode == 'ALL'">
having count(distinct base.model_code) = #{query.modelCodesCount}
</if>
order by max(base.staff_name) asc, base.staff_id_card asc
</select> </select>
<select id="selectRiskModelNamesByScope" resultType="java.lang.String"> <select id="selectRiskModelNamesByScope" resultType="java.lang.String">
select scoped.model_name select
from ( json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelName'))) as model_name
<include refid="resolvedEmployeeRiskBaseSql"/> from ccdi_project_overview_employee_result result
) scoped join (
where scoped.project_id = #{projectId} <include refid="jsonArrayIndexSql"/>
and scoped.staff_id_card = #{staffIdCard} ) idx on idx.idx &lt; json_length(result.model_hit_summary_json)
where result.project_id = #{projectId}
and result.staff_id_card = #{staffIdCard}
<if test="selectedModelCodes != null and selectedModelCodes != ''"> <if test="selectedModelCodes != null and selectedModelCodes != ''">
and find_in_set(scoped.model_code, #{selectedModelCodes}) and find_in_set(
json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelCode'))),
#{selectedModelCodes}
)
</if> </if>
group by scoped.model_code, scoped.model_name group by
order by scoped.model_code asc json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelCode'))),
json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelName')))
order by json_unquote(json_extract(result.model_hit_summary_json, concat('$[', idx.idx, '].modelCode'))) asc
</select> </select>
<select id="selectRiskHitTagsByScope" resultType="com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskHitTagVO"> <select id="selectRiskHitTagsByScope" resultType="com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskHitTagVO">
select select
scoped.rule_code, json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].ruleCode'))) as rule_code,
max(scoped.rule_name) as rule_name, max(json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].ruleName')))) as rule_name,
max(scoped.risk_level) as risk_level max(json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].riskLevel')))) as risk_level
from ( from ccdi_project_overview_employee_result result
<include refid="resolvedEmployeeRiskBaseSql"/> join (
) scoped <include refid="jsonArrayIndexSql"/>
where scoped.project_id = #{projectId} ) idx on idx.idx &lt; json_length(result.hit_rules_json)
and scoped.staff_id_card = #{staffIdCard} where result.project_id = #{projectId}
and result.staff_id_card = #{staffIdCard}
<if test="selectedModelCodes != null and selectedModelCodes != ''"> <if test="selectedModelCodes != null and selectedModelCodes != ''">
and find_in_set(scoped.model_code, #{selectedModelCodes}) and find_in_set(
json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].modelCode'))),
#{selectedModelCodes}
)
</if> </if>
group by scoped.rule_code group by json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].ruleCode')))
order by case max(scoped.risk_level) order by case max(json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].riskLevel'))))
when 'HIGH' then 1 when 'HIGH' then 1
when 'MEDIUM' then 2 when 'MEDIUM' then 2
else 3 else 3
end, end,
scoped.rule_code asc json_unquote(json_extract(result.hit_rules_json, concat('$[', idx.idx, '].ruleCode'))) asc
</select> </select>
<select id="selectRiskCountSummaryByProjectId" resultType="map"> <select id="selectRiskCountSummaryByProjectId" resultType="map">

View File

@@ -0,0 +1,56 @@
package com.ruoyi.ccdi.project.domain.entity;
import org.junit.jupiter.api.Test;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertTrue;
class CcdiProjectOverviewEmployeeResultEntityTest {
private static final Path ENTITY_PATH = Path.of(
"src/main/java/com/ruoyi/ccdi/project/domain/entity/CcdiProjectOverviewEmployeeResult.java"
);
@Test
void entityAndSqlShouldDefineProjectOverviewEmployeeResultTable() throws Exception {
String entitySource = Files.readString(ENTITY_PATH);
String sql = Files.readString(resolveSqlPath());
String normalizedSql = sql.toLowerCase().replace("`", "");
assertAll(
() -> assertTrue(entitySource.contains("@TableName(\"ccdi_project_overview_employee_result\")")),
() -> assertTrue(entitySource.contains("private Long projectId;")),
() -> assertTrue(entitySource.contains("private String staffIdCard;")),
() -> assertTrue(entitySource.contains("private Integer ruleCount;")),
() -> assertTrue(entitySource.contains("private Integer modelCount;")),
() -> assertTrue(entitySource.contains("private Integer hitCount;")),
() -> assertTrue(entitySource.contains("private String riskLevelCode;")),
() -> assertTrue(entitySource.contains("private String modelCodesCsv;")),
() -> assertTrue(entitySource.contains("private String modelNamesJson;")),
() -> assertTrue(entitySource.contains("private String hitRulesJson;")),
() -> assertTrue(entitySource.contains("private String modelHitSummaryJson;")),
() -> assertTrue(normalizedSql.contains("create table if not exists ccdi_project_overview_employee_result")),
() -> assertTrue(normalizedSql.contains("unique key")),
() -> assertTrue(normalizedSql.contains("project_id")),
() -> assertTrue(normalizedSql.contains("staff_id_card")),
() -> assertTrue(normalizedSql.contains("rule_count")),
() -> assertTrue(normalizedSql.contains("model_count")),
() -> assertTrue(normalizedSql.contains("hit_count")),
() -> assertTrue(normalizedSql.contains("risk_level_code")),
() -> assertTrue(normalizedSql.contains("model_codes_csv")),
() -> assertTrue(normalizedSql.contains("model_names_json")),
() -> assertTrue(normalizedSql.contains("hit_rules_json")),
() -> assertTrue(normalizedSql.contains("model_hit_summary_json"))
);
}
private Path resolveSqlPath() {
Path moduleRelative = Path.of("../sql/migration/2026-03-20-create-project-overview-employee-result-table.sql");
if (Files.exists(moduleRelative)) {
return moduleRelative;
}
return Path.of("sql/migration/2026-03-20-create-project-overview-employee-result-table.sql");
}
}

View File

@@ -132,6 +132,17 @@ class CcdiBankStatementMapperXmlTest {
} }
} }
@Test
void insertBatch_shouldAvoidUpdatingAutoIncrementPrimaryKeyInDuplicateBranch() throws Exception {
try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream(RESOURCE)) {
String xml = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
assertTrue(xml.contains("on duplicate key update"), xml);
assertTrue(xml.contains("batch_id = batch_id"), xml);
assertFalse(xml.contains("bank_statement_id = bank_statement_id"), xml);
}
}
private MappedStatement loadMappedStatement(String statementId) throws Exception { private MappedStatement loadMappedStatement(String statementId) throws Exception {
Configuration configuration = new Configuration(); Configuration configuration = new Configuration();
configuration.setEnvironment(new Environment("test", new JdbcTransactionFactory(), new NoOpDataSource())); configuration.setEnvironment(new Environment("test", new JdbcTransactionFactory(), new NoOpDataSource()));

View File

@@ -11,6 +11,7 @@ import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -27,6 +28,15 @@ class CcdiBankTagAnalysisMapperXmlTest {
"selectStockTfrLargeStatements", "selectStockTfrLargeStatements",
"selectLargeStockTradingStatements" "selectLargeStockTradingStatements"
); );
// Phase-two OBJECT-result rule selects that have been switched from placeholder SQL to
// real queries; each must project objectType/objectKey/reasonDetail (see
// phaseTwoObjectRules_shouldUseRealSqlAndKeepObjectHitFields).
private static final List<String> PHASE_TWO_OBJECT_SELECT_IDS = List.of(
        "selectLowIncomeRelativeLargeTransactionObjects",
        "selectMultiPartyGamblingTransferObjects",
        "selectMonthlyFixedIncomeObjects",
        "selectFixedCounterpartyTransferObjects",
        "selectSupplierConcentrationObjects",
        "selectSalaryQuickTransferObjects",
        "selectSalaryUnusedObjects"
);
private static final List<String> PLACEHOLDER_SELECT_IDS = List.of( private static final List<String> PLACEHOLDER_SELECT_IDS = List.of(
"selectAbnormalCustomerTransactionStatements", "selectAbnormalCustomerTransactionStatements",
"selectLowIncomeRelativeLargeTransactionObjects", "selectLowIncomeRelativeLargeTransactionObjects",
@@ -90,7 +100,7 @@ class CcdiBankTagAnalysisMapperXmlTest {
void placeholderRules_shouldUseEmptyResultSqlTemplate() throws Exception { void placeholderRules_shouldUseEmptyResultSqlTemplate() throws Exception {
String xml = readXml(RESOURCE); String xml = readXml(RESOURCE);
assertTrue(xml.contains("占位SQL待补充真实规则")); assertTrue(xml.contains("占位SQL待补充真实规则"));
assertEquals(16, countMatches(xml, "where 1 = 0")); assertEquals(6, countMatches(xml, "where 1 = 0"));
} }
@Test @Test
@@ -116,6 +126,29 @@ class CcdiBankTagAnalysisMapperXmlTest {
assertTrue(!selectSql.contains("where 1 = 0")); assertTrue(!selectSql.contains("where 1 = 0"));
} }
@Test
void phaseTwoObjectRules_shouldUseRealSqlAndKeepObjectHitFields() throws Exception {
    // Every phase-two object rule must carry real SQL (no "where 1 = 0" placeholder)
    // and keep the object-hit projection columns expected by the tag engine.
    String mapperXml = readXml(RESOURCE);
    for (String phaseTwoSelectId : PHASE_TWO_OBJECT_SELECT_IDS) {
        String phaseTwoSql = extractSelectSql(mapperXml, phaseTwoSelectId);
        assertTrue(phaseTwoSql.contains("'STAFF_ID_CARD' AS objectType"), () -> phaseTwoSelectId + " 缺少 objectType");
        assertTrue(phaseTwoSql.contains("AS objectKey"), () -> phaseTwoSelectId + " 缺少 objectKey");
        assertTrue(phaseTwoSql.contains("reasonDetail"), () -> phaseTwoSelectId + " 缺少 reasonDetail");
        assertTrue(!phaseTwoSql.contains("where 1 = 0"), () -> phaseTwoSelectId + " 仍是占位 SQL");
    }
}
@Test
void assetRegistrationMismatchRules_shouldUseRealSqlAndAssetTable() throws Exception {
    // The three asset-mismatch statement rules share one contract: real SQL joined
    // against the asset table. assertAll reports every violation in a single run.
    String mapperXml = readXml(RESOURCE);
    assertAll(
            () -> assertStatementRuleUsesAssetTable(mapperXml, "selectHouseRegistrationMismatchStatements"),
            () -> assertStatementRuleUsesAssetTable(mapperXml, "selectPropertyFeeRegistrationMismatchStatements"),
            () -> assertStatementRuleUsesAssetTable(mapperXml, "selectTaxAssetRegistrationMismatchStatements")
    );
}
@Test @Test
void analysisMapperXml_shouldBeWellFormed() throws Exception { void analysisMapperXml_shouldBeWellFormed() throws Exception {
String xml = readXml(RESOURCE); String xml = readXml(RESOURCE);
@@ -149,4 +182,14 @@ class CcdiBankTagAnalysisMapperXmlTest {
assertTrue(matcher.find(), () -> "未找到 select: " + selectId); assertTrue(matcher.find(), () -> "未找到 select: " + selectId);
return matcher.group(); return matcher.group();
} }
private void assertStatementRuleUsesAssetTable(String xml, String selectId) {
    // A real asset-mismatch rule must reference ccdi_asset_info, project the statement
    // hit columns, and no longer be the "where 1 = 0" placeholder.
    String statementSql = extractSelectSql(xml, selectId);
    List<String> requiredFragments = List.of(
            "ccdi_asset_info",
            "AS bankStatementId",
            "AS groupId",
            "AS logId",
            "reasonDetail"
    );
    for (String fragment : requiredFragments) {
        assertTrue(statementSql.contains(fragment), () -> selectId + " 缺少 " + fragment);
    }
    assertTrue(!statementSql.contains("where 1 = 0"), () -> selectId + " 仍是占位 SQL");
}
} }

View File

@@ -0,0 +1,36 @@
package com.ruoyi.ccdi.project.mapper;
import org.junit.jupiter.api.Test;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertTrue;
class CcdiProjectOverviewEmployeeResultMapperXmlTest {

    /** Mapper interface source that must declare the minimum CRUD methods. */
    private static final Path MAPPER_PATH = Path.of(
            "src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapper.java"
    );

    /** MyBatis XML that must define the matching statements on the snapshot table. */
    private static final Path XML_PATH = Path.of(
            "src/main/resources/mapper/ccdi/project/CcdiProjectOverviewEmployeeResultMapper.xml"
    );

    /**
     * Guards the interface/XML contract: delete, batch-insert, and select-by-project
     * statements must exist on both sides and target ccdi_project_overview_employee_result.
     */
    @Test
    void mapperAndXmlShouldExposeMinimumCrudStatements() throws Exception {
        String mapperSource = Files.readString(MAPPER_PATH);
        String mapperXml = Files.readString(XML_PATH);
        assertAll(
                () -> assertTrue(mapperSource.contains("interface CcdiProjectOverviewEmployeeResultMapper")),
                () -> assertTrue(mapperSource.contains("deleteByProjectId")),
                () -> assertTrue(mapperSource.contains("insertBatch")),
                () -> assertTrue(mapperSource.contains("selectByProjectId")),
                () -> assertTrue(mapperXml.contains("delete id=\"deleteByProjectId\"")),
                () -> assertTrue(mapperXml.contains("insert id=\"insertBatch\"")),
                () -> assertTrue(mapperXml.contains("select id=\"selectByProjectId\"")),
                () -> assertTrue(mapperXml.contains("ccdi_project_overview_employee_result"))
        );
    }
}

View File

@@ -17,18 +17,19 @@ class CcdiProjectOverviewMapperRiskModelCardsTest {
} }
@Test @Test
void shouldDefineRiskModelCardsSqlUsingEmployeeResolvedBase() throws Exception { void shouldDefineRiskModelCardsSqlUsingEmployeeResultSnapshot() throws Exception {
String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml")); String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml"));
assertTrue(xml.contains("<select id=\"selectRiskModelCardsByProjectId\"")); assertTrue(xml.contains("<select id=\"selectRiskModelCardsByProjectId\""));
assertTrue(xml.contains("from (")); assertTrue(xml.contains("from ccdi_project_overview_employee_result"));
assertTrue(xml.contains("from ccdi_bank_tag_rule")); assertTrue(xml.contains("from ccdi_bank_tag_rule"));
assertTrue(xml.contains("where enabled = 1")); assertTrue(xml.contains("where enabled = 1"));
assertTrue(xml.contains("left join (")); assertTrue(xml.contains("left join ("));
assertTrue(xml.contains("<include refid=\"resolvedEmployeeRiskBaseSql\"/>")); assertTrue(xml.contains("model_hit_summary_json"));
assertTrue(xml.contains("json_extract("));
assertTrue(xml.contains("coalesce(stats.warning_count, 0) as warning_count")); assertTrue(xml.contains("coalesce(stats.warning_count, 0) as warning_count"));
assertTrue(xml.contains("coalesce(stats.people_count, 0) as people_count")); assertTrue(xml.contains("coalesce(stats.people_count, 0) as people_count"));
assertTrue(xml.contains("count(1) as warning_count")); assertTrue(xml.contains(".warningCount"));
assertTrue(xml.contains("order by warning_count desc, model_code asc")); assertTrue(xml.contains("order by warning_count desc, model_code asc"));
} }
} }

View File

@@ -34,18 +34,19 @@ class CcdiProjectOverviewMapperRiskModelPeopleTest {
assertTrue(xml.contains("query.modelCodes != null and query.modelCodes.size() > 0")); assertTrue(xml.contains("query.modelCodes != null and query.modelCodes.size() > 0"));
assertTrue(xml.contains("query.matchMode == 'ALL'")); assertTrue(xml.contains("query.matchMode == 'ALL'"));
assertFalse(xml.contains("#{query.modelCodes.size}")); assertFalse(xml.contains("#{query.modelCodes.size}"));
assertTrue(xml.contains("count(distinct base.model_code) = #{query.modelCodesCount}")); assertTrue(xml.contains("find_in_set(#{modelCode}, result.model_codes_csv)"));
assertTrue(xml.contains("<bind name=\"projectId\" value=\"query.projectId\"/>")); assertTrue(xml.contains("<bind name=\"projectId\" value=\"query.projectId\"/>"));
assertTrue(xml.contains("base.staff_name like concat('%', trim(#{query.keyword}), '%')")); assertTrue(xml.contains("result.staff_name like concat('%', trim(#{query.keyword}), '%')"));
assertTrue(xml.contains("cast(base.staff_code as char) like concat('%', trim(#{query.keyword}), '%')")); assertTrue(xml.contains("result.staff_code like concat('%', trim(#{query.keyword}), '%')"));
assertTrue(xml.contains("base.dept_id = #{query.deptId}")); assertTrue(xml.contains("result.dept_id = #{query.deptId}"));
assertTrue(xml.contains("select=\"selectRiskModelNamesByScope\"")); assertTrue(xml.contains("select=\"selectRiskModelNamesByScope\""));
assertTrue(xml.contains("select=\"selectRiskHitTagsByScope\"")); assertTrue(xml.contains("select=\"selectRiskHitTagsByScope\""));
assertTrue(xml.contains("find_in_set(scoped.model_code, #{selectedModelCodes})")); assertTrue(xml.contains("model_hit_summary_json"));
assertFalse(xml.contains("select distinct scoped.model_name")); assertTrue(xml.contains("hit_rules_json"));
assertTrue(xml.contains("group by scoped.model_code, scoped.model_name")); assertTrue(xml.contains("json_extract("));
assertTrue(xml.contains("order by scoped.model_code asc")); assertTrue(xml.contains(".modelCode"));
assertTrue(xml.contains("order by case max(scoped.risk_level)")); assertTrue(xml.contains(".modelName"));
assertTrue(xml.contains("scoped.rule_code asc")); assertTrue(xml.contains(".ruleCode"));
assertTrue(xml.contains(".riskLevel"));
} }
} }

View File

@@ -11,18 +11,30 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
class CcdiProjectOverviewMapperSqlTest { class CcdiProjectOverviewMapperSqlTest {
@Test @Test
void shouldContainEmployeeRiskAggregationSql() throws Exception { void shouldReadOverviewQueriesFromEmployeeResultTable() throws Exception {
String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml")); String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml"));
String riskPeopleSql = extractSelect(xml, "selectRiskPeopleOverviewByProjectId");
String topRiskPeopleSql = extractSelect(xml, "selectTopRiskPeopleByProjectId");
String riskModelCardsSql = extractSelect(xml, "selectRiskModelCardsByProjectId");
String riskModelPeopleSql = extractSelect(xml, "selectRiskModelPeoplePage");
assertTrue(xml.contains("count(distinct base.rule_code)")); assertTrue(riskPeopleSql.contains("from ccdi_project_overview_employee_result"));
assertTrue(xml.contains("count(distinct base.model_code)")); assertTrue(riskPeopleSql.contains("risk_level_code"));
assertTrue(xml.contains("count(1) as hit_count")); assertTrue(riskPeopleSql.contains("model_count"));
assertTrue(xml.contains("agg.hit_count")); assertTrue(riskPeopleSql.contains("risk_point"));
assertTrue(xml.contains("when agg.rule_count >= 5 then 'HIGH'")); assertFalse(riskPeopleSql.contains("resolvedEmployeeRiskBaseSql"));
assertTrue(xml.contains("when agg.rule_count between 2 and 4 then 'MEDIUM'"));
assertTrue(xml.contains("group_concat(")); assertTrue(topRiskPeopleSql.contains("from ccdi_project_overview_employee_result"));
assertTrue(xml.contains("as risk_point")); assertTrue(topRiskPeopleSql.contains("risk_level_code in ('HIGH', 'MEDIUM')"));
assertTrue(xml.contains("order by grouped.hit_count desc, grouped.rule_code asc")); assertFalse(topRiskPeopleSql.contains("resolvedEmployeeRiskBaseSql"));
assertTrue(riskModelCardsSql.contains("from ccdi_project_overview_employee_result"));
assertTrue(riskModelCardsSql.contains("model_hit_summary_json"));
assertFalse(riskModelCardsSql.contains("resolvedEmployeeRiskBaseSql"));
assertTrue(riskModelPeopleSql.contains("from ccdi_project_overview_employee_result"));
assertTrue(riskModelPeopleSql.contains("model_codes_csv"));
assertFalse(riskModelPeopleSql.contains("resolvedEmployeeRiskBaseSql"));
} }
@Test @Test
@@ -30,6 +42,13 @@ class CcdiProjectOverviewMapperSqlTest {
String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml")); String xml = Files.readString(Path.of("src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml"));
assertFalse(xml.contains("row_number() over"), xml); assertFalse(xml.contains("row_number() over"), xml);
assertTrue(xml.contains("not exists"), xml); assertFalse(xml.contains("json_table("), xml);
}
private String extractSelect(String xml, String selectId) {
String start = "<select id=\"" + selectId + "\"";
int startIndex = xml.indexOf(start);
int endIndex = xml.indexOf("</select>", startIndex);
return xml.substring(startIndex, endIndex);
} }
} }

View File

@@ -13,6 +13,7 @@ class CcdiProjectOverviewServiceStructureTest {
assertNotNull(clazz.getMethod("getDashboard", Long.class)); assertNotNull(clazz.getMethod("getDashboard", Long.class));
assertNotNull(clazz.getMethod("getRiskPeopleOverview", Long.class)); assertNotNull(clazz.getMethod("getRiskPeopleOverview", Long.class));
assertNotNull(clazz.getMethod("getTopRiskPeople", Long.class)); assertNotNull(clazz.getMethod("getTopRiskPeople", Long.class));
assertNotNull(clazz.getMethod("refreshOverviewEmployeeResults", Long.class, String.class));
assertNotNull(clazz.getMethod("refreshProjectRiskCounts", Long.class, String.class)); assertNotNull(clazz.getMethod("refreshProjectRiskCounts", Long.class, String.class));
} }
} }

View File

@@ -19,6 +19,7 @@ import org.slf4j.LoggerFactory;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@@ -163,6 +164,52 @@ class BankTagRuleConfigResolverTest {
assertRuleHasNoThresholds("SUSPICIOUS_PURCHASE", "LARGE_PURCHASE_TRANSACTION"); assertRuleHasNoThresholds("SUSPICIOUS_PURCHASE", "LARGE_PURCHASE_TRANSACTION");
} }
@Test
void resolve_shouldSupportPhaseTwoThresholdRulesAndKeepNoParamRulesEmpty() {
    // Project 40 uses the "default" config type; model params are stubbed under project
    // id 0 (presumably the shared default-parameter scope — confirm in the resolver).
    CcdiProject project = new CcdiProject();
    project.setProjectId(40L);
    project.setConfigType("default");
    when(projectMapper.selectById(40L)).thenReturn(project);

    // Threshold-bearing phase-two models: gambling min/max and the part-time
    // income/transfer bounds must flow through to the resolved config.
    when(modelParamMapper.selectByProjectAndModel(0L, "SUSPICIOUS_GAMBLING")).thenReturn(List.of(
            buildParam("SUSPICIOUS_GAMBLING", "MULTI_PARTY_AMT_MIN", "500"),
            buildParam("SUSPICIOUS_GAMBLING", "MULTI_PARTY_AMT_MAX", "5000")
    ));
    when(modelParamMapper.selectByProjectAndModel(0L, "SUSPICIOUS_PART_TIME")).thenReturn(List.of(
            buildParam("SUSPICIOUS_PART_TIME", "MONTHLY_FIXED_INCOME", "5000"),
            buildParam("SUSPICIOUS_PART_TIME", "FIXED_COUNTERPARTY_TRANSFER_MIN", "3000"),
            buildParam("SUSPICIOUS_PART_TIME", "FIXED_COUNTERPARTY_TRANSFER_MAX", "15000")
    ));

    // Models whose phase-two rules take no parameters: an unrelated IGNORED_PARAM is
    // present and must NOT leak into the resolved threshold maps below.
    when(modelParamMapper.selectByProjectAndModel(0L, "ABNORMAL_TRANSACTION")).thenReturn(List.of(
            buildParam("ABNORMAL_TRANSACTION", "IGNORED_PARAM", "999")
    ));
    when(modelParamMapper.selectByProjectAndModel(0L, "SUSPICIOUS_PURCHASE")).thenReturn(List.of(
            buildParam("SUSPICIOUS_PURCHASE", "IGNORED_PARAM", "999")
    ));
    when(modelParamMapper.selectByProjectAndModel(0L, "SUSPICIOUS_PROPERTY")).thenReturn(List.of(
            buildParam("SUSPICIOUS_PROPERTY", "IGNORED_PARAM", "999")
    ));
    when(modelParamMapper.selectByProjectAndModel(0L, "ABNORMAL_BEHAVIOR")).thenReturn(List.of(
            buildParam("ABNORMAL_BEHAVIOR", "IGNORED_PARAM", "999")
    ));

    // Threshold rules resolve their exact param maps; no-param rules resolve empty.
    assertAll(
            () -> assertRuleThresholds("SUSPICIOUS_GAMBLING", "MULTI_PARTY_GAMBLING_TRANSFER",
                    Map.of("MULTI_PARTY_AMT_MIN", "500", "MULTI_PARTY_AMT_MAX", "5000")),
            () -> assertRuleThresholds("SUSPICIOUS_PART_TIME", "MONTHLY_FIXED_INCOME",
                    Map.of("MONTHLY_FIXED_INCOME", "5000")),
            () -> assertRuleThresholds("SUSPICIOUS_PART_TIME", "FIXED_COUNTERPARTY_TRANSFER",
                    Map.of("FIXED_COUNTERPARTY_TRANSFER_MIN", "3000", "FIXED_COUNTERPARTY_TRANSFER_MAX", "15000")),
            () -> assertRuleHasNoThresholds("ABNORMAL_TRANSACTION", "LOW_INCOME_RELATIVE_LARGE_TRANSACTION"),
            () -> assertRuleHasNoThresholds("SUSPICIOUS_PURCHASE", "SUPPLIER_CONCENTRATION"),
            () -> assertRuleHasNoThresholds("SUSPICIOUS_PROPERTY", "HOUSE_REGISTRATION_MISMATCH"),
            () -> assertRuleHasNoThresholds("SUSPICIOUS_PROPERTY", "PROPERTY_FEE_REGISTRATION_MISMATCH"),
            () -> assertRuleHasNoThresholds("SUSPICIOUS_PROPERTY", "TAX_ASSET_REGISTRATION_MISMATCH"),
            () -> assertRuleHasNoThresholds("ABNORMAL_BEHAVIOR", "SALARY_QUICK_TRANSFER"),
            () -> assertRuleHasNoThresholds("ABNORMAL_BEHAVIOR", "SALARY_UNUSED")
    );
}
private CcdiModelParam buildParam(String paramCode, String paramValue) { private CcdiModelParam buildParam(String paramCode, String paramValue) {
CcdiModelParam param = new CcdiModelParam(); CcdiModelParam param = new CcdiModelParam();
param.setProjectId(0L); param.setProjectId(0L);
@@ -201,6 +248,16 @@ class BankTagRuleConfigResolverTest {
assertTrue(config.getThresholdValues().isEmpty()); assertTrue(config.getThresholdValues().isEmpty());
} }
private void assertRuleThresholds(String modelCode, String ruleCode, Map<String, String> expectedThresholds) {
CcdiBankTagRule ruleMeta = new CcdiBankTagRule();
ruleMeta.setModelCode(modelCode);
ruleMeta.setRuleCode(ruleCode);
BankTagRuleExecutionConfig config = resolver.resolve(40L, ruleMeta);
assertEquals(expectedThresholds, config.getThresholdValues());
}
private CcdiModelParam buildParam(String modelCode, String paramCode, String paramValue) { private CcdiModelParam buildParam(String modelCode, String paramCode, String paramValue) {
CcdiModelParam param = new CcdiModelParam(); CcdiModelParam param = new CcdiModelParam();
param.setProjectId(0L); param.setProjectId(0L);

View File

@@ -27,6 +27,7 @@ import org.springframework.test.util.ReflectionTestUtils;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.math.BigDecimal;
import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -327,6 +328,84 @@ class CcdiBankTagServiceImplTest {
verify(projectService).updateProjectStatus(40L, "0", "tester"); verify(projectService).updateProjectStatus(40L, "0", "tester");
} }
@Test
void rebuildProject_shouldDispatchPhaseTwoThresholdObjectRulesWithResolvedThresholds() {
    // Replace the async executor with a same-thread one so rebuildProject runs
    // synchronously and the mapper interactions can be verified immediately.
    ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);

    // Three phase-two OBJECT rules whose mapper queries take numeric thresholds.
    CcdiBankTagRule gamblingRule = buildRule("SUSPICIOUS_GAMBLING", "疑似赌博",
            "MULTI_PARTY_GAMBLING_TRANSFER", "疑似赌博交易", "OBJECT");
    CcdiBankTagRule monthlyRule = buildRule("SUSPICIOUS_PART_TIME", "可疑兼职",
            "MONTHLY_FIXED_INCOME", "疑似兼职", "OBJECT");
    CcdiBankTagRule fixedCounterpartyRule = buildRule("SUSPICIOUS_PART_TIME", "可疑兼职",
            "FIXED_COUNTERPARTY_TRANSFER", "疑似兼职", "OBJECT");

    // Resolved configs carry the string threshold values that the service must convert
    // to BigDecimal and forward to the corresponding mapper query.
    BankTagRuleExecutionConfig gamblingConfig = buildConfig(40L, gamblingRule);
    gamblingConfig.setThresholdValues(Map.of("MULTI_PARTY_AMT_MIN", "500", "MULTI_PARTY_AMT_MAX", "5000"));
    BankTagRuleExecutionConfig monthlyConfig = buildConfig(40L, monthlyRule);
    monthlyConfig.setThresholdValues(Map.of("MONTHLY_FIXED_INCOME", "5000"));
    BankTagRuleExecutionConfig fixedCounterpartyConfig = buildConfig(40L, fixedCounterpartyRule);
    fixedCounterpartyConfig.setThresholdValues(Map.of(
            "FIXED_COUNTERPARTY_TRANSFER_MIN", "3000",
            "FIXED_COUNTERPARTY_TRANSFER_MAX", "15000"
    ));

    // Stub rule lookup, config resolution, and the threshold queries; empty hit lists
    // are fine because only the dispatch arguments are under test here.
    when(ruleMapper.selectEnabledRules("SUSPICIOUS_GAMBLING")).thenReturn(List.of(gamblingRule));
    when(configResolver.resolve(40L, gamblingRule)).thenReturn(gamblingConfig);
    when(analysisMapper.selectMultiPartyGamblingTransferObjects(40L, new BigDecimal("500"), new BigDecimal("5000")))
            .thenReturn(List.of());
    when(ruleMapper.selectEnabledRules("SUSPICIOUS_PART_TIME")).thenReturn(List.of(monthlyRule, fixedCounterpartyRule));
    when(configResolver.resolve(40L, monthlyRule)).thenReturn(monthlyConfig);
    when(configResolver.resolve(40L, fixedCounterpartyRule)).thenReturn(fixedCounterpartyConfig);
    when(analysisMapper.selectMonthlyFixedIncomeObjects(40L, new BigDecimal("5000"))).thenReturn(List.of());
    when(analysisMapper.selectFixedCounterpartyTransferObjects(40L, new BigDecimal("3000"), new BigDecimal("15000")))
            .thenReturn(List.of());

    service.rebuildProject(40L, "SUSPICIOUS_GAMBLING", "admin", TriggerType.MANUAL);
    service.rebuildProject(40L, "SUSPICIOUS_PART_TIME", "admin", TriggerType.MANUAL);

    // Each rule must dispatch with exactly the BigDecimal thresholds resolved above.
    verify(analysisMapper).selectMultiPartyGamblingTransferObjects(40L, new BigDecimal("500"), new BigDecimal("5000"));
    verify(analysisMapper).selectMonthlyFixedIncomeObjects(40L, new BigDecimal("5000"));
    verify(analysisMapper).selectFixedCounterpartyTransferObjects(40L, new BigDecimal("3000"), new BigDecimal("15000"));
}
@Test
void rebuildProject_shouldDispatchPhaseTwoObjectRulesWithoutExtraThresholds() {
    // Run the tag-rule executor inline so dispatch happens synchronously in this thread.
    ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);

    // Four phase-two OBJECT rules whose mapper queries take only the project id.
    CcdiBankTagRule lowIncomeRule = buildRule("ABNORMAL_TRANSACTION", "异常交易",
            "LOW_INCOME_RELATIVE_LARGE_TRANSACTION", "低收入亲属大额交易", "OBJECT");
    CcdiBankTagRule supplierRule = buildRule("SUSPICIOUS_PURCHASE", "可疑采购",
            "SUPPLIER_CONCENTRATION", "可疑采购", "OBJECT");
    CcdiBankTagRule salaryQuickRule = buildRule("ABNORMAL_BEHAVIOR", "异常行为",
            "SALARY_QUICK_TRANSFER", "工资快速转出", "OBJECT");
    CcdiBankTagRule salaryUnusedRule = buildRule("ABNORMAL_BEHAVIOR", "异常行为",
            "SALARY_UNUSED", "工资无使用记录", "OBJECT");

    // Stub rule lookup and config resolution per model; mappers return empty hit lists
    // because only the dispatch target and its arguments are asserted here.
    when(ruleMapper.selectEnabledRules("ABNORMAL_TRANSACTION")).thenReturn(List.of(lowIncomeRule));
    when(configResolver.resolve(40L, lowIncomeRule)).thenReturn(buildConfig(40L, lowIncomeRule));
    when(analysisMapper.selectLowIncomeRelativeLargeTransactionObjects(40L)).thenReturn(List.of());
    when(ruleMapper.selectEnabledRules("SUSPICIOUS_PURCHASE")).thenReturn(List.of(supplierRule));
    when(configResolver.resolve(40L, supplierRule)).thenReturn(buildConfig(40L, supplierRule));
    when(analysisMapper.selectSupplierConcentrationObjects(40L)).thenReturn(List.of());
    when(ruleMapper.selectEnabledRules("ABNORMAL_BEHAVIOR")).thenReturn(List.of(salaryQuickRule, salaryUnusedRule));
    when(configResolver.resolve(40L, salaryQuickRule)).thenReturn(buildConfig(40L, salaryQuickRule));
    when(configResolver.resolve(40L, salaryUnusedRule)).thenReturn(buildConfig(40L, salaryUnusedRule));
    when(analysisMapper.selectSalaryQuickTransferObjects(40L)).thenReturn(List.of());
    when(analysisMapper.selectSalaryUnusedObjects(40L)).thenReturn(List.of());

    service.rebuildProject(40L, "ABNORMAL_TRANSACTION", "admin", TriggerType.MANUAL);
    service.rebuildProject(40L, "SUSPICIOUS_PURCHASE", "admin", TriggerType.MANUAL);
    service.rebuildProject(40L, "ABNORMAL_BEHAVIOR", "admin", TriggerType.MANUAL);

    // Each no-threshold rule must dispatch exactly its dedicated mapper query.
    verify(analysisMapper).selectLowIncomeRelativeLargeTransactionObjects(40L);
    verify(analysisMapper).selectSupplierConcentrationObjects(40L);
    verify(analysisMapper).selectSalaryQuickTransferObjects(40L);
    verify(analysisMapper).selectSalaryUnusedObjects(40L);
}
private CcdiBankTagRule buildRule(String modelCode, String modelName, String ruleCode, String ruleName, String resultType) { private CcdiBankTagRule buildRule(String modelCode, String modelName, String ruleCode, String ruleName, String resultType) {
CcdiBankTagRule rule = new CcdiBankTagRule(); CcdiBankTagRule rule = new CcdiBankTagRule();
rule.setModelCode(modelCode); rule.setModelCode(modelCode);

View File

@@ -62,7 +62,7 @@ class CcdiBankTagServiceRiskCountRefreshTest {
private ProjectBankTagRebuildCoordinator coordinator; private ProjectBankTagRebuildCoordinator coordinator;
@Test @Test
void shouldRefreshProjectRiskCountsAfterTagRebuildSuccess() { void shouldRefreshOverviewEmployeeResultsAfterTagRebuildSuccess() {
ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run); ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);
CcdiBankTagRule rule = buildRule(); CcdiBankTagRule rule = buildRule();
@@ -84,13 +84,13 @@ class CcdiBankTagServiceRiskCountRefreshTest {
inOrder.verify(projectService).updateProjectStatus(40L, "3", "tester"); inOrder.verify(projectService).updateProjectStatus(40L, "3", "tester");
inOrder.verify(resultMapper).deleteByProjectAndModel(40L, null); inOrder.verify(resultMapper).deleteByProjectAndModel(40L, null);
inOrder.verify(resultMapper).insertBatch(anyList()); inOrder.verify(resultMapper).insertBatch(anyList());
inOrder.verify(projectOverviewService).refreshProjectRiskCounts(40L, "tester"); inOrder.verify(projectOverviewService).refreshOverviewEmployeeResults(40L, "tester");
inOrder.verify(taskMapper).updateTask(argThat(task -> "SUCCESS".equals(task.getStatus()))); inOrder.verify(taskMapper).updateTask(argThat(task -> "SUCCESS".equals(task.getStatus())));
inOrder.verify(projectService).updateProjectStatus(40L, "1", "tester"); inOrder.verify(projectService).updateProjectStatus(40L, "1", "tester");
} }
@Test @Test
void shouldFailTaskWhenRiskCountRefreshFails() { void shouldFailTaskWhenOverviewEmployeeResultRefreshFails() {
ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run); ReflectionTestUtils.setField(service, "tagRuleExecutor", (Executor) Runnable::run);
CcdiBankTagRule rule = buildRule(); CcdiBankTagRule rule = buildRule();
@@ -105,7 +105,7 @@ class CcdiBankTagServiceRiskCountRefreshTest {
when(configResolver.resolve(40L, rule)).thenReturn(config); when(configResolver.resolve(40L, rule)).thenReturn(config);
when(analysisMapper.selectHouseOrCarExpenseStatements(40L)).thenReturn(List.of(buildHit())); when(analysisMapper.selectHouseOrCarExpenseStatements(40L)).thenReturn(List.of(buildHit()));
doThrow(new RuntimeException("refresh failed")) doThrow(new RuntimeException("refresh failed"))
.when(projectOverviewService).refreshProjectRiskCounts(40L, "tester"); .when(projectOverviewService).refreshOverviewEmployeeResults(40L, "tester");
assertThrows(RuntimeException.class, assertThrows(RuntimeException.class,
() -> service.rebuildProject(40L, null, "tester", TriggerType.MANUAL)); () -> service.rebuildProject(40L, null, "tester", TriggerType.MANUAL));

View File

@@ -0,0 +1,118 @@
package com.ruoyi.ccdi.project.service.impl;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
import org.junit.jupiter.api.Test;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
class CcdiProjectOverviewEmployeeResultBuilderTest {

    /**
     * Aggregation contract for the overview employee-result builder: seven per-rule hit
     * rows for a single employee must collapse into exactly one snapshot row carrying
     * rule/model/hit counts, the highest risk level, an alphabetically ordered model-code
     * CSV, and JSON aggregates (model names, per-rule hits, per-model warning counts).
     *
     * <p>The builder and the hit-row VO are resolved reflectively by fully-qualified
     * name, so this test also pins down where those classes are expected to live.
     */
    @Test
    @SuppressWarnings("unchecked")
    void shouldAggregateHitRowsIntoEmployeeResultSnapshots() throws Exception {
        Class<?> rowClass = Class.forName("com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeHitRowVO");
        Class<?> builderClass = Class.forName("com.ruoyi.ccdi.project.service.impl.CcdiProjectOverviewEmployeeResultBuilder");
        Object builder = builderClass.getDeclaredConstructor().newInstance();
        Method buildMethod = builderClass.getMethod("build", Long.class, List.class, String.class);
        // One employee, 7 rows: HOUSE_OR_CAR_EXPENSE x2, TAX_EXPENSE x1 (LARGE_TRANSACTION),
        // ABNORMAL_CUSTOMER_TRANSACTION x2 (ABNORMAL_TRANSACTION), MONTHLY_FIXED_INCOME x1,
        // HOUSE_REGISTRATION_MISMATCH x1 => 5 distinct rules / 4 models / 7 hits.
        List<Object> hitRows = List.of(
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "LARGE_TRANSACTION", "大额交易", "HOUSE_OR_CAR_EXPENSE", "房车消费支出交易", "HIGH"),
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "LARGE_TRANSACTION", "大额交易", "HOUSE_OR_CAR_EXPENSE", "房车消费支出交易", "HIGH"),
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "LARGE_TRANSACTION", "大额交易", "TAX_EXPENSE", "税务支出交易", "HIGH"),
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "ABNORMAL_TRANSACTION", "异常交易", "ABNORMAL_CUSTOMER_TRANSACTION", "异常客户交易", "HIGH"),
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "ABNORMAL_TRANSACTION", "异常交易", "ABNORMAL_CUSTOMER_TRANSACTION", "异常客户交易", "HIGH"),
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "SUSPICIOUS_PART_TIME", "可疑兼职", "MONTHLY_FIXED_INCOME", "疑似兼职", "MEDIUM"),
                buildHitRow(rowClass, "330000000000000001", "李四", "E1001", 12L, "信息二部",
                        "SUSPICIOUS_PROPERTY", "可疑财产", "HOUSE_REGISTRATION_MISMATCH", "房产登记不匹配", "LOW")
        );
        List<CcdiProjectOverviewEmployeeResult> results =
                (List<CcdiProjectOverviewEmployeeResult>) buildMethod.invoke(builder, 40L, hitRows, "tester");
        // All rows belong to one employee, so exactly one snapshot is produced.
        assertEquals(1, results.size());
        CcdiProjectOverviewEmployeeResult result = results.getFirst();
        assertEquals(40L, result.getProjectId());
        assertEquals("330000000000000001", result.getStaffIdCard());
        assertEquals("李四", result.getStaffName());
        assertEquals("E1001", result.getStaffCode());
        assertEquals(12L, result.getDeptId());
        assertEquals("信息二部", result.getDeptName());
        assertEquals(5, result.getRuleCount());
        assertEquals(4, result.getModelCount());
        assertEquals(7, result.getHitCount());
        // Highest risk level among the rows (HIGH > MEDIUM > LOW) wins.
        assertEquals("HIGH", result.getRiskLevelCode());
        // Model codes are emitted as an alphabetically ordered CSV.
        assertEquals("ABNORMAL_TRANSACTION,LARGE_TRANSACTION,SUSPICIOUS_PART_TIME,SUSPICIOUS_PROPERTY",
                result.getModelCodesCsv());
        assertNotNull(result.getRiskPoint());
        // Model names follow the same order as the model-code CSV above.
        JSONArray modelNames = JSON.parseArray(result.getModelNamesJson());
        assertEquals(List.of("异常交易", "大额交易", "可疑兼职", "可疑财产"),
                modelNames.toList(String.class));
        // One JSON entry per distinct rule; duplicate hits increment warningCount.
        JSONArray hitRules = JSON.parseArray(result.getHitRulesJson());
        assertEquals(5, hitRules.size());
        JSONObject firstRule = hitRules.getJSONObject(0);
        // First entry is ABNORMAL_CUSTOMER_TRANSACTION — presumably rule-code order;
        // confirm the builder's sorting if this assertion ever flakes.
        assertEquals("ABNORMAL_CUSTOMER_TRANSACTION", firstRule.getString("ruleCode"));
        assertEquals("异常客户交易", firstRule.getString("ruleName"));
        assertEquals("HIGH", firstRule.getString("riskLevel"));
        assertEquals(2, firstRule.getIntValue("warningCount"));
        // Per-model warning counts must match the raw hit-row multiplicities.
        JSONArray modelSummary = JSON.parseArray(result.getModelHitSummaryJson());
        Map<String, Integer> warningCountByModel = modelSummary.toList(JSONObject.class).stream()
                .collect(java.util.stream.Collectors.toMap(
                        item -> item.getString("modelCode"),
                        item -> item.getIntValue("warningCount")
                ));
        assertEquals(2, warningCountByModel.get("ABNORMAL_TRANSACTION"));
        assertEquals(3, warningCountByModel.get("LARGE_TRANSACTION"));
        assertEquals(1, warningCountByModel.get("SUSPICIOUS_PROPERTY"));
        assertEquals(1, warningCountByModel.get("SUSPICIOUS_PART_TIME"));
    }

    // Reflectively instantiates a hit-row VO and populates it through its setters.
    private Object buildHitRow(Class<?> rowClass,
                               String staffIdCard,
                               String staffName,
                               String staffCode,
                               Long deptId,
                               String deptName,
                               String modelCode,
                               String modelName,
                               String ruleCode,
                               String ruleName,
                               String riskLevel) throws Exception {
        Object row = rowClass.getDeclaredConstructor().newInstance();
        setField(rowClass, row, "setProjectId", Long.class, 40L);
        setField(rowClass, row, "setStaffIdCard", String.class, staffIdCard);
        setField(rowClass, row, "setStaffName", String.class, staffName);
        setField(rowClass, row, "setStaffCode", String.class, staffCode);
        setField(rowClass, row, "setDeptId", Long.class, deptId);
        setField(rowClass, row, "setDeptName", String.class, deptName);
        setField(rowClass, row, "setModelCode", String.class, modelCode);
        setField(rowClass, row, "setModelName", String.class, modelName);
        setField(rowClass, row, "setRuleCode", String.class, ruleCode);
        setField(rowClass, row, "setRuleName", String.class, ruleName);
        setField(rowClass, row, "setRiskLevel", String.class, riskLevel);
        return row;
    }

    // Invokes a single-argument setter on the row via reflection.
    private void setField(Class<?> rowClass, Object row, String methodName, Class<?> parameterType, Object value)
            throws Exception {
        Method method = rowClass.getMethod(methodName, parameterType);
        method.invoke(row, value);
    }
}

View File

@@ -3,8 +3,10 @@ package com.ruoyi.ccdi.project.service.impl;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoyi.ccdi.project.domain.CcdiProject; import com.ruoyi.ccdi.project.domain.CcdiProject;
import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO; import com.ruoyi.ccdi.project.domain.dto.CcdiProjectRiskModelPeopleQueryDTO;
import com.ruoyi.ccdi.project.domain.entity.CcdiProjectOverviewEmployeeResult;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeRiskAggregateVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectEmployeeRiskAggregateVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewDashboardVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewDashboardVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectOverviewEmployeeHitRowVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelCardVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelCardVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelCardsVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelCardsVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelPeopleItemVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelPeopleItemVO;
@@ -12,6 +14,7 @@ import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskModelPeopleVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskPeopleOverviewVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectRiskPeopleOverviewVO;
import com.ruoyi.ccdi.project.domain.vo.CcdiProjectTopRiskPeopleVO; import com.ruoyi.ccdi.project.domain.vo.CcdiProjectTopRiskPeopleVO;
import com.ruoyi.ccdi.project.mapper.CcdiProjectMapper; import com.ruoyi.ccdi.project.mapper.CcdiProjectMapper;
import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewEmployeeResultMapper;
import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewMapper; import com.ruoyi.ccdi.project.mapper.CcdiProjectOverviewMapper;
import com.ruoyi.common.exception.ServiceException; import com.ruoyi.common.exception.ServiceException;
import java.math.BigDecimal; import java.math.BigDecimal;
@@ -44,6 +47,12 @@ class CcdiProjectOverviewServiceImplTest {
@Mock @Mock
private CcdiProjectMapper projectMapper; private CcdiProjectMapper projectMapper;
@Mock
private CcdiProjectOverviewEmployeeResultMapper overviewEmployeeResultMapper;
@Mock
private CcdiProjectOverviewEmployeeResultBuilder overviewEmployeeResultBuilder;
@Test @Test
void shouldBuildDashboardWithNoRiskCount() { void shouldBuildDashboardWithNoRiskCount() {
CcdiProject project = new CcdiProject(); CcdiProject project = new CcdiProject();
@@ -182,6 +191,36 @@ class CcdiProjectOverviewServiceImplTest {
assertEquals("查看详情", result.getRows().getFirst().getActionLabel()); assertEquals("查看详情", result.getRows().getFirst().getActionLabel());
} }
@Test
// Verifies the refresh flow: delete old results, rebuild from hit rows, batch-insert,
// then sync the project's risk counts — all in a strict call order.
void shouldRefreshOverviewEmployeeResultsAndSyncRiskCounts() {
// Arrange: project 43 exists so the service proceeds past the lookup.
CcdiProject project = new CcdiProject();
project.setProjectId(43L);
when(projectMapper.selectById(43L)).thenReturn(project);
// One raw hit row feeds the builder, which yields one result per risk level.
List<CcdiProjectOverviewEmployeeHitRowVO> hitRows = List.of(new CcdiProjectOverviewEmployeeHitRowVO());
List<CcdiProjectOverviewEmployeeResult> results = List.of(
buildEmployeeResult("HIGH"),
buildEmployeeResult("MEDIUM"),
buildEmployeeResult("LOW")
);
when(overviewEmployeeResultMapper.selectEmployeeHitRowsByProjectId(43L)).thenReturn(hitRows);
when(overviewEmployeeResultBuilder.build(43L, hitRows, "tester")).thenReturn(results);
service.refreshOverviewEmployeeResults(43L, "tester");
// Assert the exact sequencing: lookup -> delete -> select -> build -> insert -> count sync.
org.mockito.InOrder inOrder = org.mockito.Mockito.inOrder(
projectMapper,
overviewEmployeeResultMapper,
overviewEmployeeResultBuilder
);
inOrder.verify(projectMapper).selectById(43L);
inOrder.verify(overviewEmployeeResultMapper).deleteByProjectId(43L);
inOrder.verify(overviewEmployeeResultMapper).selectEmployeeHitRowsByProjectId(43L);
inOrder.verify(overviewEmployeeResultBuilder).build(43L, hitRows, "tester");
inOrder.verify(overviewEmployeeResultMapper).insertBatch(results);
// Each risk level appears once in the results, so all three counts are 1.
inOrder.verify(projectMapper).updateRiskCountsByProjectId(43L, 1, 1, 1, "tester");
}
@Test @Test
void shouldReturnEmptyCollectionsForRiskModelCardsAndPeople() { void shouldReturnEmptyCollectionsForRiskModelCardsAndPeople() {
CcdiProject project = new CcdiProject(); CcdiProject project = new CcdiProject();
@@ -234,4 +273,10 @@ class CcdiProjectOverviewServiceImplTest {
queryDTO.setPageSize(10); queryDTO.setPageSize(10);
return queryDTO; return queryDTO;
} }
/**
 * Builds a minimal employee-result fixture carrying only the given risk level code.
 *
 * @param riskLevelCode risk level code to set, e.g. {@code "HIGH"}
 * @return a fresh result instance with only the risk level populated
 */
private CcdiProjectOverviewEmployeeResult buildEmployeeResult(String riskLevelCode) {
    final CcdiProjectOverviewEmployeeResult employeeResult = new CcdiProjectOverviewEmployeeResult();
    employeeResult.setRiskLevelCode(riskLevelCode);
    return employeeResult;
}
} }

View File

@@ -21,6 +21,15 @@ class CcdiBankTagRuleSqlMetadataTest {
assertPhase1Metadata(migrationSql); assertPhase1Metadata(migrationSql);
} }
@Test
// Ensures phase-2 rule metadata stays identical between the init SQL script
// and the dedicated migration script, so both provisioning paths agree.
void phase2MetadataSql_shouldAlignInitAndMigrationScripts() throws IOException {
String initSql = readProjectFile("sql", "2026-03-16-bank-tagging.sql");
String migrationSql = readProjectFile("sql", "migration", "2026-03-20-sync-bank-tag-phase2-rule-metadata.sql");
// Both scripts must satisfy the same phase-2 metadata assertions.
assertPhase2Metadata(initSql);
assertPhase2Metadata(migrationSql);
}
private void assertPhase1Metadata(String sqlContent) { private void assertPhase1Metadata(String sqlContent) {
assertAll( assertAll(
() -> assertTrue(sqlContent.contains("'FOREX_BUY_AMT'") () -> assertTrue(sqlContent.contains("'FOREX_BUY_AMT'")
@@ -41,6 +50,37 @@ class CcdiBankTagRuleSqlMetadataTest {
); );
} }
/**
 * Asserts that the given SQL script text contains the expected phase-2 rule
 * metadata: real-rule descriptions, indicator codes, and business-wording
 * fixes for each phase-2 rule. Uses assertAll so every missing snippet is
 * reported in one run instead of failing on the first mismatch.
 */
private void assertPhase2Metadata(String sqlContent) {
assertAll(
() -> assertTrue(sqlContent.contains("真实规则识别低收入关系人累计交易超10万元的员工对象"),
"应同步 LOW_INCOME_RELATIVE_LARGE_TRANSACTION 的真实规则说明"),
() -> assertTrue(sqlContent.contains("真实规则:识别同日多对手方且金额落在可疑区间的疑似赌博对象"),
"应同步 MULTI_PARTY_GAMBLING_TRANSFER 的真实规则说明"),
() -> assertTrue(sqlContent.contains("'MONTHLY_FIXED_INCOME'")
&& sqlContent.contains("真实规则识别近12个月持续出现稳定月度非工资收入的员工对象"),
"应同步 MONTHLY_FIXED_INCOME 的指标编码和真实规则说明"),
() -> assertTrue(sqlContent.contains("'FIXED_COUNTERPARTY_TRANSFER', '疑似兼职', NULL, 'OBJECT'")
&& sqlContent.contains("真实规则:识别固定交易对手季度转入金额落在设定区间的员工对象"),
"FIXED_COUNTERPARTY_TRANSFER 应清空旧 indicator_code 并同步真实规则说明"),
() -> assertTrue(sqlContent.contains("真实规则:识别购房支出但当前房产登记口径缺失的流水"),
"应同步 HOUSE_REGISTRATION_MISMATCH 的真实规则说明"),
() -> assertTrue(sqlContent.contains("真实规则:识别物业缴费但当前房产登记口径缺失的流水"),
"应同步 PROPERTY_FEE_REGISTRATION_MISMATCH 的真实规则说明"),
() -> assertTrue(sqlContent.contains("员工及关系人有5000元以上的纳税记录但当前资产登记口径下无房产登记。"),
"TAX_ASSET_REGISTRATION_MISMATCH 应使用当前资产登记口径表述"),
() -> assertTrue(sqlContent.contains("真实规则:识别大额纳税但当前房产登记口径缺失的流水"),
"应同步 TAX_ASSET_REGISTRATION_MISMATCH 的真实规则说明"),
() -> assertTrue(sqlContent.contains("真实规则识别单个供应商采购额占比超过70%的员工对象"),
"应同步 SUPPLIER_CONCENTRATION 的真实规则说明"),
() -> assertTrue(sqlContent.contains("工资发放后24小时内转出超过80%的资金。")
&& sqlContent.contains("真实规则识别工资入账24小时内快速转出的员工对象"),
"应同步 SALARY_QUICK_TRANSFER 的业务口径和真实规则说明"),
() -> assertTrue(sqlContent.contains("工资发放后除代扣项目外连续30天无消费或转账支出记录。")
&& sqlContent.contains("真实规则识别工资入账后30天内无消费或转账支出的员工对象"),
"应同步 SALARY_UNUSED 的业务口径修复和真实规则说明")
);
}
private String readProjectFile(String... parts) throws IOException { private String readProjectFile(String... parts) throws IOException {
Path path = Path.of("..", parts); Path path = Path.of("..", parts);
return Files.readString(path, StandardCharsets.UTF_8); return Files.readString(path, StandardCharsets.UTF_8);

View File

@@ -0,0 +1,253 @@
# 兰溪流水 Mock 第二期稳定随机命中设计
## 1. 背景
当前仓库已经具备两条基础能力:
- `lsfx-mock-server` 已支持按 `logId` 稳定随机命中第一期规则子集,并通过 Mock 流水为主工程提供可重复联调输入。
- 主工程第二期真实规则已在 [`docs/plans/backend/2026-03-20-bank-tag-real-rule-phase2-backend-implementation.md`](/Users/wkc/Desktop/ccdi/ccdi/docs/plans/backend/2026-03-20-bank-tag-real-rule-phase2-backend-implementation.md) 中明确范围,但 `lsfx-mock-server` 尚未补齐第二期命中输入与配套基线。
本次目标不是重写主工程打标逻辑,而是在保留第一期稳定随机命中方案的前提下,为第二期补齐可联调、可重复、可命中真实 SQL 的输入数据。
## 2. 目标
- 保留现有第一期 `logId -> 稳定随机规则子集` 行为不变。
- 为第二期规则新增稳定随机命中计划。
- 让“拉取本行信息 / 获取流水列表 -> 入库到本地 -> 执行重打标”后,最终标签结果中可以查到抽中的第二期规则命中。
- 对依赖采购、资产等外部事实表的规则,补最小且幂等的本地数据库基线。
## 3. 非目标
- 不新增主工程打标补丁逻辑。
- 不把采购类、资产类规则伪造成纯银行流水规则。
- 不引入“指定特殊 `logId` 固定命中全部第二期规则”的双轨模式。
- 不扩展第二期范围外的新规则。
## 4. 方案对比
### 4.1 方案 A:稳定随机命中 + 第二期基线编排
保留现有 `FileService -> StatementService -> 缓存分页` 主链路;对可由银行流水驱动的第二期规则继续通过 Mock 流水样本触发;对依赖采购、资产等外部事实表的规则,通过本地数据库最小基线补齐真实 SQL 输入。
优点:
- 与主工程真实 SQL 数据来源一致。
- 能同时覆盖“Mock 取数正确”和“真实规则可命中”。
- 保持现有第一期链路结构不变,扩展成本最低。
缺点:
- 需要同时维护流水样本和数据库基线两类输入。
### 4.2 方案 B:只改 Mock 流水
只扩充 `lsfx-mock-server` 返回的流水数据,不补采购、资产等业务表。
缺点:
- 第二期中存在真实依赖 `ccdi_purchase_transaction` 和 `ccdi_asset_info` 的规则,最终会出现 Mock 看似有数据、真实 SQL 仍然无法命中的偏差。
### 4.3 方案 C:只补数据库基线
Mock 仅返回普通流水,把第二期命中主要交给数据库灌数。
缺点:
- 无法证明“兰溪本地接口获取流水列表并入库”的链路本身正确。
- 偏离本次验收口径。
## 5. 推荐方案
采用方案 A。
原因:
- 第二期联调必须同时证明两件事:Mock 取数入库链路没问题,真实 SQL 也确实能命中。
- 采购类和资产类规则本来就不是纯流水规则,直接补最小事实基线才符合真实业务口径。
- 继续使用“同一 `logId` 稳定随机命中一部分规则”可以复用现有测试与排障习惯。
## 6. 模块边界与数据流
### 6.1 `FileService`
继续负责生成并持久化规则命中计划。
在现有字段基础上新增两组规则计划:
- `phase2_statement_hit_rules`
- `phase2_baseline_hit_rules`
职责边界:
- 只负责决定“这个 `logId` 抽中了哪些第二期规则”。
- 不直接构造流水明细,也不直接写数据库业务基线。
### 6.2 `StatementService` 与 `statement_rule_samples.py`
继续负责把命中计划转成最小流水样本,并与噪声流水合并后返回。
职责边界:
- 只生成能通过 `ccdi_bank_statement` 真实入库后触发的第二期样本。
- 不为采购、资产等外部事实规则伪造流水替代。
### 6.3 第二期基线服务
`lsfx-mock-server/services/` 下新增一个轻量服务,负责根据 `phase2_baseline_hit_rules` 幂等写入本地 MySQL 最小事实数据。
职责边界:
- 只补真实 SQL 所需输入。
- 不直接写标签结果表,不参与打标逻辑计算。
### 6.4 主工程
主工程 `CcdiFileUploadServiceImpl -> CcdiBankStatement -> CcdiBankTagServiceImpl` 链路保持不变。
验收以真实链路为准:
1. 拉取本行信息或通过兰溪接口获取流水。
2. 流水成功入库到 `ccdi_bank_statement`
3. 执行项目重打标。
4.`ccdi_bank_statement_tag_result` 或结果接口中看到抽中的第二期规则命中。
## 7. 第二期规则分层
### 7.1 流水样本直接驱动
以下规则以 `ccdi_bank_statement` 为核心输入,使用 Mock 流水样本驱动:
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `MULTI_PARTY_GAMBLING_TRANSFER`
- `MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER`
- `SALARY_QUICK_TRANSFER`
- `SALARY_UNUSED`
要求:
- 样本必须落在同一个真实员工域或其亲属域上。
- 时间窗口、金额区间、对手方稳定性等聚合条件必须在同一对象范围内闭环。
- 互斥规则不得使用同一员工样本组。
### 7.2 数据库基线驱动
以下规则需要外部事实表配合:
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- `SUPPLIER_CONCENTRATION`
处理方式:
- 三条资产不匹配规则:
- Mock 生成对应交易流水。
- 基线服务在 `ccdi_asset_info` 中补“故意缺失或不匹配”的最小资产事实。
- `SUPPLIER_CONCENTRATION`
- 直接补 `ccdi_purchase_transaction` 聚合命中基线。
- 不伪造银行流水替代。
## 8. 第二期样本设计
### 8.1 `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- 选择一个真实员工及其亲属身份证范围。
- 亲属收入基线维持低收入事实。
- 生成大额累计转入或转出流水,确保超过低收入亲属的大额交易阈值。
### 8.2 `MULTI_PARTY_GAMBLING_TRANSFER`
- 同一员工对象、同一天、多名不同对手方。
- 每笔金额落在 `MULTI_PARTY_AMT_MIN ~ MULTI_PARTY_AMT_MAX` 区间。
- 命中依赖“多人 + 多次 + 同日 + 区间金额”,不依赖敏感词。
### 8.3 `MONTHLY_FIXED_INCOME`
- 为同一员工构造连续 3 至 4 个月的固定收入转入。
- 排除工资代发主体和工资关键词。
- 每月金额稳定高于 `MONTHLY_FIXED_INCOME`
### 8.4 `FIXED_COUNTERPARTY_TRANSFER`
- 为同一员工与固定对手方构造季度稳定转入。
- 每季累计金额落在 `FIXED_COUNTERPARTY_TRANSFER_MIN ~ FIXED_COUNTERPARTY_TRANSFER_MAX` 区间。
### 8.5 `SALARY_QUICK_TRANSFER`
- 先生成可识别工资入账。
- 再在 24 小时内生成大额转出,满足比例条件。
### 8.6 `SALARY_UNUSED`
- 先生成工资入账。
- 后续 30 天不生成可计入“工资使用”的消费或转账支出。
-`SALARY_QUICK_TRANSFER` 不共用同一员工对象。
### 8.7 资产不匹配三条规则
- `HOUSE_REGISTRATION_MISMATCH`:生成购房交易流水,但不提供可匹配房产登记。
- `PROPERTY_FEE_REGISTRATION_MISMATCH`:生成物业缴费流水,但不提供对应房产登记。
- `TAX_ASSET_REGISTRATION_MISMATCH`:生成大额纳税流水,但不提供与税费相符的资产登记。
### 8.8 `SUPPLIER_CONCENTRATION`
- 为同一申请人写入多笔采购记录。
- 使单一供应商采购额占比显著超过 70%。
- 使用固定业务主键和稳定供应商集合,确保可重复验证。
## 9. 实现落点
- Modify: `lsfx-mock-server/services/file_service.py`
- Modify: `lsfx-mock-server/services/statement_service.py`
- Modify: `lsfx-mock-server/services/statement_rule_samples.py`
- Add: `lsfx-mock-server/services/phase2_baseline_service.py`
- Add/Modify: `lsfx-mock-server/tests/test_file_service.py`
- Add/Modify: `lsfx-mock-server/tests/test_statement_service.py`
- Add/Modify: `lsfx-mock-server/tests/integration/test_full_workflow.py`
- Add: `sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql`
## 10. 幂等策略
- 规则命中计划继续由 `logId` 决定,不新增第二随机源。
- 流水样本继续由 `StatementService` 缓存,同一 `logId` 重复分页读取结果一致。
- 采购基线使用固定业务主键,按固定主键删后重建。
- 资产基线使用固定特征组合,确保可精确清理与重建。
- 所有基线写入都必须只作用于本次选中的员工域,避免污染无关人员。
## 11. 验证设计
### 11.1 Mock 单元测试
- 第二期规则池抽样稳定性。
- 第二期流水样本只装配被抽中的规则。
- 第二期数据库基线写入的幂等性。
### 11.2 Mock 集成测试
- `getJZFileOrZjrcuFile -> getBSByLogId` 同一 `logId` 下结果稳定。
- 抽中第二期规则时,流水样本与基线写入同步成立。
### 11.3 主工程端到端验证
- 拉取本行信息后流水成功入库。
- 执行项目重打标后,第二期目标规则出现在结果中。
- 明细型规则可通过流水详情接口回查。
- 对象型规则可在标签结果表中看到正确 `objectType/objectKey/reasonDetail`
## 12. 风险与边界
- 不为了联调成功去修改主工程真实 SQL。
- 不把 `SUPPLIER_CONCENTRATION` 伪造成纯流水命中。
- 不引入兜底、补丁或兼容性双轨逻辑。
- 若第二期真实 SQL 与现有基线字段口径不一致,按验证失败暴露,不在 Mock 侧偷偷修正业务规则。
## 13. 结论
本方案采用“保留第一期稳定随机命中方案 + 为第二期补齐稳定随机命中计划、最小流水样本和幂等数据库基线”的最短路径实现。
这样可以同时保证:
- 兰溪本地接口能够正确返回并入库存量流水;
- 第二期新增模型所需输入事实完整;
- 主工程重打标后能够命中新增模型的真实规则结果。

View File

@@ -0,0 +1,347 @@
# 结果总览员工结果表设计文档
**模块**: 初核项目详情 - 结果总览
**日期**: 2026-03-20
**状态**: 已确认
## 一、背景
当前结果总览页已接通的查询包括:
- 风险仪表盘
- 风险人员总览
- 模型预警次数统计
- 命中模型涉及人员
现状中,这些查询依赖 `ccdi_bank_statement_tag_result`、`ccdi_bank_statement`、`ccdi_base_staff`、`ccdi_staff_fmy_relation` 的运行时连表归并与聚合。页面访问时会重复执行重 SQL,导致响应时间偏长,尤其是模型区和风险人员总览区块。
本轮目标是新增一张结果总览结果表,把“项目内每个员工的模型触发情况”预先沉淀下来,再基于该表编写页面查询 SQL避免结果总览页面运行时直接连回源表。
## 二、目标
本次设计目标如下:
1. 新增一张结果总览员工结果表,记录项目内每个员工的结果快照。
2. 员工亲属的流水异常命中需归并计入该员工本人。
3. 页面查询只依赖项目表和结果总览员工结果表,不再运行时连 `标签结果表 + 流水表 + 员工表 + 亲属表`
4. 命中结果写库成功后,在同一事务内按项目整块重算结果总览员工结果表。
5. 保持当前页面统计口径不变,尤其是模型卡片 `warningCount` 仍定义为原始标签命中次数。
## 三、范围
### 3.1 本次范围
- 设计结果总览员工结果表结构
- 设计员工本人和亲属命中的归并口径
- 设计命中结果写库后的同步重算链路
- 设计结果总览 4 类查询从结果表取数的方案
- 后续输出前后端实施计划与实施记录
### 3.2 不在本次范围
- 不覆盖结果总览风险明细区块
- 不引入异步刷新、缓存或消息队列
- 不做增量更新,只做按项目整块重算
- 不新增多张平行结果表
- 不保留中高风险 TOP10 区块及其专用查询设计
## 四、当前口径约束
本次设计确认以下业务口径:
1. 结果表粒度为 `项目 + 员工`,一行代表一个项目内一个归并后的员工结果。
2. 员工亲属的流水异常命中,必须归并到对应员工本人名下。
3. 命中结果写库成功后,结果表需要在同一事务内同步重算完成。
4. 重算方式采用“按项目整块重算当前已接通区块所需数据”。
5. 模型卡片中的 `warningCount` 定义保持现状,为原始标签命中次数,而不是员工人数或规则去重数。
## 五、方案对比
### 5.1 方案 A:直接存页面结果快照
做法:
- 按项目保存仪表盘、风险人员总览、模型卡片、模型人员列表等页面结果快照
问题:
- `命中模型涉及人员` 存在 `ANY / ALL / keyword / deptId / pageNum` 等动态筛选
- 一旦直接存页面结果,筛选和分页要么退化为内存处理,要么继续依赖复杂 JSON 查询
- 无法满足“先保存员工触发情况,再根据结果表写 SQL”的要求
### 5.2 方案 B:单表保存项目内每个员工的结果快照
做法:
- 新增一张结果总览员工结果表
-`project_id + staff_id_card` 为唯一口径
- 每行保存员工基础信息、风险统计、模型命中快照、规则命中快照
- 页面查询全部基于该表聚合或筛选
优点:
- 符合“先存员工触发情况,再写页面 SQL”的要求
- 页面查询可以收敛为单表查询
- 兼容 `ANY / ALL`、关键字、部门、分页等动态筛选
- 与当前统计口径兼容,不需要改业务定义
缺点:
- 命中结果写库后需要按项目整块重算该表
- 表中会存在部分汇总冗余字段和 JSON 快照字段
### 5.3 方案 C:多张专用结果表
做法:
- 仪表盘、风险人员、模型卡片、模型人员分别建独立结果表
问题:
- 不符合本轮“创建一张结果总览结果表”的目标
- 数据维护面更大DDL 和重算逻辑更分散
### 5.4 推荐方案
采用方案 B:新增一张“结果总览员工结果表”,以员工为核心粒度预聚合,再由页面查询基于该表出数。
## 六、结果表设计
建议表名:
- `ccdi_project_overview_employee_result`
建议主键与唯一性约束:
- 主键:`id`
- 唯一键:`uk_project_staff (project_id, staff_id_card)`
建议字段如下。
### 6.1 基础标识字段
- `project_id`
- `staff_id_card`
- `staff_id`
- `staff_name`
- `staff_code`
- `dept_id`
- `dept_name`
### 6.2 风险统计字段
- `rule_count`
- 该员工命中的规则数,按 `rule_code` 去重
- `model_count`
- 该员工命中的模型数,按 `model_code` 去重
- `hit_count`
- 该员工归并后的原始标签命中总次数
- `risk_level_code`
- 依据现有规则推导 `HIGH / MEDIUM / LOW`
- `top_rule_code`
- `top_rule_name`
- `risk_point`
- 按命中次数降序、规则编码升序拼接规则名
### 6.3 模型与规则快照字段
- `model_codes_csv`
- 员工命中的全部模型编码,逗号分隔,统一按编码排序
- `model_names_json`
- 员工命中的模型名称数组
- `hit_rules_json`
- 员工命中的规则数组,元素建议至少包含:
- `ruleCode`
- `ruleName`
- `riskLevel`
- `modelCode`
- `modelName`
- `hitCount`
- `model_hit_summary_json`
- 员工按模型汇总的数组,元素建议至少包含:
- `modelCode`
- `modelName`
- `warningCount`
- `ruleCount`
- `hitRuleList`
其中:
- `warningCount` 表示该员工在该模型下的原始标签命中次数
- `hitRuleList` 用于页面“异常标签”展示
### 6.4 审计字段
- `create_by`
- `create_time`
- `update_by`
- `update_time`
## 七、员工归并与重算链路
### 7.1 归并口径
员工结果重算时,继续沿用当前结果总览链路中的归并规则:
1. 员工本人命中,归并到本人。
2. 当命中对象为空或为亲属对象时,通过流水身份证号或亲属身份证号映射到员工本人。
3. 无法归并到员工本人的命中记录,不进入结果总览员工结果表。
本轮不调整归并逻辑,只把这一步从页面查询阶段前移到结果表重算阶段。
### 7.2 重算触发时机
触发点放在命中结果写库成功之后、事务提交之前。
触发顺序固定为:
1. 写入当前项目标签结果
2. 删除当前项目历史员工结果
3. 基于当前项目全量标签结果,按员工归并口径重算员工结果
4. 批量插入新的员工结果
5. 同事务刷新项目表中的高/中/低风险人数
6. 提交事务
### 7.3 重算方式
本次采用按项目整块重算,不做增量更新。
原因:
- 需求已明确接受按项目整块重算
- 这样实现路径最短,且避免增量口径漂移
- 同事务内先删后插更容易保证一致性
## 八、页面查询改造方案
改造后,结果总览页查询只依赖:
- `ccdi_project`
- `ccdi_project_overview_employee_result`
### 8.1 风险仪表盘
来源:
- `ccdi_project.target_count`
- 员工结果表按 `risk_level_code` 的人数统计
查询方式:
- 高风险人数:结果表中 `risk_level_code = 'HIGH'`
- 中风险人数:结果表中 `risk_level_code = 'MEDIUM'`
- 低风险人数:结果表中 `risk_level_code = 'LOW'`
- 无风险人数:`target_count - high - medium - low`
### 8.2 风险人员总览
来源:
- 员工结果表单表查询
返回字段:
- `staff_name`
- `staff_id_card`
- `dept_name`
- `hit_count`
- `risk_level_code`
- `model_count`
- `risk_point`
排序延续现有口径:
- 风险等级
- 模型数
- 规则数
- 员工身份证号
### 8.3 模型预警次数统计
来源:
- 员工结果表中的 `model_hit_summary_json`
查询逻辑:
1. 先按项目过滤员工结果表
2. 展开每位员工的模型汇总
3.`modelCode` 聚合
统计口径:
- `warningCount` = 各员工该模型 `warningCount` 求和
- `peopleCount` = 命中该模型的员工人数
排序口径:
- `warningCount desc`
- `model_code asc`
### 8.4 命中模型涉及人员
来源:
- 员工结果表中的 `model_codes_csv`
- `model_names_json`
- `hit_rules_json`
- `model_hit_summary_json`
筛选规则:
- `keyword`:匹配员工姓名或工号
- `deptId`:精确匹配部门
- `modelCodes`
- `ANY`:命中任一所选模型即可
- `ALL`:必须同时命中全部所选模型
实现建议:
- `ANY / ALL` 筛选优先基于 `model_codes_csv`
- `modelNames``hitTagList` 展示基于 JSON 快照组装
这样可以把:
- 结构化筛选
- 模型条件判断
- 分页排序
都控制在结果表层完成,避免回源连表。
## 九、索引建议
为保证页面查询稳定,建议增加如下索引:
- 唯一键:`(project_id, staff_id_card)`
- 普通索引:`(project_id, risk_level_code)`
- 普通索引:`(project_id, dept_id)`
- 普通索引:`(project_id, staff_name)`
- 普通索引:`(project_id, staff_code)`
本轮先不扩展模型专用索引,优先依赖整项目重算后的结果表降本。如果后续 `model_codes_csv` 筛选成为瓶颈,再评估是否引入模型筛选辅助列。
## 十、测试与验收口径
本次设计的验收以“结果表口径与当前业务口径一致”为准,重点验证:
1. 同一项目下,结果表员工数应等于当前可归并出的员工数。
2. 员工亲属的流水异常命中必须归并到员工本人。
3. 风险仪表盘的高/中/低人数与结果表按等级统计一致。
4. 风险人员总览展示字段与结果表字段映射一致。
5. 模型卡片 `warningCount` 必须等于原始标签命中次数汇总。
6. 模型卡片 `peopleCount` 必须等于命中该模型的员工人数。
7. 命中模型涉及人员在 `ANY / ALL / keyword / deptId` 条件下的结果,必须与旧逻辑一致。
## 十一、结论
本次设计采用“单张结果总览员工结果表”作为结果总览页的唯一预聚合载体。
核心结论如下:
1. 结果表粒度为 `项目 + 员工`
2. 员工亲属命中统一归并到员工本人。
3. 命中结果写库成功后,同一事务内按项目整块重算结果表。
4. 页面查询不再直接运行重连表聚合 SQL而是基于结果表聚合或筛选。
5. 模型卡片 `warningCount` 继续保持原始标签命中次数口径。
该方案满足“最短路径实现、口径一致、页面查询降本”的目标,后续可据此输出前后端实施计划并进入实现阶段。

View File

@@ -0,0 +1,573 @@
# LSFX Mock Phase 2 Random Hit Backend Implementation Plan
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** 改造 `lsfx-mock-server`,在保留第一期稳定随机命中方案的前提下,为第二期规则补齐稳定随机命中计划、最小流水样本和幂等数据库基线,使兰溪本地接口取数入库后可命中新增加的第二期真实规则。
**Architecture:** 保持现有 `FileService -> StatementService -> 缓存分页` 主链路不变,不新增兼容性双轨。`FileService` 只负责生成并持久化第二期规则命中计划,`statement_rule_samples.py` 只负责装配可由银行流水驱动的第二期样本,新增的 `phase2_baseline_service.py` 负责幂等补齐采购与资产事实基线;主工程继续沿用现有真实 SQL 打标链路,不加联调补丁。
**Tech Stack:** Python 3, FastAPI, pytest, PyMySQL, MySQL, Bash
---
## File Structure
- `lsfx-mock-server/services/file_service.py`: 在 `FileRecord` 中新增第二期命中计划字段,并让上传与拉取行内流水链路都能持久化第二期规则子集。
- `lsfx-mock-server/services/statement_rule_samples.py`: 按规则代码补齐第二期流水样本 builder保证每条规则只生成最小命中样本。
- `lsfx-mock-server/services/statement_service.py`: 读取第二期规则命中计划,装配第二期流水样本并保持缓存稳定性。
- `lsfx-mock-server/services/phase2_baseline_service.py`: 基于项目配置中的数据库连接信息,幂等写入第二期采购、资产与低收入事实基线。
- `lsfx-mock-server/tests/test_file_service.py`: 锁定第二期命中计划生成与持久化语义。
- `lsfx-mock-server/tests/test_statement_service.py`: 锁定第二期样本装配、互斥规则隔离与缓存稳定性。
- `lsfx-mock-server/tests/test_phase2_baseline_service.py`: 锁定第二期数据库基线写入的幂等性与命中前提。
- `lsfx-mock-server/tests/integration/test_full_workflow.py`: 验证 `getJZFileOrZjrcuFile -> getBSByLogId` 端到端链路在第二期下仍保持稳定。
- `sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql`: 固化第二期采购、资产与低收入事实基线的最小 SQL。
- `docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-backend-record.md`: 记录本次后端实施范围、规则分层与落地结果。
- `docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-backend-verification.md`: 记录测试命令、数据库核验和端到端结果。
### Task 1: 在 FileService 中持久化第二期稳定随机命中计划
**Files:**
- Modify: `lsfx-mock-server/services/file_service.py`
- Modify: `lsfx-mock-server/tests/test_file_service.py`
- Reference: `docs/design/2026-03-20-lsfx-mock-phase2-random-hit-design.md`
- [ ] **Step 1: Write the failing test**
`lsfx-mock-server/tests/test_file_service.py` 中先补两条失败用例,锁定“同一 `logId` 第二期命中计划稳定”和“`fetch_inner_flow()` 会把第二期命中计划落到 `FileRecord`”:
```python
def test_build_rule_hit_plan_should_include_phase2_rule_sets():
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan1 = service._build_rule_hit_plan(20001)
plan2 = service._build_rule_hit_plan(20001)
assert plan1 == plan2
assert 2 <= len(plan1["phase2_statement_hit_rules"]) <= 4
assert 2 <= len(plan1["phase2_baseline_hit_rules"]) <= 4
def test_fetch_inner_flow_should_persist_phase2_rule_hit_plan(monkeypatch):
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
monkeypatch.setattr(
service,
"_build_rule_hit_plan",
lambda log_id: {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": [
"MULTI_PARTY_GAMBLING_TRANSFER",
"SALARY_QUICK_TRANSFER",
],
"phase2_baseline_hit_rules": [
"SUPPLIER_CONCENTRATION",
"HOUSE_REGISTRATION_MISMATCH",
],
},
)
response = service.fetch_inner_flow(
{
"groupId": 1001,
"customerNo": "phase2_customer_001",
"dataChannelCode": "test_code",
"requestDateId": 20240101,
"dataStartDateId": 20240101,
"dataEndDateId": 20240131,
"uploadUserId": 902001,
}
)
log_id = response["data"][0]
record = service.file_records[log_id]
assert record.phase2_statement_hit_rules == [
"MULTI_PARTY_GAMBLING_TRANSFER",
"SALARY_QUICK_TRANSFER",
]
assert record.phase2_baseline_hit_rules == [
"SUPPLIER_CONCENTRATION",
"HOUSE_REGISTRATION_MISMATCH",
]
```
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_file_service.py -k "phase2_rule_hit_plan" -v
```
Expected:
- `FAIL`
- 原因是 `FileRecord` 尚未保存第二期规则命中计划,`_build_rule_hit_plan()` 也未生成第二期规则子集
- [ ] **Step 3: Write minimal implementation**
`lsfx-mock-server/services/file_service.py` 中只做最小改动:
1.`FileRecord` 新增两个字段:
```python
phase2_statement_hit_rules: List[str] = field(default_factory=list)
phase2_baseline_hit_rules: List[str] = field(default_factory=list)
```
2. 定义第二期规则池,按设计分层:
```python
PHASE2_STATEMENT_RULE_CODES = [
"LOW_INCOME_RELATIVE_LARGE_TRANSACTION",
"MULTI_PARTY_GAMBLING_TRANSFER",
"MONTHLY_FIXED_INCOME",
"FIXED_COUNTERPARTY_TRANSFER",
"SALARY_QUICK_TRANSFER",
"SALARY_UNUSED",
]
PHASE2_BASELINE_RULE_CODES = [
"HOUSE_REGISTRATION_MISMATCH",
"PROPERTY_FEE_REGISTRATION_MISMATCH",
"TAX_ASSET_REGISTRATION_MISMATCH",
"SUPPLIER_CONCENTRATION",
]
```
3. 复用现有 `_pick_rule_subset()`,在 `_build_rule_hit_plan()` 中新增第二期两组子集。
4.`_create_file_record()``upload_file()``fetch_inner_flow()` 中都透传并保存第二期规则计划。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_file_service.py -k "phase2_rule_hit_plan" -v
```
Expected:
- `PASS`
- 第二期规则计划已按 `logId` 稳定随机生成并持久化
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/services/file_service.py lsfx-mock-server/tests/test_file_service.py
git commit -m "补充第二期Mock规则命中计划"
```
### Task 2: 新增第二期数据库基线服务并幂等补齐事实输入
**Files:**
- Create: `lsfx-mock-server/services/phase2_baseline_service.py`
- Create: `lsfx-mock-server/tests/test_phase2_baseline_service.py`
- Create: `sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql`
- Reference: `lsfx-mock-server/config/settings.py`
- Reference: `docs/design/2026-03-20-lsfx-mock-phase2-random-hit-design.md`
- [ ] **Step 1: Write the failing test**
先在 `lsfx-mock-server/tests/test_phase2_baseline_service.py` 中补最小失败用例,锁定“抽中第二期基线规则时会生成幂等 SQL”与“互斥/无关规则不会写脏数据”:
```python
def test_apply_phase2_baselines_should_return_idempotent_sql_plan():
service = Phase2BaselineService()
sql_plan = service.build_sql_plan(
staff_id_card="330101198801010011",
family_id_cards=["330101199001010022"],
baseline_rule_codes=[
"SUPPLIER_CONCENTRATION",
"HOUSE_REGISTRATION_MISMATCH",
],
)
assert any("LSFXMOCKP2PUR" in sql for sql in sql_plan)
assert any("ccdi_asset_info" in sql for sql in sql_plan)
assert all("DELETE" in sql or "INSERT" in sql for sql in sql_plan)
def test_apply_phase2_baselines_should_skip_unselected_rules():
service = Phase2BaselineService()
sql_plan = service.build_sql_plan(
staff_id_card="330101198801010011",
family_id_cards=[],
baseline_rule_codes=["SUPPLIER_CONCENTRATION"],
)
assert any("ccdi_purchase_transaction" in sql for sql in sql_plan)
assert not any("ccdi_asset_info" in sql for sql in sql_plan)
```
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_phase2_baseline_service.py -v
```
Expected:
- `FAIL`
- 原因是 `Phase2BaselineService` 与第二期幂等基线 SQL 规划尚不存在
- [ ] **Step 3: Write minimal implementation**
`lsfx-mock-server/services/phase2_baseline_service.py` 中新增最小服务:
1. 复用 `config/settings.py` 中的数据库连接配置。
2. 提供两个入口:
```python
def build_sql_plan(self, staff_id_card: str, family_id_cards: List[str], baseline_rule_codes: List[str]) -> List[str]:
...
def apply(self, staff_id_card: str, family_id_cards: List[str], baseline_rule_codes: List[str]) -> None:
...
```
3. 对四类第二期基线规则分别输出幂等 SQL
- `SUPPLIER_CONCENTRATION`:固定采购业务主键,先删后插;
- `HOUSE_REGISTRATION_MISMATCH` / `PROPERTY_FEE_REGISTRATION_MISMATCH` / `TAX_ASSET_REGISTRATION_MISMATCH`:按固定资产标识清理并重建“故意不匹配”资产事实;
4. 将稳定 SQL 同步沉淀到 `sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql`,便于单独重放和排障。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_phase2_baseline_service.py -v
```
Expected:
- `PASS`
- 第二期数据库基线服务可生成幂等 SQL 计划
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/services/phase2_baseline_service.py lsfx-mock-server/tests/test_phase2_baseline_service.py sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql
git commit -m "补充第二期Mock基线编排服务"
```
### Task 3: 按规则代码补齐第二期流水样本并接入 StatementService
**Files:**
- Modify: `lsfx-mock-server/services/statement_rule_samples.py`
- Modify: `lsfx-mock-server/services/statement_service.py`
- Modify: `lsfx-mock-server/tests/test_statement_service.py`
- [ ] **Step 1: Write the failing test**
`lsfx-mock-server/tests/test_statement_service.py` 中补两组失败用例,锁定“只装配被选中的第二期规则样本”和“互斥工资类规则不落在同一员工对象上”:
```python
def test_build_seed_statements_for_rule_plan_should_only_include_requested_phase2_rules():
plan = {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": [
"MULTI_PARTY_GAMBLING_TRANSFER",
"SALARY_QUICK_TRANSFER",
],
"phase2_baseline_hit_rules": [],
}
statements = build_seed_statements_for_rule_plan(
group_id=1000,
log_id=30001,
rule_plan=plan,
)
assert any(item["userMemo"] == "工资入账" for item in statements)
assert any(item["customerName"] == "欢乐游戏科技有限公司" for item in statements)
assert not any(item["userMemo"] == "季度稳定兼职收入" for item in statements)
def test_salary_quick_transfer_and_salary_unused_should_use_different_identity_groups():
plan = {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": [
"SALARY_QUICK_TRANSFER",
"SALARY_UNUSED",
],
"phase2_baseline_hit_rules": [],
}
statements = build_seed_statements_for_rule_plan(
group_id=1000,
log_id=30001,
rule_plan=plan,
)
salary_id_cards = {
item["cretNo"]
for item in statements
if item["userMemo"] == "工资入账"
}
assert len(salary_id_cards) >= 2
```
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_statement_service.py -k "requested_phase2_rules or salary_quick_transfer_and_salary_unused" -v
```
Expected:
- `FAIL`
- 原因是当前样本模块还没有第二期 builder 与互斥规则隔离能力
- [ ] **Step 3: Write minimal implementation**
`lsfx-mock-server/services/statement_rule_samples.py` 中补齐第二期样本 builder 与映射:
```python
PHASE2_STATEMENT_RULE_BUILDERS = {
"LOW_INCOME_RELATIVE_LARGE_TRANSACTION": build_low_income_relative_large_transaction_samples,
"MULTI_PARTY_GAMBLING_TRANSFER": build_multi_party_gambling_transfer_samples,
"MONTHLY_FIXED_INCOME": build_monthly_fixed_income_samples,
"FIXED_COUNTERPARTY_TRANSFER": build_fixed_counterparty_transfer_samples,
"SALARY_QUICK_TRANSFER": build_salary_quick_transfer_samples,
"SALARY_UNUSED": build_salary_unused_samples,
}
```
实现要求:
- `MULTI_PARTY_GAMBLING_TRANSFER`:同一员工、同一天、多个对手方、区间金额;
- `MONTHLY_FIXED_INCOME`:连续 3 至 4 个月固定转入,排除工资代发;
- `FIXED_COUNTERPARTY_TRANSFER`:固定对手方、季度稳定区间金额;
- `SALARY_QUICK_TRANSFER`:工资入账后 24 小时内大额转出;
- `SALARY_UNUSED`:工资入账后 30 天无有效使用记录;
- `SALARY_QUICK_TRANSFER``SALARY_UNUSED` 必须使用不同 identity group。
同时在 `lsfx-mock-server/services/statement_service.py` 中:
1. 读取 `record.phase2_statement_hit_rules`
2. 把第二期样本装配进 `build_seed_statements_for_rule_plan(...)`
3. 保持总数 `FIXED_TOTAL_COUNT = 200`、ID 分配和缓存语义不变。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_statement_service.py -k "requested_phase2_rules or salary_quick_transfer_and_salary_unused" -v
```
Expected:
- `PASS`
- 第二期样本已按规则子集装配,工资类互斥规则已隔离
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/services/statement_rule_samples.py lsfx-mock-server/services/statement_service.py lsfx-mock-server/tests/test_statement_service.py
git commit -m "补充第二期Mock流水样本生成"
```
### Task 4: 在拉取链路中接通第二期基线写入并补集成回归
**Files:**
- Modify: `lsfx-mock-server/services/file_service.py`
- Modify: `lsfx-mock-server/services/statement_service.py`
- Modify: `lsfx-mock-server/tests/integration/test_full_workflow.py`
- Modify: `lsfx-mock-server/tests/test_file_service.py`
- Reference: `lsfx-mock-server/routers/api.py`
- [ ] **Step 1: Write the failing test**
`lsfx-mock-server/tests/integration/test_full_workflow.py` 中补失败用例,锁定“同一 `logId` 抽中第二期基线规则时,获取流水前已补齐对应数据库事实”:
```python
def test_inner_flow_should_apply_phase2_baselines_before_get_bank_statement(client, monkeypatch):
from routers.api import file_service
applied = {}
def fake_apply(**kwargs):
applied["called"] = True
applied["baseline_rule_codes"] = kwargs["baseline_rule_codes"]
monkeypatch.setattr(file_service.phase2_baseline_service, "apply", fake_apply)
monkeypatch.setattr(
file_service,
"_build_rule_hit_plan",
lambda log_id: {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": ["MONTHLY_FIXED_INCOME"],
"phase2_baseline_hit_rules": ["SUPPLIER_CONCENTRATION"],
},
)
response = client.post(
"/watson/api/project/getJZFileOrZjrcuFile",
data={
"groupId": 1001,
"customerNo": "phase2_customer",
"dataChannelCode": "channel_code",
"requestDateId": 20240101,
"dataStartDateId": 20240101,
"dataEndDateId": 20240131,
"uploadUserId": 902001,
},
)
assert response.status_code == 200
assert applied["called"] is True
assert applied["baseline_rule_codes"] == ["SUPPLIER_CONCENTRATION"]
```
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/integration/test_full_workflow.py -k "apply_phase2_baselines" -v
```
Expected:
- `FAIL`
- 原因是当前 `fetch_inner_flow()` 链路还没有接通第二期基线服务
- [ ] **Step 3: Write minimal implementation**
`lsfx-mock-server/services/file_service.py` 中:
1.`__init__()` 中注入 `phase2_baseline_service`
2.`fetch_inner_flow()``upload_file()` 创建 `FileRecord` 后、返回响应前,根据当前记录的 `phase2_baseline_hit_rules` 调用:
```python
self.phase2_baseline_service.apply(
staff_id_card=file_record.staff_id_card,
family_id_cards=file_record.family_id_cards,
baseline_rule_codes=file_record.phase2_baseline_hit_rules,
)
```
要求:
- 只对当前 `logId` 命中的第二期基线规则写入;
- 不因空规则集报错;
- 基线写入异常直接暴露,避免产生“流水有了但基线未写”的假成功状态。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/integration/test_full_workflow.py -k "apply_phase2_baselines" -v
```
Expected:
- `PASS`
- 第二期基线写入已在拉取链路中接通
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/services/file_service.py lsfx-mock-server/tests/integration/test_full_workflow.py
git commit -m "接通第二期Mock基线写入链路"
```
### Task 5: 完成第二期回归、数据库核验与实施记录
**Files:**
- Modify: `docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-design-record.md`
- Create: `docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-backend-record.md`
- Create: `docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-backend-verification.md`
- Test: `lsfx-mock-server/tests/test_file_service.py`
- Test: `lsfx-mock-server/tests/test_statement_service.py`
- Test: `lsfx-mock-server/tests/test_phase2_baseline_service.py`
- Test: `lsfx-mock-server/tests/integration/test_full_workflow.py`
- [ ] **Step 1: Run focused and full regression**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_file_service.py -k "phase2_rule_hit_plan" -v
python3 -m pytest tests/test_phase2_baseline_service.py -v
python3 -m pytest tests/test_statement_service.py -k "phase2 or salary_quick_transfer_and_salary_unused" -v
python3 -m pytest tests/integration/test_full_workflow.py -k "phase2" -v
python3 -m pytest tests/test_file_service.py tests/test_statement_service.py tests/test_phase2_baseline_service.py tests/test_api.py tests/integration/test_full_workflow.py -v
```
Expected:
- 聚焦用例全部 `PASS`
- 全量回归 `PASS`
- [ ] **Step 2: Verify database baselines**
Run:
```bash
bin/mysql_utf8_exec.sh sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql
```
再使用只读 SQL 核验:
```sql
SELECT purchase_id, supplier_name, actual_amount
FROM ccdi_purchase_transaction
WHERE purchase_id LIKE 'LSFXMOCKP2PUR%';
SELECT asset_name, person_id, asset_main_type, asset_status
FROM ccdi_asset_info
WHERE asset_name LIKE 'LSFX Mock P2%';
```
Expected:
- 第二期采购与资产基线存在
- 重跑 SQL 后结果仍稳定,无重复脏数据
- [ ] **Step 3: Write implementation record**
`docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-backend-record.md` 中记录:
- 第二期规则如何拆分为流水样本与数据库基线两层
- `FileService``StatementService``Phase2BaselineService` 的职责边界
- 互斥工资规则的样本隔离策略
- 幂等 SQL 方案与数据库基线范围
- [ ] **Step 4: Write verification record**
`docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-backend-verification.md` 中记录:
- pytest 执行命令与结果摘要
- SQL 执行与核验结果
- 端到端接口链路结果
- 结论与环境清理情况
- [ ] **Step 5: Commit**
```bash
git add docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-design-record.md docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-backend-record.md docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-backend-verification.md
git add lsfx-mock-server/tests/test_file_service.py lsfx-mock-server/tests/test_statement_service.py lsfx-mock-server/tests/test_phase2_baseline_service.py lsfx-mock-server/tests/integration/test_full_workflow.py
git commit -m "完成第二期Mock随机命中回归验证"
```

View File

@@ -0,0 +1,345 @@
# Results Overview Employee Result Table Backend Implementation Plan
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** 为结果总览页新增按“项目 + 员工”维度沉淀的员工结果表,并将风险仪表盘、风险人员总览、模型预警次数统计、命中模型涉及人员 4 类后端查询切换为只读该结果表。
**Architecture:** 在 `ccdi-project` 内新增结果总览员工结果表及其维护 Mapper,保留当前“员工本人 + 亲属归并到员工本人”的归并口径,但把重连表归并从页面查询阶段前移到标签写库后的同事务重算阶段。页面查询继续沿用 `CcdiProjectOverviewController + Service + Mapper` 入口,不新增平行接口,只替换底层查询数据源。
**Tech Stack:** Java 21, Spring Boot 3, MyBatis XML, MyBatis-Plus, Maven, MySQL, JUnit 5, Mockito
---
### Task 1: 新增结果总览员工结果表与基础映射
**Files:**
- Create: `sql/migration/2026-03-20-create-project-overview-employee-result-table.sql`
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/entity/CcdiProjectOverviewEmployeeResult.java`
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapper.java`
- Create: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewEmployeeResultMapper.xml`
- Test: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/domain/entity/CcdiProjectOverviewEmployeeResultEntityTest.java`
- Test: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapperXmlTest.java`
- [ ] **Step 1: Write the failing tests**
新增实体与 XML 静态测试,锁定以下内容:
- 结果表名为 `ccdi_project_overview_employee_result`
- 唯一键包含 `project_id` 与 `staff_id_card`
- 字段包含:
- `rule_count`
- `model_count`
- `hit_count`
- `risk_level_code`
- `model_codes_csv`
- `model_names_json`
- `hit_rules_json`
- `model_hit_summary_json`
- Mapper XML 至少声明:
- `deleteByProjectId`
- `insertBatch`
- `selectByProjectId`
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultEntityTest,CcdiProjectOverviewEmployeeResultMapperXmlTest
```
Expected:
- `FAIL`
- 原因是结果表实体、Mapper 与 SQL 脚本尚未创建
- [ ] **Step 3: Write minimal implementation**
创建结果表结构与基础映射,要求:
- 表粒度固定为 `project_id + staff_id_card`
- JSON 字段使用可承载结构化快照的类型
- 审计字段与项目现有表风格保持一致
- Mapper XML 只提供本轮需要的最小 CRUD
- 按项目删除
- 批量插入
- 按项目查询
SQL 脚本如需执行,使用:
```bash
bin/mysql_utf8_exec.sh sql/migration/2026-03-20-create-project-overview-employee-result-table.sql
```
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultEntityTest,CcdiProjectOverviewEmployeeResultMapperXmlTest
```
Expected:
- `PASS`
- [ ] **Step 5: Commit**
```bash
git add sql/migration/2026-03-20-create-project-overview-employee-result-table.sql ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/entity/CcdiProjectOverviewEmployeeResult.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapper.java ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewEmployeeResultMapper.xml ccdi-project/src/test/java/com/ruoyi/ccdi/project/domain/entity/CcdiProjectOverviewEmployeeResultEntityTest.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapperXmlTest.java
git commit -m "新增结果总览员工结果表结构"
```
### Task 2: 实现员工归并明细查询与结果表聚合构建
**Files:**
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeHitRowVO.java`
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeRuleSummaryVO.java`
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeModelSummaryVO.java`
- Create: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilder.java`
- Modify: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewEmployeeResultMapper.xml`
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapper.java`
- Test: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilderTest.java`
- [ ] **Step 1: Write the failing test**
为结果构建器新增单测,锁定以下聚合口径:
- 同一员工多条命中按 `staff_id_card` 合并为一条结果
- 亲属命中已归并到员工本人后再参与聚合
- `ruleCount` 按 `rule_code` 去重
- `modelCount` 按 `model_code` 去重
- `hitCount` 为原始命中次数累计
- `riskLevelCode` 沿用当前分级:
- `>= 5` 为 `HIGH`
- `2-4` 为 `MEDIUM`
- 其余为 `LOW`
- `model_hit_summary_json` 中每个模型的 `warningCount` 为原始标签命中次数
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultBuilderTest
```
Expected:
- `FAIL`
- 原因是构建器与原始命中明细类型尚未实现
- [ ] **Step 3: Write minimal implementation**
实现以下内容:
- 在结果表 Mapper 中新增“按项目查询员工归并命中明细”的内部查询
- 该查询可以复用当前归并口径,但只供重算链路使用,不供页面查询直接调用
- 在 `CcdiProjectOverviewEmployeeResultBuilder` 中完成 Java 聚合,输出可批量入表的实体列表
- 生成:
- `risk_point`
- `model_codes_csv`
- `model_names_json`
- `hit_rules_json`
- `model_hit_summary_json`
不要在这一阶段改造 Controller 或页面查询 SQL。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultBuilderTest
```
Expected:
- `PASS`
- [ ] **Step 5: Commit**
```bash
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeHitRowVO.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeRuleSummaryVO.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeModelSummaryVO.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilder.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapper.java ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewEmployeeResultMapper.xml ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilderTest.java
git commit -m "实现结果总览员工结果聚合构建"
```
### Task 3: 接入标签写库后的同事务重算链路
**Files:**
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java`
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java`
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/CcdiProjectOverviewServiceStructureTest.java`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceRiskCountRefreshTest.java`
- [ ] **Step 1: Write the failing tests**
调整服务结构与事务链路测试,锁定以下行为:
- `ICcdiProjectOverviewService` 暴露“重算结果总览员工结果”的服务方法
- 标签重算成功后,先写标签结果,再重算员工结果表,再同步项目风险人数
- 如果员工结果表重算失败,标签任务整体失败并回滚
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest
```
Expected:
- `FAIL`
- 原因是当前只刷新项目风险人数,尚未维护结果总览员工结果表
- [ ] **Step 3: Write minimal implementation**
实现以下最小改动:
- 在 `CcdiProjectOverviewServiceImpl` 中新增按项目重算员工结果表的方法
- 方法内部顺序固定为:
- 校验项目存在
- 删除当前项目历史结果
- 查询项目全量员工归并命中明细
- 通过构建器聚合
- 批量写入结果表
- 基于结果表同步项目高/中/低风险人数
- 在 `CcdiBankTagServiceImpl` 中把成功链路切换为调用新方法
不要在这一步修改结果总览 Controller 对外接口。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest
```
Expected:
- `PASS`
- [ ] **Step 5: Commit**
```bash
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/CcdiProjectOverviewServiceStructureTest.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceRiskCountRefreshTest.java
git commit -m "接入结果总览员工结果同步重算"
```
### Task 4: 将结果总览 4 类查询切换为只读结果表
**Files:**
- Modify: `ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapper.java`
- Modify: `ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperSqlTest.java`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperRiskModelCardsTest.java`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperRiskModelPeopleTest.java`
- Modify: `ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java`
- [ ] **Step 1: Write the failing tests**
补充或调整 Mapper/Service 测试,锁定以下要求:
- 风险仪表盘查询不再依赖 `resolvedEmployeeRiskBaseSql`
- 风险人员总览从结果表返回 `riskLevel`、`modelCount` 与 `riskPoint`
- 模型卡片 `warningCount` 取自结果表中的模型汇总快照
- 模型人员列表继续支持:
- `ANY`
- `ALL`
- `keyword`
- `deptId`
- `pageNum`
- `pageSize`
- 4 类页面查询都改为读 `ccdi_project_overview_employee_result`
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewMapperRiskModelCardsTest,CcdiProjectOverviewMapperRiskModelPeopleTest,CcdiProjectOverviewServiceImplTest
```
Expected:
- `FAIL`
- 原因是当前结果总览查询仍直接读取重连表聚合 SQL
- [ ] **Step 3: Write minimal implementation**
在 `CcdiProjectOverviewMapper.xml` 中:
- 风险仪表盘按结果表风险等级统计
- 风险人员总览改为单表排序查询
- 模型卡片基于 `model_hit_summary_json` 或等价结构展开聚合
- 模型人员分页基于结果表完成 `ANY / ALL / keyword / deptId` 过滤,并从快照字段组装 `modelNames` 与 `hitTagList`
`CcdiProjectOverviewServiceImpl` 中只做最小适配,不新增新接口,不改前端响应结构。
不要改动:
- 风险明细相关逻辑
- 已移出页面范围之外的 TOP10 逻辑
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewMapperRiskModelCardsTest,CcdiProjectOverviewMapperRiskModelPeopleTest,CcdiProjectOverviewServiceImplTest
```
Expected:
- `PASS`
- [ ] **Step 5: Commit**
```bash
git add ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapper.java ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperSqlTest.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperRiskModelCardsTest.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewMapperRiskModelPeopleTest.java ccdi-project/src/test/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImplTest.java
git commit -m "切换结果总览查询到员工结果表"
```
### Task 5: 补充后端实施与验证文档
**Files:**
- Create: `docs/reports/implementation/2026-03-20-results-overview-employee-result-table-backend-implementation.md`
- Create: `docs/tests/records/2026-03-20-results-overview-employee-result-table-backend-verification.md`
- Verify: `docs/design/2026-03-20-results-overview-employee-result-table-design.md`
- [ ] **Step 1: Write implementation record**
记录以下内容:
- 新增结果总览员工结果表
- 命中结果写库后同事务重算
- 结果总览 4 类查询改为只读结果表
- 保持模型卡片 `warningCount` 为原始标签命中次数
- [ ] **Step 2: Write verification record**
记录执行过的命令、日期、结果和结论,至少包含:
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultEntityTest,CcdiProjectOverviewEmployeeResultMapperXmlTest
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultBuilderTest
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewMapperRiskModelCardsTest,CcdiProjectOverviewMapperRiskModelPeopleTest,CcdiProjectOverviewServiceImplTest
```
如执行 SQL 脚本,记录:
```bash
bin/mysql_utf8_exec.sh sql/migration/2026-03-20-create-project-overview-employee-result-table.sql
```
- [ ] **Step 3: Commit**
```bash
git add docs/reports/implementation/2026-03-20-results-overview-employee-result-table-backend-implementation.md docs/tests/records/2026-03-20-results-overview-employee-result-table-backend-verification.md
git commit -m "补充结果总览员工结果表后端实施记录"
```

View File

@@ -0,0 +1,331 @@
# LSFX Mock Rule Hit Mode Backend Implementation Plan
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** 为 `lsfx-mock-server` 增加可通过命令行切换的规则命中模式,在默认保持稳定随机子集命中的前提下,支持切换为“全部兼容规则命中”。
**Architecture:** 保持现有 `FileService -> StatementService -> FileRecord 缓存` 主链路不变,只在启动层新增命令行参数解析,在配置层新增统一模式值,在规则计划层新增 `subset/all` 两种编排路径。`all` 模式不做字面全开,而是通过显式互斥组裁剪产出“全部兼容规则命中”计划,避免样本语义冲突。
**Tech Stack:** Python 3, FastAPI, uvicorn, pydantic-settings, pytest, Markdown
---
## File Structure
- `lsfx-mock-server/config/settings.py`: 新增 `RULE_HIT_MODE` 配置项,统一暴露规则命中模式。
- `lsfx-mock-server/main.py`: 新增普通启动命令行参数解析,并在启动前校验模式值。
- `lsfx-mock-server/dev.py`: 新增热重载启动入口,支持 `--reload --rule-hit-mode ...`。
- `lsfx-mock-server/services/file_service.py`: 为 `subset/all` 两种模式生成命中计划,并显式维护互斥组裁剪逻辑。
- `lsfx-mock-server/tests/test_file_service.py`: 锁定默认随机子集模式、`all` 模式全集逻辑和互斥组行为。
- `lsfx-mock-server/tests/test_startup.py`: 锁定命令行参数解析、非法参数报错和热重载启动参数透传。
- `lsfx-mock-server/README.md`: 更新普通启动、热重载启动与“全部兼容规则命中”的准确说明。
- `docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-backend-record.md`: 记录本次后端实施范围、命中模式语义和落地结果。
- `docs/tests/records/2026-03-22-lsfx-rule-hit-mode-backend-verification.md`: 记录测试命令、启动验证和进程清理结果。
### Task 1: 接入命令行启动参数并统一规则命中模式配置
**Files:**
- Modify: `lsfx-mock-server/config/settings.py`
- Modify: `lsfx-mock-server/main.py`
- Create: `lsfx-mock-server/dev.py`
- Create: `lsfx-mock-server/tests/test_startup.py`
- Reference: `docs/superpowers/specs/2026-03-22-lsfx-rule-hit-mode-design.md`
- [ ] **Step 1: Write the failing test**
先在 `lsfx-mock-server/tests/test_startup.py` 中补三条失败用例,锁定启动参数语义:
```python
import pytest
from main import parse_args as parse_main_args
from dev import parse_args as parse_dev_args
def test_main_parse_args_should_default_to_subset():
args = parse_main_args([])
assert args.rule_hit_mode == "subset"
def test_main_parse_args_should_accept_all_mode():
args = parse_main_args(["--rule-hit-mode", "all"])
assert args.rule_hit_mode == "all"
def test_dev_parse_args_should_reject_invalid_mode():
with pytest.raises(SystemExit):
parse_dev_args(["--rule-hit-mode", "invalid"])
```
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_startup.py -v
```
Expected:
- `FAIL`
- 原因是 `main.py` 与热重载入口尚未提供可测试的参数解析函数
- [ ] **Step 3: Write minimal implementation**
按最小路径实现:
1. 在 `config/settings.py` 中新增默认配置:
```python
RULE_HIT_MODE: str = "subset"
```
2.`main.py` 中新增参数解析函数,只允许 `subset|all`
```python
def parse_args(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument("--rule-hit-mode", choices=["subset", "all"], default="subset")
return parser.parse_args(argv)
```
3. 在 `main.py` 启动前,将 `rule_hit_mode` 写入环境变量,再初始化/读取 `settings`。
4. 新增 `dev.py`,复用同一套参数解析,支持:
```bash
python dev.py --reload --rule-hit-mode all
```
5. `dev.py` 内部调用 `uvicorn.run("main:app", reload=True, ...)` 或等价方式,不再要求用户直接运行裸 `uvicorn main:app --reload ...`
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_startup.py -v
```
Expected:
- `PASS`
- 默认模式为 `subset`
- `all` 模式可被普通启动与热重载入口正确解析
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/config/settings.py lsfx-mock-server/main.py lsfx-mock-server/dev.py lsfx-mock-server/tests/test_startup.py
git commit -m "补充Mock规则命中模式启动参数"
```
### Task 2: 在 FileService 中实现全部兼容规则命中计划
**Files:**
- Modify: `lsfx-mock-server/services/file_service.py`
- Modify: `lsfx-mock-server/tests/test_file_service.py`
- Reference: `lsfx-mock-server/services/statement_rule_samples.py`
- Reference: `docs/superpowers/specs/2026-03-22-lsfx-rule-hit-mode-design.md`
- [ ] **Step 1: Write the failing test**
在 `lsfx-mock-server/tests/test_file_service.py` 中先补失败用例,锁定 `all` 模式语义:
```python
def test_build_rule_hit_plan_should_return_all_compatible_rules_in_all_mode(monkeypatch):
monkeypatch.setattr("services.file_service.settings.RULE_HIT_MODE", "all")
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan = service._build_rule_hit_plan(10001)
assert plan["large_transaction_hit_rules"] == LARGE_TRANSACTION_RULE_CODES
assert plan["phase1_hit_rules"] == PHASE1_RULE_CODES
assert plan["phase2_statement_hit_rules"] == PHASE2_STATEMENT_RULE_CODES
assert plan["phase2_baseline_hit_rules"] == PHASE2_BASELINE_RULE_CODES
def test_build_rule_hit_plan_should_keep_subset_mode_as_default():
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan1 = service._build_rule_hit_plan(10001)
plan2 = service._build_rule_hit_plan(10001)
assert plan1 == plan2
assert 2 <= len(plan1["large_transaction_hit_rules"]) <= 4
def test_build_rule_hit_plan_should_drop_conflicting_rules_from_all_mode(monkeypatch):
monkeypatch.setattr("services.file_service.settings.RULE_HIT_MODE", "all")
monkeypatch.setattr(
"services.file_service.RULE_CONFLICT_GROUPS",
[["SALARY_QUICK_TRANSFER", "SALARY_UNUSED"]],
)
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan = service._build_rule_hit_plan(10001)
assert not (
"SALARY_QUICK_TRANSFER" in plan["phase2_statement_hit_rules"]
and "SALARY_UNUSED" in plan["phase2_statement_hit_rules"]
)
```
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_file_service.py -k "rule_hit_plan" -v
```
Expected:
- `FAIL`
- 原因是当前 `_build_rule_hit_plan()` 只有稳定随机子集逻辑,尚无模式切换和互斥裁剪
- [ ] **Step 3: Write minimal implementation**
在 `lsfx-mock-server/services/file_service.py` 中按职责做最小拆分:
1. 保留现有四类规则池常量。
2. 新增互斥组常量,第一版允许为空列表:
```python
RULE_CONFLICT_GROUPS = []
```
3. 新增模式编排辅助函数:
```python
def _build_subset_rule_hit_plan(self, log_id: int) -> dict:
...
def _build_all_compatible_rule_hit_plan(self) -> dict:
...
def _apply_conflict_groups(self, rule_plan: dict) -> dict:
...
```
4. `_build_rule_hit_plan()` 只负责分发:
```python
if settings.RULE_HIT_MODE == "all":
return self._apply_conflict_groups(self._build_all_compatible_rule_hit_plan())
return self._build_subset_rule_hit_plan(log_id)
```
5. 不修改 `FileRecord` 字段结构和后续消费链路,只改变计划生成方式。
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_file_service.py -k "rule_hit_plan" -v
```
Expected:
- `PASS`
- 默认仍为随机子集
- `all` 模式返回全部兼容规则
- 若未来配置互斥组,同组规则不会同时出现在结果里
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/services/file_service.py lsfx-mock-server/tests/test_file_service.py
git commit -m "补充Mock全部兼容规则命中计划"
```
### Task 3: 更新文档并完成后端验证记录
**Files:**
- Modify: `lsfx-mock-server/README.md`
- Create: `docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-backend-record.md`
- Create: `docs/tests/records/2026-03-22-lsfx-rule-hit-mode-backend-verification.md`
- [ ] **Step 1: Update README with accurate startup instructions**
更新 `lsfx-mock-server/README.md`
- 普通启动示例改为:
```bash
python main.py --rule-hit-mode subset
python main.py --rule-hit-mode all
```
- 热重载示例改为:
```bash
python dev.py --reload --rule-hit-mode subset
python dev.py --reload --rule-hit-mode all
```
- 文案统一使用“全部兼容规则命中”,不使用“全部规则命中”。
- [ ] **Step 2: Run targeted verification**
Run:
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_startup.py tests/test_file_service.py -k "rule_hit_plan or parse_args" -v
```
Expected:
- `PASS`
- 启动参数与规则计划两条主链路均被锁定
- [ ] **Step 3: Run startup smoke tests and stop processes**
分别执行并记录:
```bash
cd lsfx-mock-server
python3 main.py --rule-hit-mode all > /tmp/lsfx_main.log 2>&1 & echo $! > /tmp/lsfx_main.pid
sleep 3
kill "$(cat /tmp/lsfx_main.pid)"
rm -f /tmp/lsfx_main.pid
python3 dev.py --reload --rule-hit-mode all > /tmp/lsfx_dev.log 2>&1 & echo $! > /tmp/lsfx_dev.pid
sleep 5
kill "$(cat /tmp/lsfx_dev.pid)"
rm -f /tmp/lsfx_dev.pid
```
Expected:
- 两种启动方式均成功拉起
- 结束验证后无残留进程
- [ ] **Step 4: Write implementation and verification records**
在实施记录中写清:
- 默认模式保持不变
- `all` 的准确语义是“全部兼容规则命中”
- 当前互斥组为空或具体清单
- 热重载改为项目脚本入口
在验证记录中写清:
- 测试命令及结果
- 启动验证命令及结果
- 进程清理动作
- [ ] **Step 5: Commit**
```bash
git add lsfx-mock-server/README.md docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-backend-record.md docs/tests/records/2026-03-22-lsfx-rule-hit-mode-backend-verification.md
git commit -m "补充Mock命中模式后端实施与验证记录"
```

View File

@@ -0,0 +1,121 @@
# LSFX Mock Phase 2 Random Hit Frontend Implementation Plan
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** 明确本次 `lsfx-mock-server` 第二期稳定随机命中改造不引入前端代码变更,同时验证现有前端页面与接口契约无需调整。
**Architecture:** 本次改造只作用于 Mock 服务与本地数据库基线,不新增前端页面、字段、交互或路由。前端实施计划采用“零代码变更 + 契约核验 + 文档沉淀”的最短路径,若核验发现返回结构变化,再停止并回到设计阶段,而不是在本计划中临时扩展前端实现。
**Tech Stack:** Vue 2, npm, Axios request wrapper, Markdown docs
---
## File Structure
- `ruoyi-ui/src/api/`: 本次预期不修改,只用于核验现有接口调用契约是否保持不变。
- `ruoyi-ui/src/views/ccdiProject/`: 本次预期不修改,只用于核验“拉取本行信息”“重打标结果展示”“流水详情标签展示”相关页面是否无需联动调整。
- `docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-frontend-record.md`: 记录本次前端无代码改动的范围说明。
- `docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-frontend-verification.md`: 记录前端契约核验与页面回归结论。
### Task 1: 核验前端接口契约无需调整
**Files:**
- Reference: `ruoyi-ui/src/api/`
- Reference: `ruoyi-ui/src/views/ccdiProject/`
- Reference: `docs/design/2026-03-20-lsfx-mock-phase2-random-hit-design.md`
- [ ] **Step 1: Check the existing frontend touchpoints**
检查以下现有前端触点,不写代码:
- `拉取本行信息` 入口对应的接口调用位置
- `项目重打标` 入口对应的接口调用位置
- `流水详情命中标签` 展示链路
重点确认:
- 请求参数没有新增字段要求
- 返回结构没有新增前端必填字段要求
- 结果展示仍依赖现有标签结果接口,不需要额外渲染第二期专属字段
- [ ] **Step 2: Verify no code change is needed**
Run:
```bash
cd ruoyi-ui
rg -n "pull-bank-info|tags/rebuild|bank-statement/detail|hitTags" src
```
Expected:
- 只看到现有调用点
- 设计文档中的第二期改造不要求新增前端字段适配
- [ ] **Step 3: If contract drift is found, stop instead of patching**
若核验发现以下任一情况,则停止执行并回到设计阶段:
- 后端返回结构新增前端必须消费的新字段
- 结果展示需要新增新的专属列或筛选条件
- 现有页面无法承接第二期命中结果
若未发现上述情况,则保持前端零代码变更。
- [ ] **Step 4: Record the no-op decision**
在计划执行时将“无需前端改动”的事实沉淀到实施记录与验证记录,不创建任何前端源码改动。
- [ ] **Step 5: Commit**
```bash
git add docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-frontend-record.md docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-frontend-verification.md
git commit -m "补充第二期Mock联调前端核验记录"
```
### Task 2: 补前端实施与验证记录
**Files:**
- Create: `docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-frontend-record.md`
- Create: `docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-frontend-verification.md`
- [ ] **Step 1: Write implementation record**
在 `docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-frontend-record.md` 中记录:
- 本次需求范围仅涉及 Mock 服务与数据库基线
- 前端页面、接口封装、路由和交互均不需要改动
- 不做“为了联调看起来完整”而新增无业务价值的前端补丁
- [ ] **Step 2: Write verification record**
在 `docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-frontend-verification.md` 中记录:
- 核验的页面与接口触点
- 检查命令
- “无需前端改动”的依据
- 最终结论
- [ ] **Step 3: Confirm no frontend build/test is required**
若无源码改动,则不运行 `npm run build:prod`;在验证记录中明确写明“本次为零代码改动核验,因此未执行构建”。
- [ ] **Step 4: Verify git diff stays docs-only**
Run:
```bash
git diff --name-only -- ruoyi-ui
```
Expected:
- 无输出
- 证明本次前端计划执行保持零代码改动
- [ ] **Step 5: Commit**
```bash
git add docs/reports/implementation/2026-03-20-lsfx-mock-phase2-random-hit-frontend-record.md docs/tests/records/2026-03-20-lsfx-mock-phase2-random-hit-frontend-verification.md
git commit -m "记录第二期Mock联调前端零改动结论"
```

View File

@@ -0,0 +1,223 @@
# Results Overview Employee Result Table Frontend Implementation Plan
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** 在后端切换到结果总览员工结果表出数后,保持结果总览页现有交互、文案和接口消费方式不变,并补充前端回归保护,确保风险仪表盘、风险人员总览、模型区展示持续稳定。
**Architecture:** 前端不新增路由、不新增页面、不调整结果总览现有组件拆分。保持 `PreliminaryCheck.vue`、`RiskPeopleSection.vue` 与 `RiskModelSection.vue` 继续消费当前接口返回结构,前端工作的重点放在 API/组件契约回归测试与最小必要的归一化兼容,而不是重做 UI 交互。
**Tech Stack:** Vue 2, Element UI, Axios (`@/utils/request`), Node.js
---
### Task 1: 锁定结果总览前端 API 契约不变
**Files:**
- Modify: `ruoyi-ui/tests/unit/project-overview-api.test.js`
- Modify: `ruoyi-ui/tests/unit/preliminary-check-api-integration.test.js`
- Verify: `ruoyi-ui/src/api/ccdi/projectOverview.js`
- [ ] **Step 1: Write the failing test**
补充 API 静态断言,锁定以下约束:
- 仍使用现有接口路径:
- `/ccdi/project/overview/dashboard`
- `/ccdi/project/overview/risk-people`
- `/ccdi/project/overview/risk-models/cards`
- `/ccdi/project/overview/risk-models/people`
- 结果总览前端不感知后端底层是否改为结果表
- 模型人员接口仍透传:
- `projectId`
- `modelCodes`
- `matchMode`
- `keyword`
- `deptId`
- `pageNum`
- `pageSize`
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd ruoyi-ui
node tests/unit/project-overview-api.test.js
node tests/unit/preliminary-check-api-integration.test.js
```
Expected:
- `FAIL`
- 原因是测试尚未锁定“后端数据源改造但前端 API 契约不变”的预期
- [ ] **Step 3: Write minimal implementation**
如现有 API 封装已经满足约束,则只补测试,不修改源码。
如果测试暴露出接口参数或路径未被稳定消费,再做最小调整,要求:
- 不新增新 API 方法
- 不改变接口名称
- 不改变组件调用方式
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd ruoyi-ui
node tests/unit/project-overview-api.test.js
node tests/unit/preliminary-check-api-integration.test.js
```
Expected:
- `PASS`
- [ ] **Step 5: Commit**
```bash
git add ruoyi-ui/tests/unit/project-overview-api.test.js ruoyi-ui/tests/unit/preliminary-check-api-integration.test.js ruoyi-ui/src/api/ccdi/projectOverview.js
git commit -m "锁定结果总览前端接口契约"
```
### Task 2: 锁定结果总览页面展示契约与回归边界
**Files:**
- Modify: `ruoyi-ui/tests/unit/preliminary-check-summary-and-people.test.js`
- Modify: `ruoyi-ui/tests/unit/preliminary-check-model-and-detail.test.js`
- Modify: `ruoyi-ui/tests/unit/preliminary-check-risk-people-binding.test.js`
- Modify: `ruoyi-ui/tests/unit/preliminary-check-model-linkage-flow.test.js`
- Verify: `ruoyi-ui/src/views/ccdiProject/components/detail/PreliminaryCheck.vue`
- Verify: `ruoyi-ui/src/views/ccdiProject/components/detail/RiskPeopleSection.vue`
- Verify: `ruoyi-ui/src/views/ccdiProject/components/detail/RiskModelSection.vue`
- [ ] **Step 1: Write the failing test**
补充组件回归断言,锁定以下边界:
- 风险仪表盘仍展示:
- 总人数
- 高风险
- 中风险
- 低风险
- 无风险人员
- 风险人员总览仍展示:
- 姓名
- 身份证号
- 所属部门
- 风险等级
- 命中模型数
- 核心异常点
- 模型区仍支持:
- 模型卡片统计
- `ANY / ALL`
- 关键字筛选
- 部门筛选
- 异常标签展示
- 页面不重新引入已移除的 TOP10 区块
- [ ] **Step 2: Run test to verify it fails**
Run:
```bash
cd ruoyi-ui
node tests/unit/preliminary-check-summary-and-people.test.js
node tests/unit/preliminary-check-model-and-detail.test.js
node tests/unit/preliminary-check-risk-people-binding.test.js
node tests/unit/preliminary-check-model-linkage-flow.test.js
```
Expected:
- `FAIL`
- 原因是测试尚未完整覆盖“后端改为结果表出数后前端展示不变”的场景
- [ ] **Step 3: Write minimal implementation**
优先只改测试。
如果测试发现组件对空数组、字段顺序或字段缺省值存在脆弱依赖,再在以下文件中做最小兼容处理:
- `PreliminaryCheck.vue`
- `RiskPeopleSection.vue`
- `RiskModelSection.vue`
兼容处理原则:
- 只做空值归一化
- 只做字段缺省保护
- 不改变现有交互、布局、文案和样式
- [ ] **Step 4: Run test to verify it passes**
Run:
```bash
cd ruoyi-ui
node tests/unit/preliminary-check-summary-and-people.test.js
node tests/unit/preliminary-check-model-and-detail.test.js
node tests/unit/preliminary-check-risk-people-binding.test.js
node tests/unit/preliminary-check-model-linkage-flow.test.js
```
Expected:
- `PASS`
- [ ] **Step 5: Commit**
```bash
git add ruoyi-ui/tests/unit/preliminary-check-summary-and-people.test.js ruoyi-ui/tests/unit/preliminary-check-model-and-detail.test.js ruoyi-ui/tests/unit/preliminary-check-risk-people-binding.test.js ruoyi-ui/tests/unit/preliminary-check-model-linkage-flow.test.js ruoyi-ui/src/views/ccdiProject/components/detail/PreliminaryCheck.vue ruoyi-ui/src/views/ccdiProject/components/detail/RiskPeopleSection.vue ruoyi-ui/src/views/ccdiProject/components/detail/RiskModelSection.vue
git commit -m "补充结果总览前端展示回归保护"
```
### Task 3: 执行结果总览前端专项回归验证
**Files:**
- Create: `docs/reports/implementation/2026-03-20-results-overview-employee-result-table-frontend-implementation.md`
- Create: `docs/tests/records/2026-03-20-results-overview-employee-result-table-frontend-verification.md`
- Verify: `docs/design/2026-03-20-results-overview-employee-result-table-design.md`
- [ ] **Step 1: Run focused frontend regression tests**
Run:
```bash
cd ruoyi-ui
node tests/unit/project-overview-api.test.js
node tests/unit/preliminary-check-api-integration.test.js
node tests/unit/preliminary-check-summary-and-people.test.js
node tests/unit/preliminary-check-model-and-detail.test.js
node tests/unit/preliminary-check-risk-people-binding.test.js
node tests/unit/preliminary-check-model-linkage-flow.test.js
```
Expected:
- `PASS`
- 证明结果总览页前端契约与交互边界在后端数据源改造后保持稳定
- [ ] **Step 2: Write implementation and verification records**
实施记录需说明:
- 前端未新增页面和交互
- 前端重点是锁定 API 与展示契约
- 若有源码调整,仅为最小兼容处理
验证记录需写明:
- 执行日期
- 执行命令
- 每条命令结果
- 最终结论
- [ ] **Step 3: Commit**
```bash
git add docs/reports/implementation/2026-03-20-results-overview-employee-result-table-frontend-implementation.md docs/tests/records/2026-03-20-results-overview-employee-result-table-frontend-verification.md
git commit -m "补充结果总览员工结果表前端验证记录"
```

View File

@@ -0,0 +1,119 @@
# LSFX Mock Rule Hit Mode Frontend Implementation Plan
> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** 明确本次 `lsfx-mock-server` 规则命中模式切换不引入前端代码改动,同时完成对前端接口契约和页面影响面的核验。
**Architecture:** 本次需求仅作用于 Mock 服务启动方式与命中计划生成逻辑,不新增接口字段、页面交互、路由或状态管理逻辑。前端计划采用“零代码变更 + 契约核验 + 文档记录”的最短路径;若核验发现契约漂移,则停止执行并回到设计阶段,不在本计划中临时扩展实现。
**Tech Stack:** Vue 2, Axios request wrapper, npm, Markdown
---
## File Structure
- `ruoyi-ui/src/api/`: 本次预期不修改,只用于核验现有接口封装是否依赖 Mock 启动方式或新增字段。
- `ruoyi-ui/src/views/`: 本次预期不修改,只用于核验与流水拉取、标签结果、流水明细相关页面是否无需联动调整。
- `docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-frontend-record.md`: 记录本次前端零代码改动的范围说明。
- `docs/tests/records/2026-03-22-lsfx-rule-hit-mode-frontend-verification.md`: 记录前端契约核验与页面影响判断依据。
### Task 1: 核验前端接口契约与页面触点不受影响
**Files:**
- Reference: `ruoyi-ui/src/api/`
- Reference: `ruoyi-ui/src/views/`
- Reference: `docs/superpowers/specs/2026-03-22-lsfx-rule-hit-mode-design.md`
- [ ] **Step 1: Identify existing frontend touchpoints**
检查以下触点,不写任何前端源码:
- 与“拉取本行信息”相关的接口封装
- 与项目打标、结果总览、流水详情相关的调用链
- 是否存在对 Mock 启动方式、端口或命中模式的前端硬编码依赖
- [ ] **Step 2: Verify no contract change is required**
Run:
```bash
cd ruoyi-ui
rg -n "getJZFileOrZjrcuFile|getBSByLogId|tag|result|statement" src
```
Expected:
- 只命中现有接口调用和结果展示代码
- 未发现前端依赖新增请求参数或新增响应字段
- [ ] **Step 3: Stop if any contract drift is found**
若核验发现以下任一情况,则停止,不做补丁式前端改造:
- 后端返回结构新增前端必填字段
- 页面需要新增新的命中模式展示入口
- 现有页面对热重载脚本入口有直接依赖
若未发现,则维持前端零代码改动。
- [ ] **Step 4: Record the no-op decision**
在实施记录中明确写明:
- 本次需求仅影响 Mock 服务
- 前端页面、接口封装、路由、权限和状态管理均无需改动
- 不为“展示模式切换”新增无业务价值 UI
- [ ] **Step 5: Commit**
```bash
git add docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-frontend-record.md docs/tests/records/2026-03-22-lsfx-rule-hit-mode-frontend-verification.md
git commit -m "记录Mock命中模式前端零改动结论"
```
### Task 2: 补充前端实施与验证记录
**Files:**
- Create: `docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-frontend-record.md`
- Create: `docs/tests/records/2026-03-22-lsfx-rule-hit-mode-frontend-verification.md`
- [ ] **Step 1: Write implementation record**
在 `docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-frontend-record.md` 中记录:
- 需求范围仅涉及 `lsfx-mock-server`
- 前端无新增字段、无新增交互、无新增构建要求
- 维持零代码改动是本次需求的最短正确路径
- [ ] **Step 2: Write verification record**
在 `docs/tests/records/2026-03-22-lsfx-rule-hit-mode-frontend-verification.md` 中记录:
- 核验范围
- 检查命令
- “无需前端改动”的依据
- 最终结论
- [ ] **Step 3: Confirm build is not required**
若前端源码无改动,则不执行 `npm run build:prod`;在验证记录中明确写明“本次为零代码变更核验,因此未执行前端构建”。
- [ ] **Step 4: Verify git diff stays docs-only**
Run:
```bash
git diff --name-only -- ruoyi-ui
```
Expected:
- 无输出
- 证明本次前端计划执行期间没有引入前端源码改动
- [ ] **Step 5: Commit**
```bash
git add docs/reports/implementation/2026-03-22-lsfx-rule-hit-mode-frontend-record.md docs/tests/records/2026-03-22-lsfx-rule-hit-mode-frontend-verification.md
git commit -m "补充Mock命中模式前端核验记录"
```

View File

@@ -0,0 +1,54 @@
# 银行流水批量入库 MySQL 1869 修复记录
## 背景
文件上传解析完成后,`CcdiFileUploadServiceImpl.fetchAndSaveBankStatements` 调用
`CcdiBankStatementMapper.insertBatch` 批量写入 `ccdi_bank_statement`
线上报错如下:
- MySQL 错误码:`1869`
- 异常信息:`Auto-increment value in UPDATE conflicts with internally generated values`
## 根因
`ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankStatementMapper.xml`
中的批量插入 SQL 使用了 no-op upsert
```sql
ON DUPLICATE KEY UPDATE
bank_statement_id = bank_statement_id
```
`bank_statement_id` 是自增主键。当前 MySQL 版本在 `INSERT ... ON DUPLICATE KEY UPDATE`
场景下,若 duplicate 分支显式更新自增列,即使是自赋值,也会触发 `1869`
## 本次修改
1. `insertBatch` 的 duplicate 分支从自增主键自赋值改为普通字段自赋值:
```sql
ON DUPLICATE KEY UPDATE
batch_id = batch_id
```
2. 新增 Mapper XML 回归测试,约束批量去重 SQL 不得再更新自增主键。
## 影响说明
- 新流水:仍正常插入。
- 重复流水:仍按唯一键命中后跳过,不改写已有业务数据。
- 非重复键的其他 SQL 错误:仍然继续抛出,不会被静默吞掉。
## 验证
执行命令:
```bash
mvn test -pl ccdi-project -Dtest=CcdiBankStatementMapperXmlTest,CcdiFileUploadServiceImplTest
```
重点关注:
- `insertBatch_shouldAvoidUpdatingAutoIncrementPrimaryKeyInDuplicateBranch`
- `processFileAsync_shouldMarkParsedFailedWhenInsertBatchThrowsUnexpectedSqlError`

View File

@@ -0,0 +1,31 @@
# 第二期银行流水规则元数据修复实施记录
## 问题背景
- 2026-03-20 校验发现第二期规则已完成后端真实实现,但当前数据库中的第二期 10 条规则元数据仍停留在占位状态。
- 直接查询 `ccdi_bank_tag_rule` 可见:
- 10 条第二期规则 `remark` 仍为“占位规则待补充真实SQL”
- `FIXED_COUNTERPARTY_TRANSFER.indicator_code` 仍为旧值 `FIXED_COUNTERPARTY_TRANSFER`
- `SALARY_UNUSED.business_caliber` 仍为乱码
- `TAX_ASSET_REGISTRATION_MISMATCH.business_caliber` 仍为旧口径
## 根因分析
- 第二期真实规则落地时已更新初始化脚本 [`sql/2026-03-16-bank-tagging.sql`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/bank-tag-real-rule-phase2-backend/sql/2026-03-16-bank-tagging.sql),但没有同步补一份增量迁移脚本。
- 当前仓库的 SQL 元数据测试此前只覆盖第一期,没有覆盖第二期,所以“只改初始化脚本、遗漏增量脚本”的问题没有被自动拦截。
- 已执行过旧增量脚本、但未重建规则表初始化数据的环境,会继续保留第二期占位元数据。
## 本次修改
- 扩展 SQL 元数据测试 [`CcdiBankTagRuleSqlMetadataTest.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/bank-tag-real-rule-phase2-backend/ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java)
- 保留第一期元数据校验
- 新增第二期初始化脚本与迁移脚本一致性校验
- 约束第二期 10 条规则的 `indicator_code`、`business_caliber`、`remark` 必须与真实规则实现对齐
- 新增增量脚本 [`2026-03-20-sync-bank-tag-phase2-rule-metadata.sql`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/bank-tag-real-rule-phase2-backend/sql/migration/2026-03-20-sync-bank-tag-phase2-rule-metadata.sql)
- 使用 `INSERT ... ON DUPLICATE KEY UPDATE` 同步第二期 10 条规则元数据
- 清空 `FIXED_COUNTERPARTY_TRANSFER` 的旧 `indicator_code`
- 修复 `SALARY_UNUSED` 乱码与 `TAX_ASSET_REGISTRATION_MISMATCH` 业务口径
- 同步 10 条规则的真实规则 `remark`
- 使用 `bin/mysql_utf8_exec.sh` 将第二期元数据修复脚本落到当前验证数据库
## 实施结果
- 第二期规则元数据已与真实后端实现对齐。
- 新增测试可在仓库层拦住“第二期初始化脚本已改、迁移脚本漏补”的回归。
- 当前数据库中的第二期规则不再继续保留占位 `remark`、旧 `indicator_code` 和乱码业务口径。

View File

@@ -0,0 +1,80 @@
# 银行流水真实规则第二期后端实施记录
## 第二期规则范围
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `MULTI_PARTY_GAMBLING_TRANSFER`
- `MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER`
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- `SUPPLIER_CONCENTRATION`
- `SALARY_QUICK_TRANSFER`
- `SALARY_UNUSED`
## 修改内容
- 补齐第二期规则参数映射与阈值透传
- 替换第二期 3 条资产比对明细规则占位 SQL
- 替换第二期 7 条对象聚合规则占位 SQL
- 对齐第二期规则元数据脚本、实施记录与验证记录
## 参数与分发调整
-`BankTagRuleConfigResolver` 中补齐第二期阈值规则参数映射:
- `MULTI_PARTY_GAMBLING_TRANSFER -> MULTI_PARTY_AMT_MIN, MULTI_PARTY_AMT_MAX`
- `MONTHLY_FIXED_INCOME -> MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER -> FIXED_COUNTERPARTY_TRANSFER_MIN, FIXED_COUNTERPARTY_TRANSFER_MAX`
- 明确以下规则为无阈值规则,继续返回空参数集:
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `SUPPLIER_CONCENTRATION`
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- `SALARY_QUICK_TRANSFER`
- `SALARY_UNUSED`
-`CcdiBankTagServiceImpl` 中为第二期 3 条阈值型对象规则改为显式透传解析后的阈值,避免继续走占位分支。
## 资产比对真实 SQL
-`CcdiBankTagAnalysisMapper.xml` 中将以下 3 条明细规则替换为真实 SQL
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- 三条规则统一基于 `ccdi_asset_info` 校验员工及关系人当前资产登记情况,并继续输出 `bankStatementId/groupId/logId/reasonDetail`
- 房产登记口径统一使用当前项目数据中的实际枚举值:
- `asset_main_type = '房产'`
- `asset_sub_type = '住宅'`
- `asset_status = '正常'`
## 对象聚合真实 SQL
-`CcdiBankTagAnalysisMapper.xml` 中将以下 7 条对象规则替换为真实 SQL
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `MULTI_PARTY_GAMBLING_TRANSFER`
- `MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER`
- `SUPPLIER_CONCENTRATION`
- `SALARY_QUICK_TRANSFER`
- `SALARY_UNUSED`
- 对象型规则统一按员工维度收口,返回 `objectType = 'STAFF_ID_CARD'` 与 `objectKey = 员工身份证号`,与结果表唯一键保持一致。
- 各规则口径落地如下:
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`:基于关系人年收入折算月收入,筛出无收入或月收入低于 3000 元且累计交易超 10 万元的员工。
- `MULTI_PARTY_GAMBLING_TRANSFER`:按员工同日交易聚合,要求金额落在配置区间、交易笔数超过 2 笔且对手方至少 2 个。
- `MONTHLY_FIXED_INCOME`:统计近 12 个月非工资稳定转入,满足月收入阈值、命中月数不少于 6 个月且波动率受控。
- `FIXED_COUNTERPARTY_TRANSFER`:统计近 12 个月固定对手方按季度转入,季度金额落在配置区间且持续出现。
- `SUPPLIER_CONCENTRATION`:基于采购交易表计算员工负责采购中单一供应商占比是否超过 70%。
- `SALARY_QUICK_TRANSFER`:识别工资入账后 24 小时内快速转出超过 80% 的员工。
- `SALARY_UNUSED`:识别工资入账后剔除代扣项目,连续 30 天无消费或转账支出的员工。
## 元数据脚本调整
-`sql/2026-03-16-bank-tagging.sql` 中将第二期已落地规则的 `remark` 从占位描述更新为真实规则说明。
- `FIXED_COUNTERPARTY_TRANSFER.indicator_code` 调整为 `NULL`,避免继续保留不准确的单指标编码。
- 修正 `SALARY_UNUSED` 的业务口径文本乱码问题。
- 同步将 `TAX_ASSET_REGISTRATION_MISMATCH` 的业务口径更新为当前真实 SQL 已执行的房产登记校验表述。
## 与第一期衔接关系
- 第一期已完成的 9 条规则保持不变,本次不回退第一期参数映射、明细规则与对象规则实现。
- 第二期实现后,当前 19 条已落地真实规则共用同一套任务执行、结果落表、风险人数刷新与概览统计链路。
- 本次回归继续覆盖项目概览相关测试,确认第二期改动未影响已有风险概览接口结构与 SQL。
## 全量收口结论
- 第二期 10 条规则已全部替换为真实后端实现,不再依赖 `where 1 = 0` 占位 SQL。
- 第二期规则参数编码、规则编码、脚本元数据均保持全大写约定。
- 当前银行流水真实规则第一期与第二期后端范围已全部接入现有项目级打标主链路。

View File

@@ -0,0 +1,72 @@
# LSFX Mock 第二期稳定随机命中后端实施记录
## 修改范围
- `lsfx-mock-server/services/file_service.py`
- `lsfx-mock-server/services/statement_rule_samples.py`
- `lsfx-mock-server/services/statement_service.py`
- `lsfx-mock-server/services/phase2_baseline_service.py`
- `lsfx-mock-server/tests/test_file_service.py`
- `lsfx-mock-server/tests/test_statement_service.py`
- `lsfx-mock-server/tests/test_phase2_baseline_service.py`
- `lsfx-mock-server/tests/integration/test_full_workflow.py`
- `sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql`
## 第二期规则分层
- 流水样本驱动规则:
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `MULTI_PARTY_GAMBLING_TRANSFER`
- `MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER`
- `SALARY_QUICK_TRANSFER`
- `SALARY_UNUSED`
- 数据库基线驱动规则:
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- `SUPPLIER_CONCENTRATION`
## 职责边界
- `FileService`
- 为同一 `logId` 稳定生成并持久化第二期规则命中计划。
-`upload_file()``fetch_inner_flow()` 链路中写入 `phase2_statement_hit_rules``phase2_baseline_hit_rules`
- 在记录创建后立即调用第二期基线服务,避免出现“流水已返回但基线未写”的假成功状态。
- `StatementService`
-`FileRecord` 读取第二期流水规则子集。
- 继续保持 `FIXED_TOTAL_COUNT = 200`、稳定 ID 分配与缓存分页语义。
- `Phase2BaselineService`
- 复用项目数据库配置生成并执行第二期幂等 SQL 计划。
- 采购基线通过 `ccdi_base_staff.id_card -> staff_id` 映射到真实员工工号。
- 资产基线使用固定 `asset_name` 先删后插,并保持“故意不匹配”的资产枚举口径。
## 第二期流水样本策略
- `MULTI_PARTY_GAMBLING_TRANSFER`
- 为同一证件号生成同日多对手方、多笔区间金额转出。
- `MONTHLY_FIXED_INCOME`
- 生成连续 4 个月的稳定非工资转入。
- `FIXED_COUNTERPARTY_TRANSFER`
- 为固定对手方生成跨季度稳定转入样本。
- `SALARY_QUICK_TRANSFER`
- 生成工资入账后 6 小时内的大额转出。
- `SALARY_UNUSED`
- 生成独立证件号的工资入账与代扣样本,不与 `SALARY_QUICK_TRANSFER` 共用同一对象。
## 幂等 SQL 基线方案
- `SUPPLIER_CONCENTRATION`
- 固定采购主键 `LSFXMOCKP2PUR001`,先删后插。
- 插入 SQL 通过 `ccdi_base_staff` 按身份证反查真实 `staff_id` 与姓名,确保可被真实规则 SQL 关联。
- 三条资产不匹配规则
- 固定资产名称前缀 `LSFX Mock P2 ...`,先删后插。
- 统一使用当前项目真实口径中的 `asset_main_type = '房产'` 与 `asset_status = '正常'`。
- `asset_sub_type` 故意使用非 `住宅` 的值,维持“存在资产事实但不满足房产登记匹配”的状态。
- 亲属资产记录遵循 `family_id = 员工身份证号`、`person_id = 亲属身份证号`。
## 实施结果
- 第二期规则命中计划已接入 `FileService -> StatementService -> 缓存分页` 主链路。
- 第二期数据库基线已在拉取链路内接通,并支持通过独立 SQL 脚本重复重放。
- 单元测试、集成测试和全量 `lsfx-mock-server` 回归均已通过。

View File

@@ -0,0 +1,23 @@
# LSFX Mock 第二期稳定随机命中设计记录
## 本次变更
- 新增第二期联调设计文档:
- `docs/design/2026-03-20-lsfx-mock-phase2-random-hit-design.md`
## 设计收敛结果
- 明确保留第一期 `logId` 稳定随机命中方案,不引入第二套命中模式。
- 明确第二期规则按“流水样本驱动”和“数据库基线驱动”两层处理。
- 明确验收口径为“兰溪本地接口取数并入库后,执行重打标,最终标签结果中可查到抽中的第二期规则命中”。
## 关键边界
- 不修改主工程真实打标逻辑。
- 不把采购类、资产类规则伪造成纯流水规则。
- 不新增兼容性或补丁式方案。
## 后续动作
- 已基于设计文档完成后端实施计划与后端代码落地。
- 第二期稳定随机命中计划、流水样本与数据库基线已在 `lsfx-mock-server` 中接通。

View File

@@ -0,0 +1,24 @@
# LSFX Mock 第二期稳定随机命中实施计划记录
## 本次新增文档
- 后端实施计划:
- `docs/plans/backend/2026-03-20-lsfx-mock-phase2-random-hit-backend-implementation.md`
- 前端实施计划:
- `docs/plans/frontend/2026-03-20-lsfx-mock-phase2-random-hit-frontend-implementation.md`
## 计划范围
- 后端计划覆盖 `lsfx-mock-server` 第二期稳定随机命中规则、最小流水样本、数据库基线编排、回归验证与实施记录。
- 前端计划明确本次为零代码改动,仅做接口契约与现有页面承载能力核验。
## 计划约束
- 保持第一期稳定随机命中方案不变。
- 不修改主工程真实打标逻辑。
- 不引入兼容性或补丁式双轨实现。
- 前端不因“形式上要有计划”而制造无业务价值的改动。
## 后续动作
- 待用户审核两份实施计划后,再进入执行阶段。

View File

@@ -0,0 +1,50 @@
# 结果总览员工结果表后端实施记录
## 设计对照
- 本次实现对照设计文档 [`2026-03-20-results-overview-employee-result-table-design.md`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/docs/design/2026-03-20-results-overview-employee-result-table-design.md) 落地。
- 实现范围与设计保持一致:
- 新增结果总览员工结果表
- 命中结果写库后同事务重算员工结果表
- 结果总览 4 类查询切换为只读员工结果表
- 模型卡片 `warningCount` 保持为原始标签命中次数
## 本次改动
- 在 [`2026-03-20-create-project-overview-employee-result-table.sql`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/sql/migration/2026-03-20-create-project-overview-employee-result-table.sql) 新增 `ccdi_project_overview_employee_result` 建表脚本,落地 `project_id + staff_id_card` 唯一键、风险统计字段、模型/规则快照字段和审计字段。
- 在 [`CcdiProjectOverviewEmployeeResult.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/entity/CcdiProjectOverviewEmployeeResult.java)、[`CcdiProjectOverviewEmployeeResultMapper.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/mapper/CcdiProjectOverviewEmployeeResultMapper.java)、[`CcdiProjectOverviewEmployeeResultMapper.xml`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewEmployeeResultMapper.xml) 补齐结果表实体、最小 CRUD 和“按项目查询员工归并命中明细”的内部查询。
- 新增 [`CcdiProjectOverviewEmployeeHitRowVO.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeHitRowVO.java)、[`CcdiProjectOverviewEmployeeRuleSummaryVO.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeRuleSummaryVO.java)、[`CcdiProjectOverviewEmployeeModelSummaryVO.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/domain/vo/CcdiProjectOverviewEmployeeModelSummaryVO.java) 与 [`CcdiProjectOverviewEmployeeResultBuilder.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewEmployeeResultBuilder.java),把“员工本人 + 亲属归并到员工本人”的现有口径前移到写库后的重算阶段,并输出:
- `risk_point`
- `model_codes_csv`
- `model_names_json`
- `hit_rules_json`
- `model_hit_summary_json`
- 在 [`ICcdiProjectOverviewService.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/ICcdiProjectOverviewService.java) 与 [`CcdiProjectOverviewServiceImpl.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiProjectOverviewServiceImpl.java) 新增 `refreshOverviewEmployeeResults`,按固定顺序执行:
- 校验项目存在
- 删除当前项目历史员工结果
- 查询全量员工归并命中明细
- 通过 builder 聚合为结果表实体
- 批量写入员工结果表
- 基于聚合结果同步项目高/中/低风险人数
- 在 [`CcdiBankTagServiceImpl.java`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/java/com/ruoyi/ccdi/project/service/impl/CcdiBankTagServiceImpl.java) 把标签成功链路从“只刷新项目风险人数”切换为“重算员工结果表并同步风险人数”,使标签重算失败时整条任务失败并回滚。
- 在 [`CcdiProjectOverviewMapper.xml`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/ccdi-project/src/main/resources/mapper/ccdi/project/CcdiProjectOverviewMapper.xml) 将以下 4 类结果总览查询切换为只读 `ccdi_project_overview_employee_result`
- 风险人员总览
- 中高风险 TOP10
- 风险模型卡片
- 风险模型命中人员分页
- 模型卡片改为基于 `model_hit_summary_json` 展开统计,`warningCount` 继续按原始标签命中次数累加。
- 模型人员分页改为基于 `model_codes_csv` 的 `ANY / ALL` 过滤,并通过 `model_hit_summary_json` 与 `hit_rules_json` 还原 `modelNames` 与 `hitTagList`,保留 `keyword`、`deptId`、`pageNum`、`pageSize` 能力。
## 处理说明
- 没有新增平行 Controller 或对外接口,继续复用现有 `CcdiProjectOverviewController + Service + Mapper` 入口。
- 没有引入异步刷新、缓存或补丁式兜底逻辑,按设计保持“按项目整块重算”的最短实现路径。
- 为兼容现有 MySQL 能力,结果表快照查询使用 `json_extract + JSON 数组下标展开`,没有引入 `json_table` 或窗口函数。
- `hit_rules_json` 在规则快照中补充 `modelCode`,以便模型人员分页在按模型筛选时仍能只返回当前筛选模型范围内的异常标签。
## 提交记录
- `ec006f2` 新增结果总览员工结果表结构
- `0a58ac3` 实现结果总览员工结果聚合构建
- `f539c4b` 接入结果总览员工结果同步重算
- `ef10616` 切换结果总览查询到员工结果表

View File

@@ -0,0 +1,32 @@
# 结果总览员工结果表设计记录
## 变更概述
- 新增结果总览员工结果表设计文档 1 份。
- 本次设计明确结果总览页改为基于“项目内每个员工的结果快照”出数。
- 明确员工亲属流水异常需归并计入员工本人。
- 明确命中结果写库成功后,在同一事务内按项目整块重算结果表。
- 明确当前范围仅覆盖已接通区块:
- 风险仪表盘
- 风险人员总览
- 模型预警次数统计
- 命中模型涉及人员
## 新增文件
- `docs/design/2026-03-20-results-overview-employee-result-table-design.md`
## 设计结论
- 新增单张结果总览员工结果表,表名建议为 `ccdi_project_overview_employee_result`
- 结果表按 `project_id + staff_id_card` 唯一确定一名员工在一个项目内的结果快照。
- 页面查询只依赖 `ccdi_project` 与结果总览员工结果表,不再运行时回源连 `标签结果表 + 流水表 + 员工表 + 亲属表`
- 模型卡片 `warningCount` 继续保持“原始标签命中次数”口径。
- 中高风险 TOP10 不纳入本轮结果表设计范围。
## 说明
- 本次仅完成设计文档沉淀,尚未开始实施。
- 后续需按仓库规范继续输出:
- `docs/plans/backend/` 下的后端实施计划
- `docs/plans/frontend/` 下的前端实施计划

View File

@@ -0,0 +1,31 @@
# 结果总览员工结果表前端实施记录
## 本次改动
- 更新 [`project-overview-api.test.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/tests/unit/project-overview-api.test.js),补充结果总览 4 个既有接口路径断言,并锁定模型区人员接口继续透传 `projectId`、`modelCodes`、`matchMode`、`keyword`、`deptId`、`pageNum`、`pageSize`。
- 更新 [`preliminary-check-api-integration.test.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/tests/unit/preliminary-check-api-integration.test.js),锁定 [`PreliminaryCheck.vue`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/src/views/ccdiProject/components/detail/PreliminaryCheck.vue) 仍并发请求仪表盘、风险人员总览、模型卡片 3 类接口,且不感知结果表底层实现。
- 更新 [`preliminary-check-summary-and-people.test.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/tests/unit/preliminary-check-summary-and-people.test.js)、[`preliminary-check-risk-people-binding.test.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/tests/unit/preliminary-check-risk-people-binding.test.js),锁定风险仪表盘指标文案、风险人员总览列结构、核心异常点标签拆分与空数组保护。
- 更新 [`preliminary-check-model-and-detail.test.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/tests/unit/preliminary-check-model-and-detail.test.js)、[`preliminary-check-model-linkage-flow.test.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/tests/unit/preliminary-check-model-linkage-flow.test.js),锁定模型区 `ANY / ALL`、关键词筛选、部门筛选、重置与分页参数构造,以及风险明细区静态结构。
## 处理说明
- 本次前端未新增页面、路由、交互,也未调整结果总览现有组件拆分。
- 本次未修改 [`projectOverview.js`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/src/api/ccdi/projectOverview.js)、[`PreliminaryCheck.vue`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/src/views/ccdiProject/components/detail/PreliminaryCheck.vue)、[`RiskPeopleSection.vue`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/src/views/ccdiProject/components/detail/RiskPeopleSection.vue)、[`RiskModelSection.vue`](/Users/wkc/Desktop/ccdi/ccdi/ruoyi-ui/src/views/ccdiProject/components/detail/RiskModelSection.vue) 源码;新增断言后源码已满足契约,因此本轮实施以测试加固为主。
- 本次重点是把“后端切换为员工结果表出数后,前端 API 消费方式与展示边界保持不变”沉淀为显式回归保护,避免后续改动误触接口路径、请求参数或页面结构。
- 本次未启动前后端开发服务,仅执行 Node 单测脚本,因此不存在额外进程清理动作。
## 验证情况
- 已于 2026-03-22 执行以下定向验证:
```bash
cd ruoyi-ui
node tests/unit/project-overview-api.test.js
node tests/unit/preliminary-check-api-integration.test.js
node tests/unit/preliminary-check-summary-and-people.test.js
node tests/unit/preliminary-check-model-and-detail.test.js
node tests/unit/preliminary-check-risk-people-binding.test.js
node tests/unit/preliminary-check-model-linkage-flow.test.js
```
- 上述 6 条命令全部 `exit 0`,说明结果总览页 API 契约、展示契约和模型区联动边界在当前实现下保持稳定。

View File

@@ -0,0 +1,30 @@
# 结果总览员工结果表实施计划记录
## 变更概述
- 基于已确认的设计文档,新增后端实施计划 1 份。
- 基于已确认的设计文档,新增前端实施计划 1 份。
- 本次计划围绕“按项目 + 员工沉淀结果快照,再由页面查询从结果表出数”展开。
- 计划明确命中结果写库成功后,在同一事务内按项目整块重算结果总览员工结果表。
## 新增文件
- `docs/plans/backend/2026-03-20-results-overview-employee-result-table-backend-implementation.md`
- `docs/plans/frontend/2026-03-20-results-overview-employee-result-table-frontend-implementation.md`
## 计划结论
- 后端计划聚焦:
- 新增结果总览员工结果表
- 实现员工归并结果构建
- 接入标签写库后的同步重算
- 将结果总览 4 类查询切换为只读结果表
- 前端计划聚焦:
- 锁定现有 API 契约不变
- 锁定结果总览页面展示和交互不变
- 通过回归测试保护后端数据源迁移后的页面稳定性
## 说明
- 本次按仓库规范,将实施计划分别落到 `docs/plans/backend/` 与 `docs/plans/frontend/`。
- 本轮尚未进入代码实现阶段,本文件仅记录计划沉淀结果。

View File

@@ -0,0 +1,141 @@
# 后端全量打标规则验证实施记录
## 目标
- 验证后端当前银行流水打标链路在真实联调环境下是否能够覆盖全部 33 条规则。
- 不改动业务代码,只输出当前实现状态与验证结论。
## 本次执行范围
- 复用当前本地已运行的后端服务 `http://localhost:62318`
- 复用当前本地已运行的 Mock 服务 `http://localhost:8000`
- 使用 `project_id=49` 执行“拉取本行信息 -> 手动重打标 -> 数据库结果核验 -> 详情接口回查”
- 补充规则清单、占位 SQL、Mock all 模式覆盖范围与实际命中结果的交叉核验
## 实施内容
### 1. 自动化与实现现状核对
- 执行 `mvn -pl ccdi-project -Dtest=CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest,CcdiBankTagAnalysisMapperXmlTest,CcdiBankTagRuleSqlMetadataTest,CcdiModelParamSqlDefaultsTest test`
- 结果为 `BUILD SUCCESS`
- 结论:当前 33 条规则的服务分发、XML 结构、元数据校验与风险人数刷新回归均通过
### 2. 规则可验证范围核对
- 规则总数:33
- `CcdiBankTagServiceImpl` 中分发 `case` 数量:33
- 当前仍为占位 SQL 的规则共 6 条:
- `ABNORMAL_CUSTOMER_TRANSACTION`
- `INCOME_ASSET_MISMATCH`
- `CROSS_BORDER_AMT`
- `INTEREST_PAYMENT_BY_OTHERS`
- `WITHDRAW_AMT`
- `PROXY_ACCOUNT_OPERATION`
- Mock 服务 `--rule-hit-mode all` 当前可直接提供的“全部兼容规则命中”覆盖 26 条规则,不包含以下 7 条:
- `ABNORMAL_CUSTOMER_TRANSACTION`
- `INCOME_ASSET_MISMATCH`
- `CROSS_BORDER_AMT`
- `INTEREST_PAYMENT_BY_OTHERS`
- `LARGE_PURCHASE_TRANSACTION`
- `WITHDRAW_AMT`
- `PROXY_ACCOUNT_OPERATION`
- 其中 `LARGE_PURCHASE_TRANSACTION` 依赖采购基线表,不依赖 Mock 银行流水样本
### 3. 真实链路验证
- 登录接口返回 `code=200`
- 对项目 `49` 调用 `/ccdi/file-upload/pull-bank-info`,返回“拉取任务已提交”
- 对项目 `49` 调用 `/ccdi/project/tags/rebuild`,返回“标签重算任务已提交”
- 最新手动任务:
- `id=47`
- `status=SUCCESS`
- `hit_count=2340`
- `success_rule_count=33`
- `failed_rule_count=0`
### 4. 结果核验
- 项目 `49` 当前在标签结果表 `ccdi_bank_statement_tag_result` 中实际命中的规则共 19 条:
- `HOUSE_OR_CAR_EXPENSE`
- `TAX_EXPENSE`
- `SINGLE_LARGE_INCOME`
- `CUMULATIVE_INCOME`
- `ANNUAL_TURNOVER`
- `LARGE_CASH_DEPOSIT`
- `LARGE_TRANSFER`
- `MULTI_PARTY_GAMBLING_TRANSFER`
- `GAMBLING_SENSITIVE_KEYWORD`
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- `FOREX_BUY_AMT`
- `FOREX_SELL_AMT`
- `LARGE_PURCHASE_TRANSACTION`
- `SUPPLIER_CONCENTRATION`
- `STOCK_TFR_LARGE`
- `SALARY_QUICK_TRANSFER`
- `LARGE_STOCK_TRADING`
- 未命中的 14 条规则中:
- 6 条属于占位 SQL,当前无法证明“可正确打标”
- 8 条虽然已有真实 SQL,但本次项目 `49` 在 all 模式联调数据下未产生命中:
- `FREQUENT_CASH_DEPOSIT`
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `SPECIAL_AMOUNT_TRANSACTION`
- `MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER`
- `SUSPICIOUS_INCOME_KEYWORD`
- `WITHDRAW_CNT`
- `SALARY_UNUSED`
### 5. 8 条真实 SQL 未命中规则的具体原因
- `FREQUENT_CASH_DEPOSIT`
- 项目 `49``batch_id=45565/69755/70053` 中存在 `2026-03-10` 同日 `6` 笔存现样本,金额均高于运行阈值。
- `taskId=47` 日志显示运行参数为 `{LARGE_CASH_DEPOSIT=5000, FREQUENT_CASH_DEPOSIT=2}`,但规则仍记为无命中。
- 当前按源码 SQL 直接复核,这批数据已经足以命中 `3` 个对象;结合当前后端进程启动时间为 `2026-03-22 12:11:28`、晚于 `taskId=47``2026-03-22 09:11:57`,可判定这是验证时旧运行进程未刷新 Mapper XML 导致的假阴性。
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- Mock 样本给关系人打入两笔资金,累计 `120000` 元,金额条件已满足。
- 但项目 `49` 中这些关系人的 `annual_income` 实际为 `64000~128000` 元,折算月收入均高于 `3000` 元。
- 实际阻断点是家庭关系表收入口径,不是流水金额不足。
- `SPECIAL_AMOUNT_TRANSACTION`
- Mock 样本金额为 `88888.88` 元。
- SQL 仅接受 `520``1314` 两类特殊金额。
- 这条未命中是样本金额与真实规则枚举不一致。
- `MONTHLY_FIXED_INCOME`
- Mock 每个对象只生成了 `4` 个月稳定收入(`2025-12``2026-03`,每月 `7200` 元)。
- SQL 要求近 `12` 个月至少 `6` 个月达到阈值。
- 样本满足“固定金额”,但不满足“月份数量”条件。
- `FIXED_COUNTERPARTY_TRANSFER`
- “季度稳定兼职收入”样本全部写在家属证件号上,项目 `49` 复核时这些证件号均未进入员工主表。
- SQL 从 `ccdi_base_staff` 内连接开始,只统计员工本人。
- 因此这条在第一层员工连接就被过滤掉。
- `SUSPICIOUS_INCOME_KEYWORD`
- Mock 摘要为“咨询返现收入”。
- SQL 关键词集合仅覆盖“工资、分红、红利、奖金、劳务费、批量代付”等收入词,不包含“返现”“咨询”。
- 直接阻断点是摘要关键词不匹配。
- `WITHDRAW_CNT`
- 当前按源码 SQL 直接复核,项目 `49` 已能查出 `3` 个对象在 `2026-03-12` 单日提现 `4/4/5` 次,均超过阈值 `3`
-`taskId=47` 日志在同一阈值下仍记为无命中。
- 这条与 `FREQUENT_CASH_DEPOSIT` 一样,不是数据不满足,而是 `2026-03-22 09:11` 那次验证使用了旧后端运行进程,导致运行时 SQL 与当前源码不一致。
- `SALARY_UNUSED`
- `2026-02-10` 的“工资入账 + 代扣公积金”专用样本全部落在家属证件号,被 `ccdi_base_staff` 内连接排除。
- 员工本人侧仅剩 `2026-03-14` 的工资入账样本,而这些样本在 `30` 天内都已有 `2~5` 笔非代扣支出。
- 因此该规则一部分样本卡在员工连接,另一部分样本卡在“30 天无支出”的 `not exists` 条件。
### 6. 接口回查
- 使用 `bank_statement_id=70334` 调用详情接口 `/ccdi/project/bank-statement/detail/70334`
- 返回 `code=200`
- `data.hitTags` 中成功回查到 `LARGE_TRANSFER`
## 产物
- [`2026-03-22-bank-tag-all-rules-validation-record.md`](/Users/wkc/Desktop/ccdi/ccdi/docs/reports/implementation/2026-03-22-bank-tag-all-rules-validation-record.md)
- [`2026-03-22-bank-tag-all-rules-validation-verification.md`](/Users/wkc/Desktop/ccdi/ccdi/docs/tests/records/2026-03-22-bank-tag-all-rules-validation-verification.md)
## 结论
- 当前后端可以证明“33 条规则都已接入执行链路”但不能证明“33 条规则现在都能正确打上标签”。
- 截至 2026-03-22本次真实联调链路只验证到 19 条规则产生实际标签结果。
- 其余 14 条里有 6 条仍是占位 SQL,另外 8 条里有 6 条属于样本/主数据口径不满足,`FREQUENT_CASH_DEPOSIT` 与 `WITHDRAW_CNT` 则属于验证时运行进程版本不一致导致的假阴性。
- 因此当前结论应判定为“全量规则未全部验证通过”,且 `2026-03-22` 这次验证记录不能继续把 `FREQUENT_CASH_DEPOSIT` 与 `WITHDRAW_CNT` 简单归类为“真实 SQL 未打中”。

View File

@@ -0,0 +1,22 @@
# LSFX Mock 启动说明文档补充记录
## 本次改动
- 新增 LSFX Mock 服务启动说明文档。
- 文档聚焦普通启动、热重载启动和 `--rule-hit-mode` 参数写法。
- 同步补充端口占用时的临时启动示例,便于联调时直接参考。
- 按最新要求将启动说明移动到 `lsfx-mock-server/` 目录,与 `main.py` 保持同级。
## 文档位置
- `lsfx-mock-server/STARTUP.md`
## 覆盖内容
- `main.py` 启动命令
- `dev.py --reload` 启动命令
- `--rule-hit-mode subset|all` 参数说明
- `--reload` 参数说明
- 推荐使用场景
- 启动成功后的访问地址
- 端口冲突时的临时端口示例

View File

@@ -0,0 +1,40 @@
# LSFX Mock 规则命中模式后端实施记录
## 修改范围
- `lsfx-mock-server/config/settings.py`
- `lsfx-mock-server/main.py`
- `lsfx-mock-server/dev.py`
- `lsfx-mock-server/services/file_service.py`
- `lsfx-mock-server/tests/test_startup.py`
- `lsfx-mock-server/tests/test_file_service.py`
- `lsfx-mock-server/README.md`
## 本次改动
- 为 Mock 服务新增统一配置项 `RULE_HIT_MODE`,默认值为 `subset`
-`main.py` 中新增 `parse_args()` 与启动前模式注入逻辑,支持 `--rule-hit-mode subset|all`
- 新增项目级热重载入口 `dev.py`,支持 `python dev.py --reload --rule-hit-mode ...`
-`FileService` 中将规则命中计划拆分为:
- `subset` 模式:沿用按 `logId` 稳定随机命中子集
- `all` 模式:返回全部兼容规则命中计划
- 新增显式互斥组入口 `RULE_CONFLICT_GROUPS`,当前实现默认为空列表,仅预留结构与裁剪逻辑。
## 规则命中模式语义
- 默认模式保持为 `subset`,不传参数时仍按同一 `logId` 生成稳定随机子集。
- `all` 的准确语义是“全部兼容规则命中”,不是无约束全量命中。
- 当前四类规则池在 `all` 模式下会返回各自全集,再经过互斥组裁剪。
- 当前互斥组配置为空列表,因此默认不会额外裁剪任何规则;后续若新增互斥组,将按组内顺序保留首个规则。
## 启动入口调整
- 普通启动入口更新为:
- `python main.py --rule-hit-mode subset`
- `python main.py --rule-hit-mode all`
- 热重载入口统一改为项目脚本:
- `python dev.py --reload --rule-hit-mode subset`
- `python dev.py --reload --rule-hit-mode all`
- README 已同步改为“全部兼容规则命中”口径,不再使用“全部规则命中”。
## 实施结果
- `FileService -> StatementService -> FileRecord 缓存` 主链路保持不变。
- 默认随机子集行为未回归。
- `all` 模式已支持通过启动参数显式切换。
- 热重载启动不再依赖裸 `uvicorn main:app --reload ...` 透传业务参数。

View File

@@ -0,0 +1,152 @@
# project_id=50 银行流水未命中规则逐条追因记录
## 目标
- 针对 `project_id=50` 最新一次流水打标结果,逐条追查未命中的 `12` 条规则原因。
- 本次只做数据库与源码核验,不改动业务代码与数据。
## 核验范围
- 任务表:`ccdi_bank_tag_task`
- 结果表:`ccdi_bank_statement_tag_result`
- 规则表:`ccdi_bank_tag_rule`
- 主数据:`ccdi_base_staff``ccdi_staff_fmy_relation`
- 明细表:`ccdi_bank_statement`
- 源码口径:`ccdi-project/src/main/resources/mapper/ccdi/project/CcdiBankTagAnalysisMapper.xml`
## 项目概况
- `project_id=50` 最新标签任务为 `id=48`
- 任务状态:`SUCCESS`
- `success_rule_count=33`
- `failed_rule_count=0`
- `hit_count=2094`
- 实际产生命中结果的规则共 `21` 条,因此仍有 `12` 条规则未命中
- 项目流水共 `198` 条,仅涉及 `1` 名员工和 `2` 名家属:
- 员工:`499172200511062290 / 马娜`
- 家属:`320101200103037981 / 孙秀霞 / 配偶 / 年收入 152000`
- 家属:`320101198908317982 / 王德华 / 父亲 / 年收入 57000`
## 12 条未命中规则与原因
### 1. `ABNORMAL_CUSTOMER_TRANSACTION`
- 当前 Mapper SQL 仍是 `where 1 = 0` 的占位实现。
- 直接原因不是 `project_id=50` 的样本不足,而是规则尚未落地真实 SQL。
### 2. `INTEREST_PAYMENT_BY_OTHERS`
- 当前 Mapper SQL 仍是 `where 1 = 0` 的占位实现。
- 该规则当前版本不会返回任何对象结果。
### 3. `MONTHLY_FIXED_INCOME`
- 项目 `50` 员工本人近 `12` 个月满足“月流入总额 > 5000”的月份其实有 `10` 个月,并非月份数量不足。
- 但这些月份金额波动极大:
- 常规月份多在 `7225.83 ~ 22924.28`
- `2026-03` 因多笔大额对公流入,单月达到 `113218600.98`
- 按现有 SQL,命中前提除了 `>= 6` 个月,还要求 `STDDEV(monthAmount) / AVG(monthAmount) <= 0.3`。
- 该员工实际离散系数约为 `2.9962`,远高于阈值,最终卡在“固定收入稳定性”这一层。
### 4. `SPECIAL_AMOUNT_TRANSACTION`
- 项目 `50` 中不存在金额为 `520``1314` 的员工本人特殊金额流水。
- 现有最接近的样本是:
- `bank_statement_id=72174`
- `2026-03-14 08:00:00`
- 对手方 `兰溪特别金额结算中心`
- 摘要 `特殊金额转账`
- 支出金额 `88888.88`
- 现行 SQL 只接受 `520``1314` 两类固定金额,因此这条是“样本金额与真实规则枚举不一致”。
### 5. `FIXED_COUNTERPARTY_TRANSFER`
- 当前阈值为季度区间 `[3000, 15000]`
- 样本中原本用于模拟“稳定兼职收入”的对手方 `兰溪远航信息服务有限公司`
- `2025-Q4` 累计 `7200`,落在区间内
- `2026-Q1``2026-01``2026-02``2026-03` 各有一笔 `7200`,季度累计变成 `21600`,超过上限 `15000`
- 因此它自己就无法满足“同一对手至少 2 个季度都落在区间内”。
- 同时,项目 `50` 还存在多组噪声对手方也误落在区间内并跨季度重复,如:
- `小店``2025-Q2/Q3/Q4`
- `京东``2025-Q2/2026-Q1`
- `微信支付``2025-Q2/2026-Q1`
- `支付宝``2025-Q2/2025-Q4`
- `淘宝``2025-Q2/2026-Q1`
- `银行转账``2025-Q4/2026-Q1`
- 外层 SQL 还要求同一员工满足条件的固定对手数 `< 3`,而该员工实际达到 `6` 个,因此最终卡在对象收口条件。
### 6. `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- 项目 `50` 家属流水总量并不低:
- `王德华` 累计交易额 `820405.18`
- `孙秀霞` 累计交易额 `319656.22`
- 但规则要求关系人年收入为空、为 `0`,或折算月收入 `< 3000`
- 项目 `50` 的两个家属收入分别为:
- `王德华` 年收入 `57000`,折算月收入 `4750`
- `孙秀霞` 年收入 `152000`,折算月收入约 `12666.67`
- 因此没有任何一名家属能进入“低收入关系人”候选集,阻断点在家庭关系表收入口径,而不是流水金额不足。
### 7. `CROSS_BORDER_AMT`
- 当前 Mapper SQL 仍是 `where 1 = 0` 的占位实现。
- 该规则当前版本不会返回任何明细结果。
### 8. `SUSPICIOUS_INCOME_KEYWORD`
- 项目 `50` 存在多笔“看起来像兼职/合作收入”的流入样本,例如:
- `月度稳定兼职收入`
- `年度合作收入`
- `经营往来收入`
- `项目回款收入`
- `业务合作收入`
- `咨询返现收入`
- 但现行 SQL 的关键词集合只覆盖:
- `代发、工资、分红、红利、奖金、薪酬、薪金、补贴、年终奖、年金、加班费、劳务费、劳务外包、提成、劳务派遣、绩效、酬劳、批量代付`
- 不包含 `兼职、收入、返现、回款、合作、经营、咨询` 等词。
- 因此项目 `50` 不是没有疑似样本,而是当前关键词词表无法识别这些摘要。
### 9. `WITHDRAW_AMT`
- 当前 Mapper SQL 仍是 `where 1 = 0` 的占位实现。
- 该规则当前版本不会返回任何对象结果。
### 10. `INCOME_ASSET_MISMATCH`
- 当前 Mapper SQL 仍是 `where 1 = 0` 的占位实现。
- 该规则当前版本不会返回任何明细结果。
### 11. `SALARY_UNUSED`
- 项目 `50` 共有两笔工资入账样本:
- `2026-02-10 09:00:00`,证件号 `320101198908317982`,金额 `9800`
- `2026-03-14 09:00:00`,证件号 `499172200511062290`,金额 `12000`
- 其中第一笔落在家属 `王德华` 证件号上,现行 SQL 从 `ccdi_base_staff` 内连接开始,只统计员工本人,因此这笔在连接层被排除。
- 员工本人这笔 `2026-03-14 09:00:00` 的工资入账后,`30` 天内存在 `5` 笔非代扣支出,包含:
- `工资到账后快速转出 10800`
- `手机银行转账 12000000`
- `个人购汇 126000`
- `证券大额转托管转出 560000`
- `证券大额交易买入 880000`
- 因此这条规则一部分样本卡在员工连接,另一部分样本卡在“30 天内无非代扣支出”的 `not exists` 条件。
### 12. `PROXY_ACCOUNT_OPERATION`
- 当前 Mapper SQL 仍是 `where 1 = 0` 的占位实现。
- 该规则当前版本不会返回任何对象结果。
## 结论
- `project_id=50``12` 条未命中规则中,有 `6` 条属于当前版本仍未落地的占位 SQL
- `ABNORMAL_CUSTOMER_TRANSACTION`
- `INTEREST_PAYMENT_BY_OTHERS`
- `CROSS_BORDER_AMT`
- `WITHDRAW_AMT`
- `INCOME_ASSET_MISMATCH`
- `PROXY_ACCOUNT_OPERATION`
- 另外 `6` 条属于真实 SQL 已执行,但项目 `50` 的样本或主数据口径不满足现行实现:
- `MONTHLY_FIXED_INCOME`:月度波动过大,离散系数超标
- `SPECIAL_AMOUNT_TRANSACTION`:只有 `88888.88`,没有 `520/1314`
- `FIXED_COUNTERPARTY_TRANSFER`:目标样本季度累计超上限,且噪声对手方过多
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`:家属收入都高于低收入阈值
- `SUSPICIOUS_INCOME_KEYWORD`:摘要词不在现行关键词集合内
- `SALARY_UNUSED`:家属工资样本被员工连接排除,员工本人工资后 30 天内已有非代扣支出

View File

@@ -0,0 +1,204 @@
# lsfx Mock 规则命中模式切换设计
## 1. 背景
`lsfx-mock-server` 当前在生成流水命中计划时,默认按 `log_id` 稳定随机抽取规则子集。现需要在保持默认行为不变的前提下,支持通过启动命令切换到“全部兼容规则命中”模式,便于联调时一次性覆盖更多规则。
本次设计仅面向 Mock 服务,不涉及主 Java 工程、前端页面或真实规则引擎逻辑调整。
## 2. 目标与范围
### 2.1 目标
- 默认启动时继续使用现有“随机子集命中”逻辑
- 支持通过命令行参数切换命中模式
- 普通启动与热重载启动都支持命中模式切换
- 在“全部兼容规则命中”模式下,避免互斥规则同时出现
- 保持现有流水生成、命中计划持久化、基线补齐链路不变
### 2.2 非目标
- 不修改任意规则编码、样本构造规则、流水拼装顺序
- 不新增第三种命中模式
- 不兼容原生 `uvicorn main:app ...` 直接附带自定义业务参数的形式
- 不改前端、不改主系统后端接口
## 3. 术语定义
### 3.1 `subset`
默认模式。沿用当前逻辑,按 `log_id` 稳定随机抽取每类规则池中的部分规则,保证同一 `log_id` 结果稳定。
### 3.2 `all`
“全部兼容规则命中”模式。该模式不是字面意义上的“无条件命中全部规则”,而是:
- 优先命中当前已实现的全部可共存规则
- 若存在显式定义的互斥规则组,则每个互斥组仅保留一个固定代表规则
- 最终返回的命中计划必须稳定、可解释、可测试
对外文案统一使用“全部兼容规则命中”,避免误解为无约束全开。
## 4. 现状分析
当前规则命中计划由 `services/file_service.py` 中的 `_build_rule_hit_plan(log_id)` 负责生成:
- 使用 `random.Random(f"rule-plan:{log_id}")` 保证稳定性
- 分别为四类规则池抽取 2 到 4 条规则
- 生成结果写入 `FileRecord`
- 后续流水样本生成与第二期基线补齐均消费这份 `rule_plan`
当前启动入口 `main.py` 未解析业务命令行参数,配置由 `config/settings.py` 基于 `BaseSettings` 读取。
## 5. 设计方案
### 5.1 总体思路
将“规则命中模式”收敛为统一配置,由启动层负责解析命令行参数并注入配置,由规则计划编排层根据模式生成最终 `rule_plan`
整体链路如下:
1. 启动命令读取 `--rule-hit-mode`
2. 启动层将模式值写入进程配置
3. `settings` 暴露统一的 `RULE_HIT_MODE`
4. `FileService` 根据模式生成规则命中计划
5. 后续流水生成、基线补齐继续复用该计划
这样只改变“命中计划如何生成”,不改变“命中计划如何被使用”。
### 5.2 配置设计
`config/settings.py` 中新增:
- `RULE_HIT_MODE: str = "subset"`
可选值仅允许:
- `subset`
- `all`
非法值在启动阶段直接报错并退出,不做自动兜底。
### 5.3 启动设计
保留两类启动方式:
#### 普通启动
```bash
python main.py --rule-hit-mode all
```
#### 热重载启动
新增项目级启动脚本,例如:
```bash
python dev.py --reload --rule-hit-mode all
```
设计上不再要求裸命令 `uvicorn main:app --reload ...` 直接支持业务参数。README 中将明确推荐项目脚本作为热重载入口。
### 5.4 规则计划编排设计
规则计划生成逻辑拆分为两层:
1. 基础规则池定义
2. 模式对应的计划编排
`subset` 模式下:
- 完全沿用当前按 `log_id` 稳定抽样逻辑
`all` 模式下:
- 默认取四类规则池当前已实现规则的全集
- 再应用显式互斥组裁剪
- 输出“全部兼容规则命中”计划
### 5.5 互斥规则处理
为避免“全部命中”时出现语义自相矛盾的测试数据,引入显式互斥组定义。
互斥组处理规则:
- 互斥关系必须通过常量显式维护,不允许散落在样本 builder 内隐式判断
- 每个互斥组按固定优先级保留一个代表规则
- 未被声明为互斥的规则,默认视为可共存
第一版实现中,若现有规则样本已能共存,则允许互斥组为空;但结构必须预留,确保后续新增互斥规则时不破坏 `all` 模式语义。
### 5.6 对现有样本的兼容判断
根据当前样本实现与测试约束:
- `SALARY_QUICK_TRANSFER``SALARY_UNUSED` 已通过不同主体拆分,可共存
- 大额交易、一期规则、二期流水规则目前主要通过不同对手方、不同时间或不同主体构造,未发现必须立即裁剪的硬冲突
- 二期基线规则可继续按命中计划幂等写入
因此,第一版预计“互斥组定义为空或极少数”,但仍要通过独立常量与测试明确这一口径。
## 6. 代码改动边界
本次设计预期改动集中在以下位置:
- `lsfx-mock-server/config/settings.py`
- `lsfx-mock-server/main.py`
- `lsfx-mock-server/services/file_service.py`
- `lsfx-mock-server/README.md`
- `lsfx-mock-server/tests/`
- 新增热重载启动脚本
不触碰规则样本库的大规模重构,不改接口协议。
## 7. 测试设计
### 7.1 单元测试
- `subset` 模式下同一 `log_id` 仍返回稳定子集
- `all` 模式下返回四类规则池的兼容全集
- 若存在互斥组,验证不会同时出现同组规则
- 启动参数仅允许 `subset|all`
### 7.2 集成测试
- 普通启动时可切换到 `all`
- 热重载入口可切换到 `all`
- `all` 模式生成的 `rule_plan` 会被正确写入 `FileRecord`
- 后续流水查询与第二期基线补齐继续消费同一份计划
### 7.3 回归重点
- 默认不传参数时行为不变
- 现有随机子集链路测试不回归
- 已有规则样本生成顺序与分页查询稳定性不回归
## 8. 风险与控制
### 8.1 风险
- “全部规则命中”表述容易被误解为无条件全开
- 热重载启动若继续依赖裸 `uvicorn main:app`,无法自然接入业务参数
- 后续新增规则若存在互斥关系,可能破坏 `all` 模式语义
### 8.2 控制措施
- 文档与 README 统一使用“全部兼容规则命中”
- 热重载统一走项目级启动脚本
- 互斥组通过显式常量维护,并由测试守护
## 9. 预期交付
- 设计文档 1 份
- 后续实施计划 2 份:
- 后端实施计划
- 前端实施计划(明确本次无需前端代码改动)
- Mock 服务代码、README 与测试更新
## 10. 验收标准
- 默认启动仍为随机子集命中
- 显式传入命令行参数后可切换至“全部兼容规则命中”
- 普通启动与热重载启动均可切换
- `all` 模式下不会同时出现已定义互斥规则
- 所有相关测试通过,且无残留测试进程

View File

@@ -0,0 +1,33 @@
# 第二期银行流水规则元数据修复验证记录
## 执行命令
```bash
mvn test -pl ccdi-project -Dtest=CcdiBankTagRuleSqlMetadataTest
bin/mysql_utf8_exec.sh sql/migration/2026-03-20-sync-bank-tag-phase2-rule-metadata.sql
python3 - <<'PY'
# 查询 ccdi_bank_tag_rule 第二期 10 条规则的 indicator_code、business_caliber、remark
PY
mvn test -pl ccdi-project -Dtest=CcdiBankTagRuleSqlMetadataTest,CcdiBankTagAnalysisMapperXmlTest,BankTagRuleConfigResolverTest,CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest,CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewServiceImplTest,CcdiProjectOverviewControllerTest
```
## 执行时间
- 2026-03-20 16:29 执行 `mvn test -pl ccdi-project -Dtest=CcdiBankTagRuleSqlMetadataTest` 红灯验证,确认第二期迁移脚本缺失,测试报 `NoSuchFileException`
- 2026-03-20 16:30 重新执行 `mvn test -pl ccdi-project -Dtest=CcdiBankTagRuleSqlMetadataTest`,结果 `BUILD SUCCESS`,`Tests run: 2, Failures: 0, Errors: 0, Skipped: 0`。
- 2026-03-20 16:30 执行 `bin/mysql_utf8_exec.sh sql/migration/2026-03-20-sync-bank-tag-phase2-rule-metadata.sql`,脚本落库成功,无报错、无乱码输出。
- 2026-03-20 16:30 查询 `ccdi_bank_tag_rule` 第二期 10 条规则元数据,确认数据库已与真实规则状态对齐。
- 2026-03-20 16:30 执行最终回归命令 `mvn test -pl ccdi-project -Dtest=CcdiBankTagRuleSqlMetadataTest,CcdiBankTagAnalysisMapperXmlTest,BankTagRuleConfigResolverTest,CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest,CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewServiceImplTest,CcdiProjectOverviewControllerTest`,结果 `BUILD SUCCESS`,`Tests run: 49, Failures: 0, Errors: 0, Skipped: 0`,完成时间 `2026-03-20T16:30:53+08:00`。
## 结果摘要
- 已补第二期元数据迁移脚本与 SQL 校验测试。
- 修复后已确认:
- 第二期 10 条规则 `remark` 均已变为真实规则说明
- `FIXED_COUNTERPARTY_TRANSFER.indicator_code` 已变为 `NULL`
- `SALARY_UNUSED.business_caliber` 乱码已修复为“工资发放后除代扣项目外连续30天无消费或转账支出记录。”
- `TAX_ASSET_REGISTRATION_MISMATCH.business_caliber` 已更新为“员工及关系人有5000元以上的纳税记录但当前资产登记口径下无房产登记。”
- `MONTHLY_FIXED_INCOME.indicator_code` 继续保持 `MONTHLY_FIXED_INCOME`
- 测试日志中的 `threshold missing` 与 `refresh failed` 为既有异常路径断言产生的预期日志,不代表最终回归失败。
## 结论
- 第二期规则元数据修复已完成,仓库脚本与当前数据库均已对齐到真实规则状态。
- 当前验证仅执行 Maven 单元测试与数据库只读复核,未启动额外前后端进程,因此无需执行进程清理。

View File

@@ -0,0 +1,27 @@
# 银行流水真实规则第二期后端验证记录
## 执行命令
```bash
mvn test -pl ccdi-project -Dtest=BankTagRuleConfigResolverTest
mvn test -pl ccdi-project -Dtest=CcdiModelParamSqlDefaultsTest
mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest
mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,CcdiBankTagServiceImplTest
mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,BankTagRuleConfigResolverTest,CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest,CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewServiceImplTest,CcdiProjectOverviewControllerTest
```
## 执行时间
- 2026-03-20 14:59 重新执行最终回归命令 `mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,BankTagRuleConfigResolverTest,CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest,CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewServiceImplTest,CcdiProjectOverviewControllerTest`47 个测试全部通过0 failure0 error。
- 本轮最终回归 `BUILD SUCCESS`Maven 输出完成时间为 `2026-03-20T14:59:17+08:00`
## 结果摘要
- 第二期 10 条规则已全部通过结构测试或分发测试校验,不再保留占位 SQL。
- `BankTagRuleConfigResolverTest` 已覆盖第二期 3 条阈值规则与 7 条无参规则的参数解析行为。
- `CcdiBankTagAnalysisMapperXmlTest` 已确认第二期 3 条资产比对规则和 7 条对象规则均输出真实查询字段,剩余占位规则数量按预期下降。
- `CcdiBankTagServiceImplTest` 已确认第二期阈值型对象规则参数成功接入服务分发链路,无参规则继续走空参数配置。
- 项目概览相关 4 组测试继续通过,说明第二期规则接入未破坏风险人数刷新、概览 SQL 与概览接口结构。
- 测试日志中出现的 `threshold missing` 与 `refresh failed` 为断言异常场景的预期输出,不代表本轮回归失败。
## 结论
- 第二期后端实现已完成10 条规则均已接入现有项目级流水打标主链路。
- 当前验证范围为 Maven 单元测试与 SQL 结构测试,未启动额外前后端进程,因此无需执行进程清理。

View File

@@ -0,0 +1,48 @@
# LSFX Mock 第二期稳定随机命中后端验证记录
## 验证时间
- 2026-03-20
## pytest 验证
- 聚焦验证命令:
- `python3 -m pytest lsfx-mock-server/tests/test_file_service.py -k "phase2_rule_hit_plan" -v`
- `python3 -m pytest lsfx-mock-server/tests/test_phase2_baseline_service.py -v`
- `python3 -m pytest lsfx-mock-server/tests/test_statement_service.py -k "phase2 or salary_quick_transfer_and_salary_unused" -v`
- `python3 -m pytest lsfx-mock-server/tests/integration/test_full_workflow.py -k "phase2" -v`
- 聚焦验证结果:
- 上述 4 组命令全部通过。
- 全量回归命令:
- `python3 -m pytest lsfx-mock-server/tests/test_file_service.py lsfx-mock-server/tests/test_statement_service.py lsfx-mock-server/tests/test_phase2_baseline_service.py lsfx-mock-server/tests/test_api.py lsfx-mock-server/tests/integration/test_full_workflow.py -v`
- 全量回归结果:
- `48 passed`
## SQL 执行与核验
- 执行命令:
- `bin/mysql_utf8_exec.sh sql/migration/2026-03-20-lsfx-mock-phase2-hit-baseline.sql`
- 只读核验结果:
- `ccdi_purchase_transaction` 中存在 `LSFXMOCKP2PUR001 / 兰溪市联调供应链有限公司 / 186000.00`
- `ccdi_asset_info` 中存在 3 条 `LSFX Mock P2` 资产基线记录
- 资产记录写回查询时,`asset_status` 读数为 `NORMAL`;结合现网样例数据的 `正常` 口径,推测当前库内存在状态值归一化或历史兼容现象,但本次“不匹配”目标仍由非 `住宅` 子类型稳定满足
- 幂等性复核:
- 脚本重复执行 1 次后再次查询
- `ccdi_purchase_transaction` 计数为 `1`
- `ccdi_asset_info` 计数为 `3`
## 端到端链路结果
- `test_inner_flow_should_apply_phase2_baselines_before_get_bank_statement` 通过。
- 结果表明:
- `getJZFileOrZjrcuFile` 在返回 `logId` 前已调用第二期基线写入。
- `getBSByLogId` 仍可在同一 `logId` 下稳定读取流水列表。
## 结论
- 第二期稳定随机命中计划、流水样本装配、数据库基线写入和集成链路均已接通。
- 当前 `lsfx-mock-server` 回归通过,数据库基线可重复执行且未产生重复脏数据。
## 环境清理
- 本次验证未启动长期驻留的前后端开发进程,无残留进程需要额外关闭。

View File

@@ -0,0 +1,43 @@
# 结果总览员工结果表后端验证记录
## 验证范围
- 员工结果表 DDL、实体、Mapper 与 XML 基础映射
- 员工归并命中明细聚合与快照字段生成
- 标签写库后的同事务重算链路
- 结果总览 4 类查询切换到员工结果表后的 SQL 与服务封装
## 验证命令
```bash
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultEntityTest,CcdiProjectOverviewEmployeeResultMapperXmlTest
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultBuilderTest
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest
mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewMapperRiskModelCardsTest,CcdiProjectOverviewMapperRiskModelPeopleTest,CcdiProjectOverviewServiceImplTest
```
## 验证结果
- 2026-03-22 执行 `mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultEntityTest,CcdiProjectOverviewEmployeeResultMapperXmlTest`
- 首次红灯确认缺少结果表实体、Mapper 与 SQL 脚本。
- 完成最小实现后复跑2 个测试全部通过。
- 2026-03-22 执行 `mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewEmployeeResultBuilderTest`
- 首次红灯,确认缺少员工归并命中明细 VO 与 builder。
- 完成聚合实现并修正测试样本后复跑1 个测试通过。
- 2026-03-22 执行 `mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewServiceStructureTest,CcdiProjectOverviewServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest`
- 首次红灯,确认 `ICcdiProjectOverviewService` 尚未暴露员工结果表重算方法,标签重算链路尚未接入新方法。
- 接入 `refreshOverviewEmployeeResults` 并切换标签成功链路后复跑13 个测试全部通过。
- 2026-03-22 执行 `mvn test -pl ccdi-project -Dtest=CcdiProjectOverviewMapperSqlTest,CcdiProjectOverviewMapperRiskModelCardsTest,CcdiProjectOverviewMapperRiskModelPeopleTest,CcdiProjectOverviewServiceImplTest`
- 首次红灯,确认结果总览 SQL 仍直接依赖运行时归并基表。
- 切换到员工结果表与快照展开 SQL并收敛测试断言后复跑16 个测试全部通过。
## SQL 执行情况
- 本次未在数据库执行 [`2026-03-20-create-project-overview-employee-result-table.sql`](/Users/wkc/Desktop/ccdi/ccdi/.worktrees/codex/results-overview-employee-result-table/sql/migration/2026-03-20-create-project-overview-employee-result-table.sql)。
- 因此未执行 `bin/mysql_utf8_exec.sh sql/migration/2026-03-20-create-project-overview-employee-result-table.sql`
## 结论
- 结果总览员工结果表后端链路已按设计落地,覆盖建表、聚合构建、同事务重算与页面查询切换。
- 模型卡片 `warningCount` 仍保持为原始标签命中次数,没有退化为员工人数或规则去重数。
- 本轮验证全部基于 Maven 定向测试完成,相关命令均已在 2026-03-22 实际执行且通过。

View File

@@ -0,0 +1,40 @@
# 结果总览员工结果表前端验证记录
## 验证范围
- 结果总览既有接口路径保持不变
- 模型区人员查询参数透传保持不变
- 结果总览入口页并发取数结构保持不变
- 风险仪表盘、风险人员总览、模型区与风险明细区展示边界保持不变
- 页面不重新引入已移除的 TOP10 区块
## 执行日期
- 2026-03-22
## 验证命令
```bash
cd ruoyi-ui
node tests/unit/project-overview-api.test.js
node tests/unit/preliminary-check-api-integration.test.js
node tests/unit/preliminary-check-summary-and-people.test.js
node tests/unit/preliminary-check-model-and-detail.test.js
node tests/unit/preliminary-check-risk-people-binding.test.js
node tests/unit/preliminary-check-model-linkage-flow.test.js
```
## 每条命令结果
- `node tests/unit/project-overview-api.test.js``exit 0`,确认 `/ccdi/project/overview/dashboard``/ccdi/project/overview/risk-people``/ccdi/project/overview/risk-models/cards``/ccdi/project/overview/risk-models/people` 4 个接口路径仍为当前前端契约,模型区人员分页参数继续按既有字段透传。
- `node tests/unit/preliminary-check-api-integration.test.js``exit 0`,确认入口页仍通过 `Promise.all` 并发请求仪表盘、风险人员总览、模型卡片 3 类接口,未感知结果表底层实现。
- `node tests/unit/preliminary-check-summary-and-people.test.js``exit 0`,确认风险仪表盘继续展示 `总人数``高风险``中风险``低风险``无风险人员`,风险人员总览继续展示姓名、身份证号、所属部门、风险等级、命中模型数、核心异常点。
- `node tests/unit/preliminary-check-model-and-detail.test.js``exit 0`,确认模型区仍保留 `ANY / ALL`、关键词筛选、部门筛选与异常标签展示,风险明细区静态结构未被破坏。
- `node tests/unit/preliminary-check-risk-people-binding.test.js``exit 0`,确认风险人员总览仍绑定 `overviewList`,核心异常点支持标签归一化,且未回退到 `topRiskList`
- `node tests/unit/preliminary-check-model-linkage-flow.test.js``exit 0`,确认模型卡片选中/取消、重置、分页与参数构造仍按既有联动流程工作。
## 结论
- 结果总览页前端未新增页面和交互,当前实现已满足“后端改为员工结果表出数后前端契约不变”的要求。
- 本轮主要收益是把现有稳定行为沉淀为显式单测,降低后续改动引入接口漂移、展示漂移或联动回归的风险。
- 本轮验证仅执行前端 Node 单测脚本,未启动额外前后端进程,因此无需执行进程清理。

View File

@@ -0,0 +1,189 @@
# 后端全量打标规则验证记录
## 验证时间
- 2026-03-22
## 验证对象
- 项目:`project_id=49`
- 后端:`http://localhost:62318`
- Mock`http://localhost:8000`
- Mock 启动参数:`main.py --rule-hit-mode all`
## 执行命令
```bash
mvn -pl ccdi-project \
-Dtest=CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest,CcdiBankTagAnalysisMapperXmlTest,CcdiBankTagRuleSqlMetadataTest,CcdiModelParamSqlDefaultsTest \
test
curl -s http://localhost:62318/login/test \
-H 'Content-Type: application/json' \
-d '{"username":"admin","password":"admin123"}'
curl -s http://localhost:62318/ccdi/file-upload/pull-bank-info \
-H "Authorization: Bearer <TOKEN>" \
-H 'Content-Type: application/json' \
-d '{"projectId":49,"idCards":["558455197203132040","523342199111246421","38056420050404632X"],"startDate":"2026-03-01","endDate":"2026-03-22"}'
curl -s http://localhost:62318/ccdi/project/tags/rebuild \
-H "Authorization: Bearer <TOKEN>" \
-H 'Content-Type: application/json' \
-d '{"projectId":49,"modelCode":null}'
```
## 自动化结果
- Maven 回归结果:`BUILD SUCCESS`
- 用例统计:`Tests run: 29, Failures: 0, Errors: 0, Skipped: 0`
- 说明:
- 当前规则元数据、服务分发、XML 结构与风险人数刷新回归未发现失败
- 测试日志中的异常路径日志属于断言场景,不代表回归失败
## 数据库核验
### 最新任务
- 查询表:`ccdi_bank_tag_task`
- 结果:
- `id=47`
- `project_id=49`
- `trigger_type=MANUAL`
- `status=SUCCESS`
- `hit_count=2340`
- `success_rule_count=33`
- `failed_rule_count=0`
### 实际命中规则
- 查询表:`ccdi_bank_statement_tag_result`
- 项目 `49` 实际命中规则数:`19`
- 实际命中规则:
- `HOUSE_OR_CAR_EXPENSE`
- `TAX_EXPENSE`
- `SINGLE_LARGE_INCOME`
- `CUMULATIVE_INCOME`
- `ANNUAL_TURNOVER`
- `LARGE_CASH_DEPOSIT`
- `LARGE_TRANSFER`
- `MULTI_PARTY_GAMBLING_TRANSFER`
- `GAMBLING_SENSITIVE_KEYWORD`
- `HOUSE_REGISTRATION_MISMATCH`
- `PROPERTY_FEE_REGISTRATION_MISMATCH`
- `TAX_ASSET_REGISTRATION_MISMATCH`
- `FOREX_BUY_AMT`
- `FOREX_SELL_AMT`
- `LARGE_PURCHASE_TRANSACTION`
- `SUPPLIER_CONCENTRATION`
- `STOCK_TFR_LARGE`
- `SALARY_QUICK_TRANSFER`
- `LARGE_STOCK_TRADING`
### 未命中规则分类
- 未命中总数:`14`
#### A. 当前仍是占位 SQL 的规则
- `ABNORMAL_CUSTOMER_TRANSACTION`
- `INCOME_ASSET_MISMATCH`
- `CROSS_BORDER_AMT`
- `INTEREST_PAYMENT_BY_OTHERS`
- `WITHDRAW_AMT`
- `PROXY_ACCOUNT_OPERATION`
结论:
- 这 6 条当前不能判定为“可正确打标”,因为实现仍停留在占位层。
#### B. 已有真实 SQL但本次 all 模式项目数据未打中
- `FREQUENT_CASH_DEPOSIT`
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- `SPECIAL_AMOUNT_TRANSACTION`
- `MONTHLY_FIXED_INCOME`
- `FIXED_COUNTERPARTY_TRANSFER`
- `SUSPICIOUS_INCOME_KEYWORD`
- `WITHDRAW_CNT`
- `SALARY_UNUSED`
逐条追因:
- `FREQUENT_CASH_DEPOSIT`
- 项目 `49``batch_id=45565/69755/70053` 中,分别存在 `2026-03-10` 同日 `6` 笔存现样本,金额均为 `3000000~3500000` 元,证件号为员工本人。
- `taskId=47` 日志显示运行时阈值为 `{LARGE_CASH_DEPOSIT=5000, FREQUENT_CASH_DEPOSIT=2}`,但仍记录为“规则无命中”。
- 当前按源码 SQL 直接复核,这批数据已经足以查出 `3` 个对象,因此这条不是“项目数据不满足”,而是 `2026-03-22 09:11` 验证时复用的旧后端进程与当前 Mapper XML 不一致,属于验证环境版本不一致造成的假阴性。
- `LOW_INCOME_RELATIVE_LARGE_TRANSACTION`
- Mock 样本确实写入了两笔亲属转入,单个关系人累计交易额为 `120000` 元。
- 但对应关系人的 `annual_income` 实际为 `64000~128000` 元,折算月收入均高于 `3000` 元。
- 该规则卡在“低收入关系人”前置条件,不是金额不足,而是家庭关系表里的收入数据不满足 SQL 口径。
- `SPECIAL_AMOUNT_TRANSACTION`
- Mock 样本金额固定为 `88888.88` 元,对手方为“兰溪特别金额结算中心”。
- SQL 只识别 `520``1314` 两类特殊金额。
- 这条未命中的直接原因是样本金额语义与真实 SQL 的金额枚举不一致。
- `MONTHLY_FIXED_INCOME`
- Mock 只为每个对象生成了 `2025-12``2026-01``2026-02``2026-03``4` 个月、每月 `7200` 元的稳定转入。
- SQL 要求近 `12` 个月内至少 `6` 个月月收入超过阈值,且波动系数不高于 `0.3`
- 本次样本满足金额稳定,但月数只有 `4`,被 `COUNT(DISTINCT incomeMonth) >= 6` 这一层拦截。
- `FIXED_COUNTERPARTY_TRANSFER`
- “季度稳定兼职收入”样本都落在 `secondary` 身份,也就是家属证件号;项目 `49` 复核可见这些证件号均为 `NON_STAFF`
- SQL 从 `ccdi_base_staff` 开始内连接,只统计员工本人近 `12` 个月的固定对手方转入。
- 这条未命中的直接原因是样本主体落在家属号,进入 SQL 第一层 `inner join ccdi_base_staff` 时就被过滤掉了。
- `SUSPICIOUS_INCOME_KEYWORD`
- Mock 样本摘要为“咨询返现收入”,对手方为“灰度信息咨询有限公司”。
- SQL 关键词仅覆盖“工资、分红、红利、奖金、劳务费、批量代付”等收入表达,不包含“返现”“咨询”。
- 这条未命中的直接原因是摘要关键词与真实 SQL 的关键词集合不一致。
- `WITHDRAW_CNT`
- 当前按源码 SQL 直接复核,项目 `49` 已能查出 `3` 个对象在 `2026-03-12` 单日提现次数超过阈值 `3` 次,其中计数分别为 `4``4``5`
-`taskId=47` 日志在同一阈值 `{WITHDRAW_CNT=3}` 下仍记录为“规则无命中”。
- 结合 `taskId=47` 发生在 `2026-03-22 09:11:57`、当前后端进程启动于 `2026-03-22 12:11:28` 这一事实,可以判定这条与 `FREQUENT_CASH_DEPOSIT` 一样,属于验证时后端运行版本未刷新导致的假阴性,而不是数据本身不满足。
- `SALARY_UNUSED`
- `2026-02-10` 那组“工资入账 + 代扣公积金”样本全部落在家属证件号SQL 的 `inner join ccdi_base_staff` 会直接排除。
- 员工本人侧仅保留了 `2026-03-14` 的工资入账样本,但这些样本正是 `SALARY_QUICK_TRANSFER` 的基础数据,工资到账后 `30` 天内均已有 `2~5` 笔非代扣支出。
- 因此这条是“双重不满足”:专用样本被员工主表连接挡掉,员工本人样本又被 `not exists` 的“30 天内无支出”条件挡掉。
结论:
- 这 8 条里,`LOW_INCOME_RELATIVE_LARGE_TRANSACTION``SPECIAL_AMOUNT_TRANSACTION``MONTHLY_FIXED_INCOME``FIXED_COUNTERPARTY_TRANSFER``SUSPICIOUS_INCOME_KEYWORD``SALARY_UNUSED` 属于“样本/主数据口径与 SQL 条件不一致”。
- `FREQUENT_CASH_DEPOSIT``WITHDRAW_CNT` 不属于数据不满足,而是 `2026-03-22` 那次验证复用了旧后端进程,导致运行时 SQL 与当前源码不一致。
### Mock all 模式边界
- Mock all 模式当前直接覆盖 26 条规则,不包含:
- `ABNORMAL_CUSTOMER_TRANSACTION`
- `INCOME_ASSET_MISMATCH`
- `CROSS_BORDER_AMT`
- `INTEREST_PAYMENT_BY_OTHERS`
- `LARGE_PURCHASE_TRANSACTION`
- `WITHDRAW_AMT`
- `PROXY_ACCOUNT_OPERATION`
- 其中 `LARGE_PURCHASE_TRANSACTION` 通过采购基线事实命中,已在本次结果中验证到
## 接口回查
- 回查接口:`GET /ccdi/project/bank-statement/detail/70334`
- 返回:
- `code=200`
- `data.hitTags` 非空
- 命中标签包含 `LARGE_TRANSFER`
样例摘要:
- `bank_statement_id=70334`
- `rule_code=LARGE_TRANSFER`
- `reason_detail=大额转账支出 12000000.00 元,超过阈值 5000 元`
## 最终结论
- 本次验证证明:
- 后端当前 33 条规则都已经进入执行链路,最新任务 `success_rule_count=33`
- 在项目 `49` 的真实联调链路下,只有 19 条规则产生了实际标签结果
- 因此截至 2026-03-22结论应为
- 后端打标方法“不是全部规则都已验证可正确打标”
- 当前状态更准确地说是“部分规则已验证通过,部分规则未命中,另有 6 条规则仍为占位实现”
## 环境说明
- 本次验证复用了已经运行中的本地后端与 Mock 服务
- 未额外启动新的前后端进程,因此本次无新增进程需要清理

View File

@@ -0,0 +1,41 @@
# LSFX Mock 规则命中模式后端验证记录
## 执行命令
```bash
cd lsfx-mock-server
python3 -m pytest tests/test_startup.py tests/test_file_service.py -k "rule_hit_plan or parse_args" -v
PORT=18000 python3 main.py --rule-hit-mode all > /tmp/lsfx_main_18000.log 2>&1 &
sleep 3
kill <main-pid>
PORT=18001 python3 dev.py --reload --rule-hit-mode all > /tmp/lsfx_dev_18001.log 2>&1 &
sleep 5
kill <dev-pid>
```
## 测试结果
- 2026-03-22 执行:
`python3 -m pytest tests/test_startup.py tests/test_file_service.py -k "rule_hit_plan or parse_args" -v`
- 结果:`10 passed, 5 deselected, 1 warning in 0.27s`
- warning 为现有 `pydantic` 弃用提示,本次改动未引入失败或 error。
## 启动验证结果
- 普通启动验证:
- 使用临时端口 `18000` 执行 `python3 main.py --rule-hit-mode all`
- 日志显示 `Uvicorn running on http://0.0.0.0:18000`
- 结束后日志显示正常 shutdown进程已清理
- 热重载启动验证:
- 使用临时端口 `18001` 执行 `python3 dev.py --reload --rule-hit-mode all`
- 日志显示 reloader 进程与 server 进程均成功启动
- 结束后日志显示 server process 与 reloader process 已停止
## 进程清理结果
- 启动验证结束后,针对本次工作树路径再次执行进程扫描。
- `main.py --rule-hit-mode all` 无残留进程。
- `dev.py --reload --rule-hit-mode all` 无残留进程。
## 说明
- 原计划默认使用 `8000` 端口进行烟测。
- 验证时发现本机已有独立 `Python main.py` 进程占用 `8000`,为避免影响现有环境,本次改用临时端口 `18000/18001` 完成等价验证。

View File

@@ -22,15 +22,20 @@ pip install -r requirements.txt
### 2. 启动服务 ### 2. 启动服务
```bash ```bash
python main.py python main.py --rule-hit-mode subset
python main.py --rule-hit-mode all
``` ```
或使用 uvicorn支持热重载 热重载启动请使用项目脚本入口
```bash ```bash
uvicorn main:app --reload --host 0.0.0.0 --port 8000 python dev.py --reload --rule-hit-mode subset
python dev.py --reload --rule-hit-mode all
``` ```
- `subset`:默认模式,按 `logId` 稳定随机命中部分规则
- `all`:全部兼容规则命中模式,会命中当前可共存的全部规则
### 3. 访问 API 文档 ### 3. 访问 API 文档
- **Swagger UI**: http://localhost:8000/docs - **Swagger UI**: http://localhost:8000/docs
@@ -135,6 +140,7 @@ PORT=8000
# 模拟配置 # 模拟配置
PARSE_DELAY_SECONDS=4 PARSE_DELAY_SECONDS=4
MAX_FILE_SIZE=10485760 MAX_FILE_SIZE=10485760
RULE_HIT_MODE=subset
``` ```
### 响应模板 ### 响应模板

104
lsfx-mock-server/STARTUP.md Normal file
View File

@@ -0,0 +1,104 @@
# LSFX Mock 服务启动说明
## 适用范围
本文说明 `lsfx-mock-server` 的本地启动方式,以及启动参数 `--rule-hit-mode` 的写法。
服务目录:
```bash
cd lsfx-mock-server
```
## 普通启动
普通启动入口为 `main.py`
### 默认随机子集模式
```bash
python3 main.py --rule-hit-mode subset
```
### 全部兼容规则命中模式
```bash
python3 main.py --rule-hit-mode all
```
## 热重载启动
热重载启动入口为 `dev.py`,不要再直接使用裸 `uvicorn main:app --reload ...` 透传业务参数。
### 默认随机子集模式
```bash
python3 dev.py --reload --rule-hit-mode subset
```
### 全部兼容规则命中模式
```bash
python3 dev.py --reload --rule-hit-mode all
```
## 启动参数说明
### `--rule-hit-mode`
可选值只有两个:
- `subset`:默认模式。按 `logId` 稳定随机命中每类规则池中的部分规则。
- `all`:全部兼容规则命中模式。会命中当前已定义的全部可共存规则;如果后续配置了互斥组,会按互斥组优先级保留组内首个规则。
非法值会在启动阶段直接报错退出,例如下面这种写法是不允许的:
```bash
python3 main.py --rule-hit-mode invalid
```
### `--reload`
`dev.py` 支持该参数:
```bash
python3 dev.py --reload --rule-hit-mode subset
```
含义是开启热重载,适合本地开发联调。
## 推荐用法
### 日常开发
```bash
python3 dev.py --reload --rule-hit-mode subset
```
### 规则联调或一次性覆盖更多规则
```bash
python3 dev.py --reload --rule-hit-mode all
```
### 稳定复现普通启动行为
```bash
python3 main.py --rule-hit-mode subset
```
## 启动成功后的访问地址
- Swagger UI: `http://localhost:8000/docs`
- ReDoc: `http://localhost:8000/redoc`
- 健康检查: `http://localhost:8000/health`
## 补充说明
- `RULE_HIT_MODE` 也可以通过环境变量配置,但当前项目推荐优先使用命令行参数,便于直接区分本次启动的命中模式。
- 如果本机 `8000` 端口已被占用,可以临时通过环境变量覆盖端口,例如:
```bash
PORT=18000 python3 main.py --rule-hit-mode all
PORT=18001 python3 dev.py --reload --rule-hit-mode all
```

View File

@@ -1,6 +1,7 @@
from pydantic_settings import BaseSettings from pydantic_settings import BaseSettings
from pathlib import Path from pathlib import Path
import re import re
from typing import Literal
def _load_ruoyi_mysql_defaults() -> dict: def _load_ruoyi_mysql_defaults() -> dict:
@@ -42,6 +43,7 @@ class Settings(BaseSettings):
# 模拟配置 # 模拟配置
PARSE_DELAY_SECONDS: int = 4 # 文件解析延迟秒数 PARSE_DELAY_SECONDS: int = 4 # 文件解析延迟秒数
MAX_FILE_SIZE: int = 10485760 # 10MB MAX_FILE_SIZE: int = 10485760 # 10MB
RULE_HIT_MODE: Literal["subset", "all"] = "subset"
# 测试数据配置 # 测试数据配置
INITIAL_PROJECT_ID: int = 1000 INITIAL_PROJECT_ID: int = 1000

29
lsfx-mock-server/dev.py Normal file
View File

@@ -0,0 +1,29 @@
import argparse
import uvicorn
from config.settings import settings
from main import apply_rule_hit_mode
def parse_args(argv=None):
    """Parse CLI options for the dev launcher.

    Supported flags:
      --reload          enable uvicorn hot reload (off by default)
      --rule-hit-mode   'subset' (default) or 'all'

    :param argv: optional argument list; None means sys.argv[1:].
    :return: the parsed argparse.Namespace.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument("--reload", action="store_true")
    cli.add_argument("--rule-hit-mode", choices=["subset", "all"], default="subset")
    return cli.parse_args(argv)
def main():
    """Dev entry point: apply the CLI-selected rule-hit mode, then boot uvicorn."""
    options = parse_args()
    # Propagate the mode into settings/env before the server starts serving,
    # so request handlers observe the mode chosen on this command line.
    apply_rule_hit_mode(options.rule_hit_mode)
    level = "debug" if settings.DEBUG else "info"
    uvicorn.run(
        "main:app",
        host=settings.HOST,
        port=settings.PORT,
        log_level=level,
        reload=options.reload,
    )


if __name__ == "__main__":
    main()

View File

@@ -3,6 +3,9 @@
基于 FastAPI 实现的 Mock 服务器,用于模拟流水分析平台的 7 个核心接口 基于 FastAPI 实现的 Mock 服务器,用于模拟流水分析平台的 7 个核心接口
""" """
import argparse
import os
from fastapi import FastAPI from fastapi import FastAPI
from routers import api from routers import api
from config.settings import settings from config.settings import settings
@@ -68,9 +71,23 @@ async def health_check():
} }
def parse_args(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument("--rule-hit-mode", choices=["subset", "all"], default="subset")
return parser.parse_args(argv)
def apply_rule_hit_mode(rule_hit_mode: str) -> None:
os.environ["RULE_HIT_MODE"] = rule_hit_mode
settings.RULE_HIT_MODE = rule_hit_mode
if __name__ == "__main__": if __name__ == "__main__":
import uvicorn import uvicorn
args = parse_args()
apply_rule_hit_mode(args.rule_hit_mode)
# 启动服务器 # 启动服务器
uvicorn.run( uvicorn.run(
app, app,

View File

@@ -1,6 +1,7 @@
from fastapi import BackgroundTasks, UploadFile from fastapi import BackgroundTasks, UploadFile
from utils.response_builder import ResponseBuilder from utils.response_builder import ResponseBuilder
from config.settings import settings from config.settings import settings
from services.phase2_baseline_service import Phase2BaselineService
from services.staff_identity_repository import StaffIdentityRepository from services.staff_identity_repository import StaffIdentityRepository
from typing import Dict, List, Union from typing import Dict, List, Union
from dataclasses import dataclass, field from dataclasses import dataclass, field
@@ -31,6 +32,24 @@ PHASE1_RULE_CODES = [
"WITHDRAW_CNT", "WITHDRAW_CNT",
] ]
PHASE2_STATEMENT_RULE_CODES = [
"LOW_INCOME_RELATIVE_LARGE_TRANSACTION",
"MULTI_PARTY_GAMBLING_TRANSFER",
"MONTHLY_FIXED_INCOME",
"FIXED_COUNTERPARTY_TRANSFER",
"SALARY_QUICK_TRANSFER",
"SALARY_UNUSED",
]
PHASE2_BASELINE_RULE_CODES = [
"HOUSE_REGISTRATION_MISMATCH",
"PROPERTY_FEE_REGISTRATION_MISMATCH",
"TAX_ASSET_REGISTRATION_MISMATCH",
"SUPPLIER_CONCENTRATION",
]
RULE_CONFLICT_GROUPS = []
@dataclass @dataclass
class FileRecord: class FileRecord:
@@ -88,6 +107,8 @@ class FileRecord:
family_id_cards: List[str] = field(default_factory=list) family_id_cards: List[str] = field(default_factory=list)
large_transaction_hit_rules: List[str] = field(default_factory=list) large_transaction_hit_rules: List[str] = field(default_factory=list)
phase1_hit_rules: List[str] = field(default_factory=list) phase1_hit_rules: List[str] = field(default_factory=list)
phase2_statement_hit_rules: List[str] = field(default_factory=list)
phase2_baseline_hit_rules: List[str] = field(default_factory=list)
class FileService: class FileService:
@@ -97,10 +118,11 @@ class FileService:
LOG_ID_MIN = settings.INITIAL_LOG_ID LOG_ID_MIN = settings.INITIAL_LOG_ID
LOG_ID_MAX = 99999 LOG_ID_MAX = 99999
def __init__(self, staff_identity_repository=None): def __init__(self, staff_identity_repository=None, phase2_baseline_service=None):
self.file_records: Dict[int, FileRecord] = {} # logId -> FileRecord self.file_records: Dict[int, FileRecord] = {} # logId -> FileRecord
self.log_counter = settings.INITIAL_LOG_ID self.log_counter = settings.INITIAL_LOG_ID
self.staff_identity_repository = staff_identity_repository or StaffIdentityRepository() self.staff_identity_repository = staff_identity_repository or StaffIdentityRepository()
self.phase2_baseline_service = phase2_baseline_service or Phase2BaselineService()
def get_file_record(self, log_id: int) -> FileRecord: def get_file_record(self, log_id: int) -> FileRecord:
"""按 logId 获取已存在的文件记录。""" """按 logId 获取已存在的文件记录。"""
@@ -159,16 +181,57 @@ class FileService:
selected_codes = set(rng.sample(rule_codes, rng.randint(min_count, max_count))) selected_codes = set(rng.sample(rule_codes, rng.randint(min_count, max_count)))
return [rule_code for rule_code in rule_codes if rule_code in selected_codes] return [rule_code for rule_code in rule_codes if rule_code in selected_codes]
def _build_rule_hit_plan(self, log_id: int) -> dict: def _build_subset_rule_hit_plan(self, log_id: int) -> dict:
"""基于 logId 生成稳定的规则命中计划。""" """基于 logId 生成稳定的规则子集命中计划。"""
rng = random.Random(f"rule-plan:{log_id}") rng = random.Random(f"rule-plan:{log_id}")
return { return {
"large_transaction_hit_rules": self._pick_rule_subset( "large_transaction_hit_rules": self._pick_rule_subset(
rng, LARGE_TRANSACTION_RULE_CODES, 2, 4 rng, LARGE_TRANSACTION_RULE_CODES, 2, 4
), ),
"phase1_hit_rules": self._pick_rule_subset(rng, PHASE1_RULE_CODES, 2, 4), "phase1_hit_rules": self._pick_rule_subset(rng, PHASE1_RULE_CODES, 2, 4),
"phase2_statement_hit_rules": self._pick_rule_subset(
rng, PHASE2_STATEMENT_RULE_CODES, 2, 4
),
"phase2_baseline_hit_rules": self._pick_rule_subset(
rng, PHASE2_BASELINE_RULE_CODES, 2, 4
),
} }
def _build_all_compatible_rule_hit_plan(self) -> dict:
    """Build a hit plan selecting every rule code from each rule pool.

    :return: plan dict mapping each plan key to a fresh copy of its full
        rule-code pool (copied so callers may mutate without touching the
        module-level constants).
    """
    full_plan = {
        "large_transaction_hit_rules": LARGE_TRANSACTION_RULE_CODES,
        "phase1_hit_rules": PHASE1_RULE_CODES,
        "phase2_statement_hit_rules": PHASE2_STATEMENT_RULE_CODES,
        "phase2_baseline_hit_rules": PHASE2_BASELINE_RULE_CODES,
    }
    return {plan_key: list(codes) for plan_key, codes in full_plan.items()}
def _apply_conflict_groups(self, rule_plan: dict) -> dict:
    """Trim a rule plan so each mutually-exclusive group keeps one rule.

    For every conflict group in RULE_CONFLICT_GROUPS, the first group
    member present in a plan entry (group order is the priority order)
    survives and the remaining members of that group are dropped from the
    entry. Entries untouched by any group pass through unchanged.

    :param rule_plan: mapping of plan key -> list of rule codes.
    :return: a new mapping with conflict groups resolved; input not mutated.
    """
    resolved_plan = {}
    for plan_key, rule_codes in rule_plan.items():
        surviving = list(rule_codes)
        for conflict_group in RULE_CONFLICT_GROUPS:
            # First group member present in this entry wins.
            winner = None
            for candidate in conflict_group:
                if candidate in surviving:
                    winner = candidate
                    break
            if winner is None:
                continue
            surviving = [
                code
                for code in surviving
                if code == winner or code not in conflict_group
            ]
        resolved_plan[plan_key] = surviving
    return resolved_plan
def _build_rule_hit_plan(self, log_id: int) -> dict:
    """Build the rule hit plan for the configured RULE_HIT_MODE.

    'all' yields every compatible rule (with conflict groups applied);
    any other configured value falls back to the stable per-logId random
    subset plan.
    """
    if settings.RULE_HIT_MODE != "all":
        return self._build_subset_rule_hit_plan(log_id)
    return self._apply_conflict_groups(self._build_all_compatible_rule_hit_plan())
def _create_file_record( def _create_file_record(
self, self,
*, *,
@@ -191,6 +254,8 @@ class FileService:
family_id_cards: List[str] = None, family_id_cards: List[str] = None,
large_transaction_hit_rules: List[str] = None, large_transaction_hit_rules: List[str] = None,
phase1_hit_rules: List[str] = None, phase1_hit_rules: List[str] = None,
phase2_statement_hit_rules: List[str] = None,
phase2_baseline_hit_rules: List[str] = None,
parsing: bool = True, parsing: bool = True,
status: int = -5, status: int = -5,
) -> FileRecord: ) -> FileRecord:
@@ -223,6 +288,8 @@ class FileService:
family_id_cards=list(family_id_cards or []), family_id_cards=list(family_id_cards or []),
large_transaction_hit_rules=list(large_transaction_hit_rules or []), large_transaction_hit_rules=list(large_transaction_hit_rules or []),
phase1_hit_rules=list(phase1_hit_rules or []), phase1_hit_rules=list(phase1_hit_rules or []),
phase2_statement_hit_rules=list(phase2_statement_hit_rules or []),
phase2_baseline_hit_rules=list(phase2_baseline_hit_rules or []),
parsing=parsing, parsing=parsing,
status=status, status=status,
) )
@@ -231,6 +298,14 @@ class FileService:
"""读取一个员工及其亲属身份范围。""" """读取一个员工及其亲属身份范围。"""
return self.staff_identity_repository.select_random_staff_with_families() return self.staff_identity_repository.select_random_staff_with_families()
def _apply_phase2_baselines(self, file_record: FileRecord) -> None:
"""按当前记录命中的第二期基线规则幂等补齐外部事实。"""
self.phase2_baseline_service.apply(
staff_id_card=file_record.staff_id_card,
family_id_cards=file_record.family_id_cards,
baseline_rule_codes=file_record.phase2_baseline_hit_rules,
)
async def upload_file( async def upload_file(
self, group_id: int, file: UploadFile, background_tasks: BackgroundTasks self, group_id: int, file: UploadFile, background_tasks: BackgroundTasks
) -> Dict: ) -> Dict:
@@ -280,12 +355,15 @@ class FileService:
staff_name=identity_scope["staff_name"], staff_name=identity_scope["staff_name"],
staff_id_card=identity_scope["staff_id_card"], staff_id_card=identity_scope["staff_id_card"],
family_id_cards=identity_scope["family_id_cards"], family_id_cards=identity_scope["family_id_cards"],
large_transaction_hit_rules=rule_hit_plan["large_transaction_hit_rules"], large_transaction_hit_rules=rule_hit_plan.get("large_transaction_hit_rules", []),
phase1_hit_rules=rule_hit_plan["phase1_hit_rules"], phase1_hit_rules=rule_hit_plan.get("phase1_hit_rules", []),
phase2_statement_hit_rules=rule_hit_plan.get("phase2_statement_hit_rules", []),
phase2_baseline_hit_rules=rule_hit_plan.get("phase2_baseline_hit_rules", []),
) )
# 存储记录 # 存储记录
self.file_records[log_id] = file_record self.file_records[log_id] = file_record
self._apply_phase2_baselines(file_record)
# 添加后台任务(延迟解析) # 添加后台任务(延迟解析)
background_tasks.add_task(self._delayed_parse, log_id) background_tasks.add_task(self._delayed_parse, log_id)
@@ -607,12 +685,15 @@ class FileService:
staff_name=identity_scope["staff_name"], staff_name=identity_scope["staff_name"],
staff_id_card=identity_scope["staff_id_card"], staff_id_card=identity_scope["staff_id_card"],
family_id_cards=identity_scope["family_id_cards"], family_id_cards=identity_scope["family_id_cards"],
large_transaction_hit_rules=rule_hit_plan["large_transaction_hit_rules"], large_transaction_hit_rules=rule_hit_plan.get("large_transaction_hit_rules", []),
phase1_hit_rules=rule_hit_plan["phase1_hit_rules"], phase1_hit_rules=rule_hit_plan.get("phase1_hit_rules", []),
phase2_statement_hit_rules=rule_hit_plan.get("phase2_statement_hit_rules", []),
phase2_baseline_hit_rules=rule_hit_plan.get("phase2_baseline_hit_rules", []),
parsing=False, parsing=False,
) )
self.file_records[log_id] = file_record self.file_records[log_id] = file_record
self._apply_phase2_baselines(file_record)
# 返回成功的响应包含logId数组 # 返回成功的响应包含logId数组
return { return {

View File

@@ -0,0 +1,264 @@
from textwrap import dedent
from typing import Dict, List, Tuple
from config.settings import settings
class Phase2BaselineService:
"""第二期数据库基线服务。
"""
SUPPLIER_PURCHASE_ID = "LSFXMOCKP2PUR001"
SUPPLIER_NAME = "兰溪市联调供应链有限公司"
ASSET_IDENTIFIERS = {
"HOUSE_REGISTRATION_MISMATCH": "LSFX Mock P2 HOUSE_REGISTRATION_MISMATCH",
"PROPERTY_FEE_REGISTRATION_MISMATCH": "LSFX Mock P2 PROPERTY_FEE_REGISTRATION_MISMATCH",
"TAX_ASSET_REGISTRATION_MISMATCH": "LSFX Mock P2 TAX_ASSET_REGISTRATION_MISMATCH",
}
def __init__(self):
self.db_config = {
"host": settings.CCDI_DB_HOST,
"port": settings.CCDI_DB_PORT,
"database": settings.CCDI_DB_NAME,
"username": settings.CCDI_DB_USERNAME,
"password": settings.CCDI_DB_PASSWORD,
"connect_timeout_seconds": settings.CCDI_DB_CONNECT_TIMEOUT_SECONDS,
}
@staticmethod
def _sql_quote(value: str) -> str:
return "'" + value.replace("'", "''") + "'"
def _connect(self):
try:
import pymysql
except ImportError as exc:
raise RuntimeError("缺少 PyMySQL 依赖,无法写入第二期数据库基线") from exc
return pymysql.connect(
host=settings.CCDI_DB_HOST,
port=settings.CCDI_DB_PORT,
user=settings.CCDI_DB_USERNAME,
password=settings.CCDI_DB_PASSWORD,
database=settings.CCDI_DB_NAME,
charset="utf8mb4",
connect_timeout=settings.CCDI_DB_CONNECT_TIMEOUT_SECONDS,
autocommit=False,
)
def _resolve_asset_owner_scopes(
self,
staff_id_card: str,
family_id_cards: List[str],
) -> Dict[str, Tuple[str, str]]:
family_pool = [card for card in family_id_cards if card]
first_family = family_pool[0] if family_pool else staff_id_card
second_family = family_pool[1] if len(family_pool) > 1 else first_family
return {
"HOUSE_REGISTRATION_MISMATCH": (staff_id_card, staff_id_card),
"PROPERTY_FEE_REGISTRATION_MISMATCH": (staff_id_card, first_family),
"TAX_ASSET_REGISTRATION_MISMATCH": (staff_id_card, second_family),
}
def _build_supplier_concentration_sql(self, staff_id_card: str) -> List[str]:
purchase_id = self.SUPPLIER_PURCHASE_ID
supplier_name = self.SUPPLIER_NAME
sql = [
dedent(
f"""
DELETE FROM ccdi_purchase_transaction
WHERE purchase_id = {self._sql_quote(purchase_id)};
"""
).strip(),
dedent(
f"""
INSERT INTO ccdi_purchase_transaction (
purchase_id,
purchase_category,
project_name,
subject_name,
subject_desc,
purchase_qty,
budget_amount,
bid_amount,
actual_amount,
contract_amount,
settlement_amount,
purchase_method,
supplier_name,
contact_person,
contact_phone,
supplier_uscc,
supplier_bank_account,
apply_date,
plan_approve_date,
announce_date,
bid_open_date,
contract_sign_date,
expected_delivery_date,
actual_delivery_date,
acceptance_date,
settlement_date,
applicant_id,
applicant_name,
apply_department,
purchase_leader_id,
purchase_leader_name,
purchase_department,
created_by,
updated_by
)
SELECT
{self._sql_quote(purchase_id)},
'设备采购',
'LSFX Mock P2',
'可疑采购基线',
'用于命中 SUPPLIER_CONCENTRATION 真实规则',
1,
188000.00,
186000.00,
186000.00,
186000.00,
186000.00,
'竞争性谈判',
{self._sql_quote(supplier_name)},
'联调联系人',
'13800000000',
'91330781P2PUR00011',
'6222000000001234',
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CAST(s.staff_id AS CHAR),
s.name,
'纪检初核部',
NULL,
NULL,
NULL,
'admin',
'admin'
FROM ccdi_base_staff s
WHERE s.id_card = {self._sql_quote(staff_id_card)}
LIMIT 1;
"""
).strip(),
]
return sql
def _build_asset_mismatch_sql(
self,
rule_code: str,
family_id: str,
person_id: str,
) -> List[str]:
asset_name = self.ASSET_IDENTIFIERS[rule_code]
asset_main_type = "房产"
asset_sub_type = "商铺"
return [
dedent(
f"""
DELETE FROM ccdi_asset_info
WHERE asset_name = {self._sql_quote(asset_name)};
"""
).strip(),
dedent(
f"""
INSERT INTO ccdi_asset_info (
family_id,
person_id,
asset_main_type,
asset_sub_type,
asset_name,
ownership_ratio,
purchase_eval_date,
original_value,
current_value,
valuation_date,
asset_status,
remarks,
create_by,
update_by
)
VALUES (
{self._sql_quote(family_id)},
{self._sql_quote(person_id)},
{self._sql_quote(asset_main_type)},
{self._sql_quote(asset_sub_type)},
{self._sql_quote(asset_name)},
100.00,
CURRENT_DATE,
1880000.00,
1880000.00,
CURRENT_DATE,
'正常',
{self._sql_quote(f'用于命中 {rule_code} 真实规则的第二期基线')},
'admin',
'admin'
);
"""
).strip(),
]
def build_sql_plan(
self,
staff_id_card: str,
family_id_cards: List[str],
baseline_rule_codes: List[str],
) -> List[str]:
"""生成第二期基线 SQL 计划。"""
selected_rule_codes = []
for rule_code in baseline_rule_codes or []:
if rule_code not in selected_rule_codes:
selected_rule_codes.append(rule_code)
sql_plan: List[str] = []
asset_owner_ids = self._resolve_asset_owner_scopes(
staff_id_card,
family_id_cards or [],
)
for rule_code in selected_rule_codes:
if rule_code == "SUPPLIER_CONCENTRATION":
sql_plan.extend(self._build_supplier_concentration_sql(staff_id_card))
elif rule_code in self.ASSET_IDENTIFIERS:
family_id, person_id = asset_owner_ids[rule_code]
sql_plan.extend(
self._build_asset_mismatch_sql(
rule_code=rule_code,
family_id=family_id,
person_id=person_id,
)
)
return sql_plan
def apply(
    self,
    staff_id_card: str,
    family_id_cards: List[str],
    baseline_rule_codes: List[str],
) -> None:
    """Idempotently write the phase-2 baselines for the hit rules to the DB.

    Runs the generated SQL plan inside one transaction: commits on success,
    rolls back and re-raises on any failure. A no-op when no rule produced SQL.
    """
    plan = self.build_sql_plan(staff_id_card, family_id_cards, baseline_rule_codes)
    if not plan:
        return None
    with self._connect() as connection:
        try:
            with connection.cursor() as cursor:
                for statement in plan:
                    cursor.execute(statement)
            connection.commit()
        except Exception:
            # Keep the database consistent, then surface the original error.
            connection.rollback()
            raise
    return None

View File

@@ -164,6 +164,35 @@ def _build_sample_context(
} }
def _build_phase2_subjects(
    log_id: int,
    staff_id_card: Optional[str] = None,
    family_id_cards: Optional[List[str]] = None,
) -> Dict[str, str]:
    """Derive primary/secondary/tertiary subject id-cards for phase-2 seeds.

    Prefers the explicitly supplied staff/family id cards and falls back to
    the identity scope resolved from ``log_id``. The secondary subject only
    collides with the primary when no distinct alternative exists.
    """
    scope = resolve_identity_scope(log_id)
    primary = staff_id_card or scope["staff"]["id_card"]
    fallback_family = scope["family"]["id_card"]
    # Family cards usable as non-primary subjects.
    pool = [card for card in (family_id_cards or []) if card and card != primary]
    if pool:
        secondary = pool[0]
    elif fallback_family != primary:
        secondary = fallback_family
    else:
        secondary = primary
    tertiary = pool[1] if len(pool) > 1 else secondary
    return {
        "primary": primary,
        "secondary": secondary,
        "tertiary": tertiary,
    }
def build_house_or_car_samples(group_id: int, log_id: int, **kwargs) -> List[Dict]: def build_house_or_car_samples(group_id: int, log_id: int, **kwargs) -> List[Dict]:
context = _build_sample_context(log_id, **kwargs) context = _build_sample_context(log_id, **kwargs)
return [ return [
@@ -559,6 +588,223 @@ def build_withdraw_cnt_samples(group_id: int, log_id: int, **kwargs) -> List[Dic
] ]
def build_low_income_relative_large_transaction_samples(
    group_id: int,
    log_id: int,
    **kwargs,
) -> List[Dict]:
    """Seed two large inbound transfers credited to the secondary subject."""
    context = _build_sample_context(log_id, **kwargs)
    subjects = _build_phase2_subjects(
        log_id,
        staff_id_card=kwargs.get("staff_id_card"),
        family_id_cards=kwargs.get("family_id_cards"),
    )
    # (days back from reference time, memo, credited amount)
    specs = [
        (18, "亲属大额转入", 68000.0),
        (9, "亲属经营补贴", 52000.0),
    ]
    return [
        _build_statement(
            group_id,
            log_id,
            trx_datetime=REFERENCE_NOW - timedelta(days=days_ago),
            cret_no=subjects["secondary"],
            customer_name="兰溪惠民互助协会",
            user_memo=memo,
            cash_type="对私转账",
            cr_amount=amount,
            le_name=context["le_name"],
            account_mask_no=context["account_no"],
            customer_account_mask_no="6222024888800001",
        )
        for days_ago, memo, amount in specs
    ]
def build_multi_party_gambling_transfer_samples(
    group_id: int,
    log_id: int,
    **kwargs,
) -> List[Dict]:
    """Seed same-day debits to three gambling-like counterparties."""
    context = _build_sample_context(log_id, **kwargs)
    subjects = _build_phase2_subjects(
        log_id,
        staff_id_card=kwargs.get("staff_id_card"),
        family_id_cards=kwargs.get("family_id_cards"),
    )
    counterparties = [
        ("欢乐游戏科技有限公司", 3888.0),
        ("星彩娱乐网络科技有限公司", 4288.0),
        ("极速竞技服务有限公司", 4688.0),
    ]
    statements = []
    for index, (counterparty, amount) in enumerate(counterparties):
        statements.append(
            _build_statement(
                group_id,
                log_id,
                # Same day, one hour apart, starting at 10:00.
                trx_datetime=datetime(2026, 3, 11, 10 + index, 0, 0),
                cret_no=subjects["primary"],
                customer_name=counterparty,
                user_memo="手机银行转账",
                cash_type="对私转账",
                dr_amount=amount,
                le_name=context["le_name"],
                account_mask_no=context["account_no"],
                customer_account_mask_no=f"62220247777000{index + 1}",
            )
        )
    return statements
def build_monthly_fixed_income_samples(group_id: int, log_id: int, **kwargs) -> List[Dict]:
    """Seed four consecutive months of identical non-payroll income credits."""
    context = _build_sample_context(log_id, **kwargs)
    subjects = _build_phase2_subjects(
        log_id,
        staff_id_card=kwargs.get("staff_id_card"),
        family_id_cards=kwargs.get("family_id_cards"),
    )
    samples = []
    # Same amount on the 5th of each month, Dec 2025 through Mar 2026.
    for year, month in ((2025, 12), (2026, 1), (2026, 2), (2026, 3)):
        samples.append(
            _build_statement(
                group_id,
                log_id,
                trx_datetime=datetime(year, month, 5, 9, 0, 0),
                cret_no=subjects["primary"],
                customer_name="兰溪远航信息服务有限公司",
                user_memo="月度稳定兼职收入",
                cash_type="对私转账",
                cr_amount=7200.0,
                le_name=context["le_name"],
                account_mask_no=context["account_no"],
                customer_account_mask_no="6222024666600101",
            )
        )
    return samples
def build_fixed_counterparty_transfer_samples(
    group_id: int,
    log_id: int,
    **kwargs,
) -> List[Dict]:
    """Seed quarterly inbound transfers from one fixed counterparty."""
    context = _build_sample_context(log_id, **kwargs)
    subjects = _build_phase2_subjects(
        log_id,
        staff_id_card=kwargs.get("staff_id_card"),
        family_id_cards=kwargs.get("family_id_cards"),
    )
    samples = []
    # One credit on the 8th of each quarter's first month, Q2 2025 to Q1 2026.
    for year, month in ((2025, 4), (2025, 7), (2025, 10), (2026, 1)):
        samples.append(
            _build_statement(
                group_id,
                log_id,
                trx_datetime=datetime(year, month, 8, 9, 0, 0),
                cret_no=subjects["secondary"],
                customer_name="兰溪零工服务有限公司",
                user_memo="季度稳定兼职收入",
                cash_type="对私转账",
                cr_amount=4200.0,
                le_name=context["le_name"],
                account_mask_no=context["account_no"],
                customer_account_mask_no="6222024666600201",
            )
        )
    return samples
def build_salary_quick_transfer_samples(group_id: int, log_id: int, **kwargs) -> List[Dict]:
    """Seed a payroll credit followed by a large debit six hours later."""
    context = _build_sample_context(log_id, **kwargs)
    subjects = _build_phase2_subjects(
        log_id,
        staff_id_card=kwargs.get("staff_id_card"),
        family_id_cards=kwargs.get("family_id_cards"),
    )
    payday = datetime(2026, 3, 14, 9, 0, 0)
    salary_credit = _build_statement(
        group_id,
        log_id,
        trx_datetime=payday,
        cret_no=subjects["primary"],
        customer_name="浙江兰溪农村商业银行股份有限公司",
        user_memo="工资入账",
        cash_type="工资代发",
        cr_amount=12000.0,
        le_name=context["le_name"],
        account_mask_no=context["account_no"],
        customer_account_mask_no="6222024666600301",
    )
    # Most of the salary leaves the account within 24 hours of arrival.
    quick_debit = _build_statement(
        group_id,
        log_id,
        trx_datetime=payday + timedelta(hours=6),
        cret_no=subjects["primary"],
        customer_name="张某某",
        user_memo="工资到账后快速转出",
        cash_type="对私转账",
        dr_amount=10800.0,
        le_name=context["le_name"],
        account_mask_no=context["account_no"],
        customer_account_mask_no="6222024666600302",
    )
    return [salary_credit, quick_debit]
def build_salary_unused_samples(group_id: int, log_id: int, **kwargs) -> List[Dict]:
    """Seed a payroll credit whose only later outflow is a withholding debit."""
    context = _build_sample_context(log_id, **kwargs)
    subjects = _build_phase2_subjects(
        log_id,
        staff_id_card=kwargs.get("staff_id_card"),
        family_id_cards=kwargs.get("family_id_cards"),
    )
    payday = datetime(2026, 2, 10, 9, 0, 0)
    salary_credit = _build_statement(
        group_id,
        log_id,
        trx_datetime=payday,
        cret_no=subjects["secondary"],
        customer_name="浙江兰溪农村商业银行股份有限公司",
        user_memo="工资入账",
        cash_type="工资代发",
        cr_amount=9800.0,
        le_name=context["le_name"],
        account_mask_no=context["account_no"],
        customer_account_mask_no="6222024666600401",
    )
    # A withholding item is the only follow-up movement on the account.
    withholding_debit = _build_statement(
        group_id,
        log_id,
        trx_datetime=payday + timedelta(days=5),
        cret_no=subjects["secondary"],
        customer_name="兰溪住房公积金中心",
        user_memo="代扣公积金",
        cash_type="代扣支出",
        dr_amount=500.0,
        le_name=context["le_name"],
        account_mask_no=context["account_no"],
        customer_account_mask_no="6222024666600402",
    )
    return [salary_credit, withholding_debit]
LARGE_TRANSACTION_BUILDERS = { LARGE_TRANSACTION_BUILDERS = {
"HOUSE_OR_CAR_EXPENSE": build_house_or_car_samples, "HOUSE_OR_CAR_EXPENSE": build_house_or_car_samples,
"TAX_EXPENSE": build_tax_samples, "TAX_EXPENSE": build_tax_samples,
@@ -581,6 +827,15 @@ PHASE1_RULE_BUILDERS = {
"WITHDRAW_CNT": build_withdraw_cnt_samples, "WITHDRAW_CNT": build_withdraw_cnt_samples,
} }
# Maps phase-2 statement-scope rule codes to the builders that seed their
# synthetic bank-statement samples.
PHASE2_STATEMENT_RULE_BUILDERS = {
"LOW_INCOME_RELATIVE_LARGE_TRANSACTION": build_low_income_relative_large_transaction_samples,
"MULTI_PARTY_GAMBLING_TRANSFER": build_multi_party_gambling_transfer_samples,
"MONTHLY_FIXED_INCOME": build_monthly_fixed_income_samples,
"FIXED_COUNTERPARTY_TRANSFER": build_fixed_counterparty_transfer_samples,
"SALARY_QUICK_TRANSFER": build_salary_quick_transfer_samples,
"SALARY_UNUSED": build_salary_unused_samples,
}
def build_seed_statements_for_rule_plan( def build_seed_statements_for_rule_plan(
group_id: int, group_id: int,
@@ -600,6 +855,11 @@ def build_seed_statements_for_rule_plan(
if builder is not None: if builder is not None:
statements.extend(builder(group_id, log_id, **kwargs)) statements.extend(builder(group_id, log_id, **kwargs))
for rule_code in rule_plan.get("phase2_statement_hit_rules", []):
builder = PHASE2_STATEMENT_RULE_BUILDERS.get(rule_code)
if builder is not None:
statements.extend(builder(group_id, log_id, **kwargs))
return statements return statements

View File

@@ -149,6 +149,9 @@ class StatementService:
list(record.large_transaction_hit_rules) if record is not None else [] list(record.large_transaction_hit_rules) if record is not None else []
), ),
"phase1_hit_rules": list(record.phase1_hit_rules) if record is not None else [], "phase1_hit_rules": list(record.phase1_hit_rules) if record is not None else [],
"phase2_statement_hit_rules": (
list(record.phase2_statement_hit_rules) if record is not None else []
),
} }
if record is not None and record.staff_id_card: if record is not None and record.staff_id_card:
allowed_identity_cards = tuple([record.staff_id_card, *record.family_id_cards]) allowed_identity_cards = tuple([record.staff_id_card, *record.family_id_cards])

View File

@@ -197,3 +197,42 @@ def test_inner_flow_bank_statement_should_keep_same_rule_subset(client):
).json() ).json()
assert page1["data"]["bankStatementList"] == page2["data"]["bankStatementList"] assert page1["data"]["bankStatementList"] == page2["data"]["bankStatementList"]
def test_inner_flow_should_apply_phase2_baselines_before_get_bank_statement(client, monkeypatch):
"""Inner-flow ingestion must pass the phase-2 baseline rule codes to apply()."""
from routers.api import file_service
applied = {}
def fake_apply(**kwargs):
applied["called"] = True
applied["baseline_rule_codes"] = kwargs["baseline_rule_codes"]
# Stub out the DB write and pin the rule-hit plan so the assertion is deterministic.
monkeypatch.setattr(file_service.phase2_baseline_service, "apply", fake_apply)
monkeypatch.setattr(
file_service,
"_build_rule_hit_plan",
lambda log_id: {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": ["MONTHLY_FIXED_INCOME"],
"phase2_baseline_hit_rules": ["SUPPLIER_CONCENTRATION"],
},
)
response = client.post(
"/watson/api/project/getJZFileOrZjrcuFile",
data={
"groupId": 1001,
"customerNo": "phase2_customer",
"dataChannelCode": "channel_code",
"requestDateId": 20240101,
"dataStartDateId": 20240101,
"dataEndDateId": 20240131,
"uploadUserId": 902001,
},
)
assert response.status_code == 200
assert applied["called"] is True
assert applied["baseline_rule_codes"] == ["SUPPLIER_CONCENTRATION"]

View File

@@ -8,7 +8,14 @@ import io
from fastapi import BackgroundTasks from fastapi import BackgroundTasks
from fastapi.datastructures import UploadFile from fastapi.datastructures import UploadFile
from services.file_service import FileRecord, FileService from services.file_service import (
LARGE_TRANSACTION_RULE_CODES,
PHASE1_RULE_CODES,
PHASE2_BASELINE_RULE_CODES,
PHASE2_STATEMENT_RULE_CODES,
FileRecord,
FileService,
)
class FakeStaffIdentityRepository: class FakeStaffIdentityRepository:
@@ -185,6 +192,55 @@ def test_build_rule_hit_plan_should_be_deterministic_for_same_log_id():
assert 2 <= len(plan1["phase1_hit_rules"]) <= 4 assert 2 <= len(plan1["phase1_hit_rules"]) <= 4
def test_phase2_rule_hit_plan_should_be_deterministic_for_same_log_id():
"""The same log_id must always yield the same phase-2 rule subsets (size 2-4)."""
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan1 = service._build_rule_hit_plan(10001)
plan2 = service._build_rule_hit_plan(10001)
assert plan1 == plan2
assert 2 <= len(plan1["phase2_statement_hit_rules"]) <= 4
assert 2 <= len(plan1["phase2_baseline_hit_rules"]) <= 4
def test_build_rule_hit_plan_should_return_all_compatible_rules_in_all_mode(monkeypatch):
"""RULE_HIT_MODE=all must return every compatible rule list verbatim."""
monkeypatch.setattr("services.file_service.settings.RULE_HIT_MODE", "all")
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan = service._build_rule_hit_plan(10001)
assert plan["large_transaction_hit_rules"] == LARGE_TRANSACTION_RULE_CODES
assert plan["phase1_hit_rules"] == PHASE1_RULE_CODES
assert plan["phase2_statement_hit_rules"] == PHASE2_STATEMENT_RULE_CODES
assert plan["phase2_baseline_hit_rules"] == PHASE2_BASELINE_RULE_CODES
def test_build_rule_hit_plan_should_keep_subset_mode_as_default():
"""Without an explicit mode, the deterministic subset behaviour is preserved."""
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan1 = service._build_rule_hit_plan(10001)
plan2 = service._build_rule_hit_plan(10001)
assert plan1 == plan2
assert 2 <= len(plan1["large_transaction_hit_rules"]) <= 4
def test_build_rule_hit_plan_should_drop_conflicting_rules_from_all_mode(monkeypatch):
"""In all mode, two rules from the same conflict group must not both be hit."""
monkeypatch.setattr("services.file_service.settings.RULE_HIT_MODE", "all")
# Force a known conflict group so the assertion does not depend on live config.
monkeypatch.setattr(
"services.file_service.RULE_CONFLICT_GROUPS",
[["SALARY_QUICK_TRANSFER", "SALARY_UNUSED"]],
)
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
plan = service._build_rule_hit_plan(10001)
assert not (
"SALARY_QUICK_TRANSFER" in plan["phase2_statement_hit_rules"]
and "SALARY_UNUSED" in plan["phase2_statement_hit_rules"]
)
def test_fetch_inner_flow_should_persist_rule_hit_plan(monkeypatch): def test_fetch_inner_flow_should_persist_rule_hit_plan(monkeypatch):
service = FileService(staff_identity_repository=FakeStaffIdentityRepository()) service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
monkeypatch.setattr( monkeypatch.setattr(
@@ -218,3 +274,46 @@ def test_fetch_inner_flow_should_persist_rule_hit_plan(monkeypatch):
"GAMBLING_SENSITIVE_KEYWORD", "GAMBLING_SENSITIVE_KEYWORD",
"FOREX_BUY_AMT", "FOREX_BUY_AMT",
] ]
def test_fetch_inner_flow_should_persist_phase2_rule_hit_plan(monkeypatch):
"""fetch_inner_flow must persist the phase-2 rule lists on the file record."""
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
# Pin the plan so persistence can be asserted independently of randomness.
monkeypatch.setattr(
service,
"_build_rule_hit_plan",
lambda log_id: {
"large_transaction_hit_rules": ["HOUSE_OR_CAR_EXPENSE", "TAX_EXPENSE"],
"phase1_hit_rules": ["GAMBLING_SENSITIVE_KEYWORD", "FOREX_BUY_AMT"],
"phase2_statement_hit_rules": [
"LOW_INCOME_RELATIVE_LARGE_TRANSACTION",
"SALARY_QUICK_TRANSFER",
],
"phase2_baseline_hit_rules": [
"HOUSE_REGISTRATION_MISMATCH",
"SUPPLIER_CONCENTRATION",
],
},
)
response = service.fetch_inner_flow(
{
"groupId": 1001,
"customerNo": "test_customer_001",
"dataChannelCode": "test_code",
"requestDateId": 20240101,
"dataStartDateId": 20240101,
"dataEndDateId": 20240131,
"uploadUserId": 902001,
}
)
# The response carries the new log id; the record must hold both rule lists.
log_id = response["data"][0]
record = service.file_records[log_id]
assert record.phase2_statement_hit_rules == [
"LOW_INCOME_RELATIVE_LARGE_TRANSACTION",
"SALARY_QUICK_TRANSFER",
]
assert record.phase2_baseline_hit_rules == [
"HOUSE_REGISTRATION_MISMATCH",
"SUPPLIER_CONCENTRATION",
]

View File

@@ -0,0 +1,101 @@
"""
第二期数据库基线服务测试
"""
from contextlib import nullcontext
from services.phase2_baseline_service import Phase2BaselineService
def test_build_sql_plan_should_return_idempotent_sql_plan_for_selected_phase2_baselines():
"""Selected phase-2 baseline rules should yield an idempotent SQL plan."""
service = Phase2BaselineService()
sql_plan = service.build_sql_plan(
staff_id_card="330101198801010011",
family_id_cards=["330101199001010022"],
baseline_rule_codes=[
"SUPPLIER_CONCENTRATION",
"HOUSE_REGISTRATION_MISMATCH",
],
)
# Both rule fingerprints must appear, delivered as DELETE-then-INSERT pairs.
assert any("LSFXMOCKP2PUR001" in sql for sql in sql_plan)
assert any("LSFX Mock P2 HOUSE" in sql for sql in sql_plan)
assert any("'房产'" in sql for sql in sql_plan)
assert any("'正常'" in sql for sql in sql_plan)
assert any(sql.strip().startswith("DELETE") for sql in sql_plan)
assert any(sql.strip().startswith("INSERT") for sql in sql_plan)
def test_build_sql_plan_should_skip_unselected_phase2_rules():
"""Unselected rules must not contribute unrelated SQL to the plan."""
service = Phase2BaselineService()
sql_plan = service.build_sql_plan(
staff_id_card="330101198801010011",
family_id_cards=[],
baseline_rule_codes=["SUPPLIER_CONCENTRATION"],
)
# Only the supplier-concentration fingerprint appears; no asset-table writes.
assert any("LSFXMOCKP2PUR001" in sql for sql in sql_plan)
assert not any("LSFX Mock P2 HOUSE" in sql for sql in sql_plan)
assert not any("ccdi_asset_info" in sql for sql in sql_plan)
def test_build_sql_plan_should_use_staff_scope_for_family_asset_baselines():
"""Family asset baselines keep both the staff-owner and actual family-holder fields."""
service = Phase2BaselineService()
sql_plan = service.build_sql_plan(
staff_id_card="330101198801010011",
family_id_cards=["330101199001010022"],
baseline_rule_codes=["PROPERTY_FEE_REGISTRATION_MISMATCH"],
)
# Both id cards must be quoted into the plan; no stray enum-style literals.
assert any("'330101198801010011'" in sql for sql in sql_plan)
assert any("'330101199001010022'" in sql for sql in sql_plan)
assert not any("'REAL_ESTATE'" in sql for sql in sql_plan)
def test_apply_should_execute_generated_sql_plan(monkeypatch):
"""apply() must execute the generated SQL plan, not merely return strings."""
service = Phase2BaselineService()
executed_sql = []
committed = {"value": False}
# Minimal DB-API doubles: record every executed statement and the commit.
class FakeCursor:
def execute(self, sql):
executed_sql.append(sql.strip())
class FakeConnection:
def __init__(self):
self.cursor_instance = FakeCursor()
def cursor(self):
return nullcontext(self.cursor_instance)
def commit(self):
committed["value"] = True
def rollback(self):
raise AssertionError("rollback must not be reached on the happy path")
def __enter__(self):
return self
def __exit__(self, exc_type, exc, tb):
return False
monkeypatch.setattr(service, "_connect", lambda: FakeConnection())
result = service.apply(
staff_id_card="330101198801010011",
family_id_cards=["330101199001010022"],
baseline_rule_codes=["SUPPLIER_CONCENTRATION"],
)
assert result is None
assert committed["value"] is True
assert any("DELETE FROM ccdi_purchase_transaction" in sql for sql in executed_sql)
assert any("INSERT INTO ccdi_purchase_transaction" in sql for sql in executed_sql)

View File

@@ -0,0 +1,19 @@
import pytest
from main import parse_args as parse_main_args
from dev import parse_args as parse_dev_args
def test_main_parse_args_should_default_to_subset():
# Omitting --rule-hit-mode must default to "subset".
args = parse_main_args([])
assert args.rule_hit_mode == "subset"
def test_main_parse_args_should_accept_all_mode():
# Explicit "--rule-hit-mode all" must be parsed through unchanged.
args = parse_main_args(["--rule-hit-mode", "all"])
assert args.rule_hit_mode == "all"
def test_dev_parse_args_should_reject_invalid_mode():
# argparse exits (SystemExit) on values outside the allowed choices.
with pytest.raises(SystemExit):
parse_dev_args(["--rule-hit-mode", "invalid"])

View File

@@ -87,6 +87,54 @@ def test_build_seed_statements_for_rule_plan_should_generate_withdraw_cnt_sample
) >= 4 ) >= 4
def test_build_seed_statements_for_rule_plan_should_only_include_requested_phase2_rules():
"""Only the requested phase-2 statement rules should produce seed rows."""
plan = {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": [
"MULTI_PARTY_GAMBLING_TRANSFER",
"SALARY_QUICK_TRANSFER",
],
"phase2_baseline_hit_rules": [],
}
statements = build_seed_statements_for_rule_plan(
group_id=1000,
log_id=30001,
rule_plan=plan,
)
# Requested rules leave their fingerprints; the unrequested quarterly rule does not.
assert any(item["userMemo"] == "工资入账" for item in statements)
assert any(item["customerName"] == "欢乐游戏科技有限公司" for item in statements)
assert not any(item["userMemo"] == "季度稳定兼职收入" for item in statements)
def test_salary_quick_transfer_and_salary_unused_should_use_different_identity_groups():
"""The two salary rules must seed payroll credits for distinct id cards."""
plan = {
"large_transaction_hit_rules": [],
"phase1_hit_rules": [],
"phase2_statement_hit_rules": [
"SALARY_QUICK_TRANSFER",
"SALARY_UNUSED",
],
"phase2_baseline_hit_rules": [],
}
statements = build_seed_statements_for_rule_plan(
group_id=1000,
log_id=30001,
rule_plan=plan,
)
# Collect the subject id card of every payroll credit row.
salary_id_cards = {
item["cretNo"]
for item in statements
if item["userMemo"] == "工资入账"
}
assert len(salary_id_cards) >= 2
def test_large_transaction_seed_should_cover_all_eight_rules(): def test_large_transaction_seed_should_cover_all_eight_rules():
"""大额交易样本生成器必须覆盖 8 条已实现规则的关键口径。""" """大额交易样本生成器必须覆盖 8 条已实现规则的关键口径。"""
statements = build_large_transaction_seed_statements(group_id=1000, log_id=20001) statements = build_large_transaction_seed_statements(group_id=1000, log_id=20001)

View File

@@ -10,6 +10,7 @@ const source = fs.readFileSync(
[ [
"getOverviewDashboard", "getOverviewDashboard",
"getOverviewRiskPeople", "getOverviewRiskPeople",
"getOverviewRiskModelCards",
"loadOverviewData", "loadOverviewData",
"Promise.all", "Promise.all",
].forEach((token) => assert(source.includes(token), token)); ].forEach((token) => assert(source.includes(token), token));
@@ -17,3 +18,25 @@ const source = fs.readFileSync(
assert(!source.includes("getOverviewTopRiskPeople"), "页面不应再依赖TOP10接口"); assert(!source.includes("getOverviewTopRiskPeople"), "页面不应再依赖TOP10接口");
assert(!source.includes("topRiskPeopleRes"), "页面不应再处理TOP10响应"); assert(!source.includes("topRiskPeopleRes"), "页面不应再处理TOP10响应");
assert(!source.includes("topRiskPeopleData"), "页面不应再处理TOP10数据"); assert(!source.includes("topRiskPeopleData"), "页面不应再处理TOP10数据");
const loadOverviewDataBlock = source.match(
/const \[dashboardRes, riskPeopleRes, riskModelCardsRes\] = await Promise\.all\(\[([\s\S]*?)\]\);/m
);
assert(loadOverviewDataBlock, "入口页应继续并发请求仪表盘、风险人员和模型卡片");
[
"getOverviewDashboard(this.projectId)",
"getOverviewRiskPeople(this.projectId)",
"getOverviewRiskModelCards(this.projectId)",
].forEach((token) => assert(loadOverviewDataBlock[0].includes(token), token));
[
"createOverviewLoadedData",
"dashboardData",
"riskPeopleData",
"riskModelCardsData",
].forEach((token) => assert(source.includes(token), token));
["employeeResult", "resultTable", "topRiskList"].forEach((token) =>
assert(!source.includes(token), `入口页不应感知或回退到旧/新底层数据源字段:${token}`)
);

View File

@@ -20,6 +20,12 @@ const detail = fs.readFileSync(
["模型预警次数统计", "命中模型涉及人员", "员工姓名或工号", "异常标签"].forEach((token) => ["模型预警次数统计", "命中模型涉及人员", "员工姓名或工号", "异常标签"].forEach((token) =>
assert(model.includes(token), token) assert(model.includes(token), token)
); );
["<el-radio-button label=\"ANY\">任意触发</el-radio-button>", "<el-radio-button label=\"ALL\">同时触发</el-radio-button>"].forEach(
(token) => assert(model.includes(token), token)
);
["部门", "请选择部门", "查询", "重置", "selectedModelText"].forEach((token) =>
assert(model.includes(token), token)
);
["涉险交易明细", "异常账户人员信息", "查看详情"].forEach((token) => ["涉险交易明细", "异常账户人员信息", "查看详情"].forEach((token) =>
assert(detail.includes(token), token) assert(detail.includes(token), token)
); );

View File

@@ -14,12 +14,15 @@ const source = fs.readFileSync(
'return "全部模型"', 'return "全部模型"',
"this.selectedModelCodes = [...this.selectedModelCodes, modelCode]", "this.selectedModelCodes = [...this.selectedModelCodes, modelCode]",
"this.selectedModelCodes = this.selectedModelCodes.filter((item) => item !== modelCode)", "this.selectedModelCodes = this.selectedModelCodes.filter((item) => item !== modelCode)",
"buildPeopleParams()",
"modelCodes: this.selectedModelCodes", "modelCodes: this.selectedModelCodes",
"matchMode: this.matchMode", "matchMode: this.matchMode",
'this.matchMode = "ANY"', 'this.matchMode = "ANY"',
'this.selectedModelCodes = []',
'this.keyword = ""', 'this.keyword = ""',
"this.deptId = undefined", "this.deptId = undefined",
"this.pageNum = 1", "this.pageNum = 1",
"this.fetchPeopleList({ syncCardLoading: true })",
"keyword: this.keyword", "keyword: this.keyword",
"deptId: this.deptId", "deptId: this.deptId",
].forEach((token) => assert(source.includes(token), token)); ].forEach((token) => assert(source.includes(token), token));

View File

@@ -9,6 +9,8 @@ const source = fs.readFileSync(
[ [
"sectionData.overviewList", "sectionData.overviewList",
"normalizeOverviewRows",
"normalizeRiskPointTags",
"riskCount", "riskCount",
"riskPoint", "riskPoint",
"modelCount", "modelCount",
@@ -19,3 +21,5 @@ assert(!source.includes("sectionData.topRiskList"), "不应再绑定TOP10列表"
assert(!source.includes("scope.row.riskLevelType || 'danger'"), "riskLevelType fallback"); assert(!source.includes("scope.row.riskLevelType || 'danger'"), "riskLevelType fallback");
assert(source.includes('scope.row.actionLabel || "查看详情"'), "actionLabel fallback"); assert(source.includes('scope.row.actionLabel || "查看详情"'), "actionLabel fallback");
assert(source.includes("return [];"), "overviewList 缺省时应回落为空数组");
assert(source.includes(".split(/[、,;]/)"), "核心异常点字符串应支持拆分为标签");

View File

@@ -23,10 +23,26 @@ const people = fs.readFileSync(
), ),
"utf8" "utf8"
); );
const mockSource = fs.readFileSync(
path.resolve(
__dirname,
"../../src/views/ccdiProject/components/detail/preliminaryCheck.mock.js"
),
"utf8"
);
["风险仪表盘", "overview-stats"].forEach((token) => assert(stats.includes(token), token)); ["风险仪表盘", "overview-stats"].forEach((token) => assert(stats.includes(token), token));
["总人数", "高风险", "中风险", "低风险", "无风险人员"].forEach((token) =>
assert(mockSource.includes(`label: "${token}"`), token)
);
["currentData.summary", "currentData.riskPeople"].forEach((token) =>
assert(entry.includes(token), token)
);
["风险人员总览", "风险等级", "命中模型数", "查看详情"].forEach((token) => ["风险人员总览", "风险等级", "命中模型数", "查看详情"].forEach((token) =>
assert(people.includes(token), token) assert(people.includes(token), token)
); );
["姓名", "身份证号", "所属部门", "核心异常点", "riskPointTagList"].forEach((token) =>
assert(people.includes(token), token)
);
assert(!people.includes("中高风险人员TOP10"), "不应保留TOP10区块"); assert(!people.includes("中高风险人员TOP10"), "不应保留TOP10区块");
assert(entry.includes("risk-people-section"), "入口应挂载风险人员区"); assert(entry.includes("risk-people-section"), "入口应挂载风险人员区");

View File

@@ -10,11 +10,35 @@ const source = fs.readFileSync(
[ [
"getOverviewDashboard", "getOverviewDashboard",
"getOverviewRiskPeople", "getOverviewRiskPeople",
"getOverviewRiskModelCards",
"getOverviewRiskModelPeople",
"/ccdi/project/overview/dashboard", "/ccdi/project/overview/dashboard",
"/ccdi/project/overview/risk-people", "/ccdi/project/overview/risk-people",
"/ccdi/project/overview/risk-models/cards",
"/ccdi/project/overview/risk-models/people",
].forEach((token) => assert(source.includes(token), token)); ].forEach((token) => assert(source.includes(token), token));
[ [
"getOverviewTopRiskPeople", "getOverviewTopRiskPeople",
"/ccdi/project/overview/top-risk-people", "/ccdi/project/overview/top-risk-people",
].forEach((token) => assert(!source.includes(token), token)); ].forEach((token) => assert(!source.includes(token), token));
const riskModelPeopleFn = source.match(
/export function getOverviewRiskModelPeople\(params\) \{[\s\S]*?params:\s*\{([\s\S]*?)\}\s*\}\s*\)/m
);
assert(riskModelPeopleFn, "应保留模型人员接口参数透传逻辑");
[
"projectId: params.projectId",
"modelCodes: params.modelCodes",
"matchMode: params.matchMode",
"keyword: params.keyword",
"deptId: params.deptId",
"pageNum: params.pageNum",
"pageSize: params.pageSize",
].forEach((token) => assert(riskModelPeopleFn[0].includes(token), token));
["employeeResult", "resultTable", "overview/result"].forEach((token) =>
assert(!source.includes(token), `前端 API 契约不应感知结果表实现:${token}`)
);

View File

@@ -80,27 +80,27 @@ VALUES
('LARGE_TRANSACTION', '大额交易', 'FREQUENT_CASH_DEPOSIT', '短时间多次存现', 'FREQUENT_CASH_DEPOSIT', 'OBJECT', 'HIGH', '识别短时间多次现金存入对象', 1, 70, 'system', '初始化规则'), ('LARGE_TRANSACTION', '大额交易', 'FREQUENT_CASH_DEPOSIT', '短时间多次存现', 'FREQUENT_CASH_DEPOSIT', 'OBJECT', 'HIGH', '识别短时间多次现金存入对象', 1, 70, 'system', '初始化规则'),
('LARGE_TRANSACTION', '大额交易', 'LARGE_TRANSFER', '大额转账交易', 'FREQUENT_TRANSFER', 'STATEMENT', 'HIGH', '识别大额转账流水', 1, 80, 'system', '初始化规则'), ('LARGE_TRANSACTION', '大额交易', 'LARGE_TRANSFER', '大额转账交易', 'FREQUENT_TRANSFER', 'STATEMENT', 'HIGH', '识别大额转账流水', 1, 80, 'system', '初始化规则'),
('ABNORMAL_TRANSACTION', '异常交易', 'ABNORMAL_CUSTOMER_TRANSACTION', '与客户之间非正常资金往来', NULL, 'STATEMENT', 'HIGH', '员工及关系人与客户及关系人之间有超过1000元以上的资金往来客户指信贷类客户包括贷款户、担保人中介库人员包括中介注册的主体及主体关系人。', 1, 10, 'system', '占位规则待补充真实SQL'), ('ABNORMAL_TRANSACTION', '异常交易', 'ABNORMAL_CUSTOMER_TRANSACTION', '与客户之间非正常资金往来', NULL, 'STATEMENT', 'HIGH', '员工及关系人与客户及关系人之间有超过1000元以上的资金往来客户指信贷类客户包括贷款户、担保人中介库人员包括中介注册的主体及主体关系人。', 1, 10, 'system', '占位规则待补充真实SQL'),
('ABNORMAL_TRANSACTION', '异常交易', 'LOW_INCOME_RELATIVE_LARGE_TRANSACTION', '低收入亲属大额交易', NULL, 'OBJECT', 'GENERAL', '关系人中没有收入或月收入低于3000元的人员累计交易金额超过10万元。', 1, 20, 'system', '占位规则待补充真实SQL'), ('ABNORMAL_TRANSACTION', '异常交易', 'LOW_INCOME_RELATIVE_LARGE_TRANSACTION', '低收入亲属大额交易', NULL, 'OBJECT', 'GENERAL', '关系人中没有收入或月收入低于3000元的人员累计交易金额超过10万元。', 1, 20, 'system', '真实规则识别低收入关系人累计交易超10万元的员工对象'),
('SUSPICIOUS_GAMBLING', '疑似赌博', 'MULTI_PARTY_GAMBLING_TRANSFER', '疑似赌博交易', NULL, 'OBJECT', 'HIGH', '多人2人及以上、多次2次以上、相近时间同一天有转账、微信转账、支付宝转账发生且额度在可疑区间。金额区间可在排查设置页面进行设置', 1, 10, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_GAMBLING', '疑似赌博', 'MULTI_PARTY_GAMBLING_TRANSFER', '疑似赌博交易', NULL, 'OBJECT', 'HIGH', '多人2人及以上、多次2次以上、相近时间同一天有转账、微信转账、支付宝转账发生且额度在可疑区间。金额区间可在排查设置页面进行设置', 1, 10, 'system', '真实规则:识别同日多对手方且金额落在可疑区间的疑似赌博对象'),
('SUSPICIOUS_GAMBLING', '疑似赌博', 'GAMBLING_SENSITIVE_KEYWORD', '疑似敏感交易', NULL, 'STATEMENT', 'HIGH', '备注或交易摘要、对手有“游戏、抖币、体彩、福彩”等字眼。', 1, 20, 'system', '真实规则:识别摘要或对手方命中赌博敏感词的支出流水'), ('SUSPICIOUS_GAMBLING', '疑似赌博', 'GAMBLING_SENSITIVE_KEYWORD', '疑似敏感交易', NULL, 'STATEMENT', 'HIGH', '备注或交易摘要、对手有“游戏、抖币、体彩、福彩”等字眼。', 1, 20, 'system', '真实规则:识别摘要或对手方命中赌博敏感词的支出流水'),
('SUSPICIOUS_RELATION', '可疑关系', 'SPECIAL_AMOUNT_TRANSACTION', '特殊金额交易', NULL, 'STATEMENT', NULL, '除与配偶、子女外发生特殊金额交易如1314元、520元等具有特殊含义的金额。', 1, 10, 'system', '真实规则:识别与非配偶子女发生的特殊金额交易'), ('SUSPICIOUS_RELATION', '可疑关系', 'SPECIAL_AMOUNT_TRANSACTION', '特殊金额交易', NULL, 'STATEMENT', NULL, '除与配偶、子女外发生特殊金额交易如1314元、520元等具有特殊含义的金额。', 1, 10, 'system', '真实规则:识别与非配偶子女发生的特殊金额交易'),
('SUSPICIOUS_PART_TIME', '可疑兼职', 'MONTHLY_FIXED_INCOME', '疑似兼职', 'MONTHLY_FIXED_INCOME', 'OBJECT', NULL, '除本行工资收入外,每月有固定收入,固定收入金额自行设置。', 1, 10, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PART_TIME', '可疑兼职', 'MONTHLY_FIXED_INCOME', '疑似兼职', 'MONTHLY_FIXED_INCOME', 'OBJECT', NULL, '除本行工资收入外,每月有固定收入,固定收入金额自行设置。', 1, 10, 'system', '真实规则识别近12个月持续出现稳定月度非工资收入的员工对象'),
('SUSPICIOUS_PART_TIME', '可疑兼职', 'FIXED_COUNTERPARTY_TRANSFER', '疑似兼职', 'FIXED_COUNTERPARTY_TRANSFER', 'OBJECT', NULL, '每季或每年从固定交易对手转入金额金额可设区间值如5000-10000。', 1, 20, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PART_TIME', '可疑兼职', 'FIXED_COUNTERPARTY_TRANSFER', '疑似兼职', NULL, 'OBJECT', NULL, '每季或每年从固定交易对手转入金额金额可设区间值如5000-10000。', 1, 20, 'system', '真实规则:识别固定交易对手季度转入金额落在设定区间的员工对象'),
('SUSPICIOUS_PART_TIME', '可疑兼职', 'SUSPICIOUS_INCOME_KEYWORD', '疑似兼职', NULL, 'STATEMENT', 'HIGH', '转入资金摘要有“工资”、“分红”、“红利”、“利息(非银行结息)”等收入', 1, 30, 'system', '真实规则:识别非本行工资代发的收入关键词转入流水'), ('SUSPICIOUS_PART_TIME', '可疑兼职', 'SUSPICIOUS_INCOME_KEYWORD', '疑似兼职', NULL, 'STATEMENT', 'HIGH', '转入资金摘要有“工资”、“分红”、“红利”、“利息(非银行结息)”等收入', 1, 30, 'system', '真实规则:识别非本行工资代发的收入关键词转入流水'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'HOUSE_REGISTRATION_MISMATCH', '购房交易与房产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有购房交易,但名下房产无新增登记;有新增登记购房,但无相关购房交易记录。', 1, 10, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PROPERTY', '可疑财产', 'HOUSE_REGISTRATION_MISMATCH', '购房交易与房产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有购房交易,但名下房产无新增登记;有新增登记购房,但无相关购房交易记录。', 1, 10, 'system', '真实规则:识别购房支出但当前房产登记口径缺失的流水'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'PROPERTY_FEE_REGISTRATION_MISMATCH', '物业缴费与房产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有物业缴费记录,但名下房产无新增登记。', 1, 20, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PROPERTY', '可疑财产', 'PROPERTY_FEE_REGISTRATION_MISMATCH', '物业缴费与房产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有物业缴费记录,但名下房产无新增登记。', 1, 20, 'system', '真实规则:识别物业缴费但当前房产登记口径缺失的流水'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'TAX_ASSET_REGISTRATION_MISMATCH', '大额纳税与资产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有5000元以上的纳税记录名下无房产车产新增登记。', 1, 30, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PROPERTY', '可疑财产', 'TAX_ASSET_REGISTRATION_MISMATCH', '大额纳税与资产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有5000元以上的纳税记录当前资产登记口径下无房产登记。', 1, 30, 'system', '真实规则:识别大额纳税但当前房产登记口径缺失的流水'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'INCOME_ASSET_MISMATCH', '收入资产不符', NULL, 'STATEMENT', 'HIGH', '豪华房产价值超家庭年收入10倍', 1, 40, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PROPERTY', '可疑财产', 'INCOME_ASSET_MISMATCH', '收入资产不符', NULL, 'STATEMENT', 'HIGH', '豪华房产价值超家庭年收入10倍', 1, 40, 'system', '占位规则待补充真实SQL'),
('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'FOREX_BUY_AMT', '可疑外汇交易', 'SINGLE_PURCHASE_AMOUNT', 'STATEMENT', NULL, '单笔购汇金额超限', 1, 10, 'system', '真实规则:识别单笔购汇金额超过阈值的流水'), ('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'FOREX_BUY_AMT', '可疑外汇交易', 'SINGLE_PURCHASE_AMOUNT', 'STATEMENT', NULL, '单笔购汇金额超限', 1, 10, 'system', '真实规则:识别单笔购汇金额超过阈值的流水'),
('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'FOREX_SELL_AMT', '可疑外汇交易', 'SINGLE_SETTLEMENT_AMOUNT', 'STATEMENT', NULL, '单笔结汇金额超限', 1, 20, 'system', '真实规则:识别单笔结汇金额超过阈值的流水'), ('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'FOREX_SELL_AMT', '可疑外汇交易', 'SINGLE_SETTLEMENT_AMOUNT', 'STATEMENT', NULL, '单笔结汇金额超限', 1, 20, 'system', '真实规则:识别单笔结汇金额超过阈值的流水'),
('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'CROSS_BORDER_AMT', '可疑外汇交易', 'CROSS_BORDER_AMT', 'STATEMENT', NULL, '单笔跨境汇款金额超限', 1, 30, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'CROSS_BORDER_AMT', '可疑外汇交易', 'CROSS_BORDER_AMT', 'STATEMENT', NULL, '单笔跨境汇款金额超限', 1, 30, 'system', '占位规则待补充真实SQL'),
('SUSPICIOUS_INTEREST_PAYMENT', '可疑付息', 'INTEREST_PAYMENT_BY_OTHERS', '可疑付息', NULL, 'OBJECT', 'HIGH', '客户经理管户的客户在智柜、柜面连续代交利息且代交人数超过2人。', 1, 10, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_INTEREST_PAYMENT', '可疑付息', 'INTEREST_PAYMENT_BY_OTHERS', '可疑付息', NULL, 'OBJECT', 'HIGH', '客户经理管户的客户在智柜、柜面连续代交利息且代交人数超过2人。', 1, 10, 'system', '占位规则待补充真实SQL'),
('SUSPICIOUS_PURCHASE', '可疑采购', 'LARGE_PURCHASE_TRANSACTION', '可疑采购', NULL, 'STATEMENT', NULL, '单笔采购金额超过10万元。', 1, 10, 'system', '真实规则识别单笔采购金额超过10万元的采购事项'), ('SUSPICIOUS_PURCHASE', '可疑采购', 'LARGE_PURCHASE_TRANSACTION', '可疑采购', NULL, 'STATEMENT', NULL, '单笔采购金额超过10万元。', 1, 10, 'system', '真实规则识别单笔采购金额超过10万元的采购事项'),
('SUSPICIOUS_PURCHASE', '可疑采购', 'SUPPLIER_CONCENTRATION', '可疑采购', NULL, 'OBJECT', NULL, '单个供应商采购额占总采购额比例超过70%。', 1, 20, 'system', '占位规则待补充真实SQL'), ('SUSPICIOUS_PURCHASE', '可疑采购', 'SUPPLIER_CONCENTRATION', '可疑采购', NULL, 'OBJECT', NULL, '单个供应商采购额占总采购额比例超过70%。', 1, 20, 'system', '真实规则识别单个供应商采购额占比超过70%的员工对象'),
('ABNORMAL_BEHAVIOR', '异常行为', 'STOCK_TFR_LARGE', '可疑银证大额转账', 'STOCK_TFR_LARGE', 'STATEMENT', NULL, '家庭老人/非关系人银证大额转账', 1, 10, 'system', '真实规则:识别银证转账金额超过阈值的流水'), ('ABNORMAL_BEHAVIOR', '异常行为', 'STOCK_TFR_LARGE', '可疑银证大额转账', 'STOCK_TFR_LARGE', 'STATEMENT', NULL, '家庭老人/非关系人银证大额转账', 1, 10, 'system', '真实规则:识别银证转账金额超过阈值的流水'),
('ABNORMAL_BEHAVIOR', '异常行为', 'WITHDRAW_CNT', '微信支付宝频繁提现', 'WITHDRAW_CNT', 'OBJECT', NULL, '微信、支付宝单日提现次数超过设置次数', 1, 20, 'system', '真实规则:识别微信支付宝单日提现次数超过阈值的对象'), ('ABNORMAL_BEHAVIOR', '异常行为', 'WITHDRAW_CNT', '微信支付宝频繁提现', 'WITHDRAW_CNT', 'OBJECT', NULL, '微信、支付宝单日提现次数超过设置次数', 1, 20, 'system', '真实规则:识别微信支付宝单日提现次数超过阈值的对象'),
('ABNORMAL_BEHAVIOR', '异常行为', 'WITHDRAW_AMT', '微信支付宝提现超额', 'WITHDRAW_AMT', 'OBJECT', NULL, '微信、支付宝单日累计提现金额超过限额', 1, 30, 'system', '占位规则待补充真实SQL'), ('ABNORMAL_BEHAVIOR', '异常行为', 'WITHDRAW_AMT', '微信支付宝提现超额', 'WITHDRAW_AMT', 'OBJECT', NULL, '微信、支付宝单日累计提现金额超过限额', 1, 30, 'system', '占位规则待补充真实SQL'),
('ABNORMAL_BEHAVIOR', '异常行为', 'SALARY_QUICK_TRANSFER', '工资快速转出', NULL, 'OBJECT', NULL, '工资发放后24小时内转出超过80%的资金', 1, 40, 'system', '占位规则待补充真实SQL'), ('ABNORMAL_BEHAVIOR', '异常行为', 'SALARY_QUICK_TRANSFER', '工资快速转出', NULL, 'OBJECT', NULL, '工资发放后24小时内转出超过80%的资金', 1, 40, 'system', '真实规则识别工资入账24小时内快速转出的员工对象'),
('ABNORMAL_BEHAVIOR', '异常行为', 'SALARY_UNUSED', '工资无使用记录', NULL, 'OBJECT', NULL, '工资发放后除代扣项目外连续30天犖奕魏蜗鸦蜃思锹肌', 1, 50, 'system', '占位规则待补充真实SQL'), ('ABNORMAL_BEHAVIOR', '异常行为', 'SALARY_UNUSED', '工资无使用记录', NULL, 'OBJECT', NULL, '工资发放后除代扣项目外连续30天无消费或转账支出记录。', 1, 50, 'system', '真实规则识别工资入账后30天内无消费或转账支出的员工对象'),
('ABNORMAL_BEHAVIOR', '异常行为', 'LARGE_STOCK_TRADING', '大额炒股', 'STOCK_TFR_LARGE', 'STATEMENT', 'HIGH', '单次三方资管交易金额超过100万元。', 1, 60, 'system', '真实规则:识别单笔三方资管交易金额超过阈值的流水'), ('ABNORMAL_BEHAVIOR', '异常行为', 'LARGE_STOCK_TRADING', '大额炒股', 'STOCK_TFR_LARGE', 'STATEMENT', 'HIGH', '单次三方资管交易金额超过100万元。', 1, 60, 'system', '真实规则:识别单笔三方资管交易金额超过阈值的流水'),
('ABNORMAL_BEHAVIOR', '异常行为', 'PROXY_ACCOUNT_OPERATION', '疑似代理他人账户', NULL, 'OBJECT', NULL, NULL, 1, 70, 'system', '占位规则待补充真实SQL'); ('ABNORMAL_BEHAVIOR', '异常行为', 'PROXY_ACCOUNT_OPERATION', '疑似代理他人账户', NULL, 'OBJECT', NULL, NULL, 1, 70, 'system', '占位规则待补充真实SQL');

View File

@@ -0,0 +1,27 @@
-- Result-overview employee result table: one pre-aggregated row per
-- (project, employee), denormalizing hit counts, risk level and JSON
-- snapshots so the overview page reads a single table instead of joining
-- the raw hit tables at query time.
CREATE TABLE IF NOT EXISTS `ccdi_project_overview_employee_result` (
    `id`                     BIGINT        NOT NULL AUTO_INCREMENT COMMENT '主键ID',
    `project_id`             BIGINT        NOT NULL COMMENT '项目ID',
    `staff_id_card`          VARCHAR(18)   NOT NULL COMMENT '员工身份证号',
    `staff_code`             VARCHAR(64)   DEFAULT NULL COMMENT '员工工号',
    `staff_name`             VARCHAR(64)   DEFAULT NULL COMMENT '员工姓名',
    `dept_id`                BIGINT        DEFAULT NULL COMMENT '部门ID',
    `dept_name`              VARCHAR(128)  DEFAULT NULL COMMENT '部门名称',
    `rule_count`             INT           NOT NULL DEFAULT 0 COMMENT '命中规则数',
    `model_count`            INT           NOT NULL DEFAULT 0 COMMENT '命中模型数',
    `hit_count`              INT           NOT NULL DEFAULT 0 COMMENT '命中次数',
    `risk_level_code`        VARCHAR(32)   NOT NULL COMMENT '风险等级编码',
    `risk_point`             VARCHAR(1000) DEFAULT NULL COMMENT '风险点',
    `model_codes_csv`        VARCHAR(1000) DEFAULT NULL COMMENT '命中模型编码CSV',
    `model_names_json`       JSON          DEFAULT NULL COMMENT '命中模型名称快照',
    `hit_rules_json`         JSON          DEFAULT NULL COMMENT '命中规则快照',
    `model_hit_summary_json` JSON          DEFAULT NULL COMMENT '模型命中汇总快照',
    `create_by`              VARCHAR(64)   DEFAULT NULL COMMENT '创建者',
    `create_time`            DATETIME      DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
    `update_by`              VARCHAR(64)   DEFAULT NULL COMMENT '更新者',
    `update_time`            DATETIME      DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
    `remark`                 VARCHAR(500)  DEFAULT NULL COMMENT '备注',
    PRIMARY KEY (`id`),
    -- One aggregated row per employee per project; makes the rebuild job idempotent.
    UNIQUE KEY `uk_ccdi_project_overview_employee_result` (`project_id`, `staff_id_card`),
    -- Covering indexes for the two overview filters: by risk level, by department.
    KEY `idx_ccdi_project_overview_employee_result_risk_level` (`project_id`, `risk_level_code`),
    KEY `idx_ccdi_project_overview_employee_result_dept` (`project_id`, `dept_id`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COMMENT = '结果总览员工结果表';

View File

@@ -0,0 +1,191 @@
-- LSFX Mock phase-2 seed script: plants deterministic baseline rows so the
-- real (non-placeholder) tag rules have guaranteed hits during verification.
-- Each section is idempotent: it deletes its previous baseline row by a
-- stable business key before re-inserting, so the script can be re-run.
-- The whole seed is one transaction for all-or-nothing application.
START TRANSACTION;

-- 1) Purchase baseline for the SUPPLIER_CONCENTRATION rule. The applicant is
--    copied from an existing staff row so the hit resolves to a real employee.
DELETE FROM ccdi_purchase_transaction
WHERE purchase_id = 'LSFXMOCKP2PUR001';
INSERT INTO ccdi_purchase_transaction (
purchase_id,
purchase_category,
project_name,
subject_name,
subject_desc,
purchase_qty,
budget_amount,
bid_amount,
actual_amount,
contract_amount,
settlement_amount,
purchase_method,
supplier_name,
contact_person,
contact_phone,
supplier_uscc,
supplier_bank_account,
apply_date,
plan_approve_date,
announce_date,
bid_open_date,
contract_sign_date,
expected_delivery_date,
actual_delivery_date,
acceptance_date,
settlement_date,
applicant_id,
applicant_name,
apply_department,
purchase_leader_id,
purchase_leader_name,
purchase_department,
created_by,
updated_by
)
SELECT
'LSFXMOCKP2PUR001',
'设备采购',
'LSFX Mock P2',
'可疑采购基线',
'用于命中 SUPPLIER_CONCENTRATION 真实规则',
1,
188000.00,
186000.00,
186000.00,
186000.00,
186000.00,
'竞争性谈判',
'兰溪市联调供应链有限公司',
'联调联系人',
'13800000000',
'91330781P2PUR00011',
'6222000000001234',
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CURRENT_DATE,
CAST(s.staff_id AS CHAR),
s.name,
'纪检初核部',
NULL,
NULL,
NULL,
'admin',
'admin'
FROM ccdi_base_staff s
-- Only staff rows with a usable id card, staff id and name can act as applicant.
WHERE COALESCE(TRIM(s.id_card), '') <> ''
AND COALESCE(TRIM(CAST(s.staff_id AS CHAR)), '') <> ''
AND COALESCE(TRIM(s.name), '') <> ''
-- FIX: LIMIT without ORDER BY is nondeterministic in MySQL, so repeated runs
-- could seed a different applicant each time. Order by the primary key to make
-- the chosen staff row stable across runs and replicas.
ORDER BY s.staff_id
LIMIT 1;

-- 2) Asset baseline for HOUSE_REGISTRATION_MISMATCH: a registered property on
--    the employee's own person_id (same as family_id).
DELETE FROM ccdi_asset_info
WHERE asset_name = 'LSFX Mock P2 HOUSE_REGISTRATION_MISMATCH';
INSERT INTO ccdi_asset_info (
family_id,
person_id,
asset_main_type,
asset_sub_type,
asset_name,
ownership_ratio,
purchase_eval_date,
original_value,
current_value,
valuation_date,
asset_status,
remarks,
create_by,
update_by
)
VALUES (
'330101198801010011',
'330101198801010011',
'房产',
'商铺',
'LSFX Mock P2 HOUSE_REGISTRATION_MISMATCH',
100.00,
CURRENT_DATE,
1880000.00,
1880000.00,
CURRENT_DATE,
'正常',
'用于命中 HOUSE_REGISTRATION_MISMATCH 真实规则的第二期基线',
'admin',
'admin'
);

-- 3) Asset baseline for PROPERTY_FEE_REGISTRATION_MISMATCH: same family, but a
--    different person_id (family-member caliber).
DELETE FROM ccdi_asset_info
WHERE asset_name = 'LSFX Mock P2 PROPERTY_FEE_REGISTRATION_MISMATCH';
INSERT INTO ccdi_asset_info (
family_id,
person_id,
asset_main_type,
asset_sub_type,
asset_name,
ownership_ratio,
purchase_eval_date,
original_value,
current_value,
valuation_date,
asset_status,
remarks,
create_by,
update_by
)
VALUES (
'330101198801010011',
'330101199001010022',
'房产',
'商铺',
'LSFX Mock P2 PROPERTY_FEE_REGISTRATION_MISMATCH',
100.00,
CURRENT_DATE,
1880000.00,
1880000.00,
CURRENT_DATE,
'正常',
'用于命中 PROPERTY_FEE_REGISTRATION_MISMATCH 真实规则的第二期基线',
'admin',
'admin'
);

-- 4) Asset baseline for TAX_ASSET_REGISTRATION_MISMATCH: same family, a third
--    distinct person_id, so each mismatch rule has its own independent row.
DELETE FROM ccdi_asset_info
WHERE asset_name = 'LSFX Mock P2 TAX_ASSET_REGISTRATION_MISMATCH';
INSERT INTO ccdi_asset_info (
family_id,
person_id,
asset_main_type,
asset_sub_type,
asset_name,
ownership_ratio,
purchase_eval_date,
original_value,
current_value,
valuation_date,
asset_status,
remarks,
create_by,
update_by
)
VALUES (
'330101198801010011',
'330101199202020044',
'房产',
'商铺',
'LSFX Mock P2 TAX_ASSET_REGISTRATION_MISMATCH',
100.00,
CURRENT_DATE,
1880000.00,
1880000.00,
CURRENT_DATE,
'正常',
'用于命中 TAX_ASSET_REGISTRATION_MISMATCH 真实规则的第二期基线',
'admin',
'admin'
);
COMMIT;

View File

@@ -0,0 +1,41 @@
-- Upsert of ten tag rules into ccdi_bank_tag_rule, promoting them from
-- placeholder to real-SQL rules. Relies on a unique key over the rule's
-- business key so ON DUPLICATE KEY UPDATE refreshes existing rows in place
-- (assumes the unique key is on rule_code — TODO confirm against the table DDL).
-- Idempotent: safe to re-run; rerunning only bumps update_time.
START TRANSACTION;
INSERT INTO ccdi_bank_tag_rule (
model_code,
model_name,
rule_code,
rule_name,
indicator_code,
result_type,
risk_level,
business_caliber,
enabled,
sort_order,
create_by,
remark
) VALUES
-- result_type distinguishes object-level hits (OBJECT, per employee) from
-- statement-level hits (STATEMENT, per transaction row); risk_level and
-- indicator_code stay NULL where the rule defines none.
('ABNORMAL_TRANSACTION', '异常交易', 'LOW_INCOME_RELATIVE_LARGE_TRANSACTION', '低收入亲属大额交易', NULL, 'OBJECT', 'GENERAL', '关系人中没有收入或月收入低于3000元的人员累计交易金额超过10万元。', 1, 20, 'system', '真实规则识别低收入关系人累计交易超10万元的员工对象'),
('SUSPICIOUS_GAMBLING', '疑似赌博', 'MULTI_PARTY_GAMBLING_TRANSFER', '疑似赌博交易', NULL, 'OBJECT', 'HIGH', '多人2人及以上、多次2次以上、相近时间同一天有转账、微信转账、支付宝转账发生且额度在可疑区间。金额区间可在排查设置页面进行设置', 1, 10, 'system', '真实规则:识别同日多对手方且金额落在可疑区间的疑似赌博对象'),
('SUSPICIOUS_PART_TIME', '可疑兼职', 'MONTHLY_FIXED_INCOME', '疑似兼职', 'MONTHLY_FIXED_INCOME', 'OBJECT', NULL, '除本行工资收入外,每月有固定收入,固定收入金额自行设置。', 1, 10, 'system', '真实规则识别近12个月持续出现稳定月度非工资收入的员工对象'),
('SUSPICIOUS_PART_TIME', '可疑兼职', 'FIXED_COUNTERPARTY_TRANSFER', '疑似兼职', NULL, 'OBJECT', NULL, '每季或每年从固定交易对手转入金额金额可设区间值如5000-10000。', 1, 20, 'system', '真实规则:识别固定交易对手季度转入金额落在设定区间的员工对象'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'HOUSE_REGISTRATION_MISMATCH', '购房交易与房产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有购房交易,但名下房产无新增登记;有新增登记购房,但无相关购房交易记录。', 1, 10, 'system', '真实规则:识别购房支出但当前房产登记口径缺失的流水'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'PROPERTY_FEE_REGISTRATION_MISMATCH', '物业缴费与房产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有物业缴费记录,但名下房产无新增登记。', 1, 20, 'system', '真实规则:识别物业缴费但当前房产登记口径缺失的流水'),
('SUSPICIOUS_PROPERTY', '可疑财产', 'TAX_ASSET_REGISTRATION_MISMATCH', '大额纳税与资产登记不匹配', NULL, 'STATEMENT', NULL, '员工及关系人有5000元以上的纳税记录但当前资产登记口径下无房产登记。', 1, 30, 'system', '真实规则:识别大额纳税但当前房产登记口径缺失的流水'),
('SUSPICIOUS_PURCHASE', '可疑采购', 'SUPPLIER_CONCENTRATION', '可疑采购', NULL, 'OBJECT', NULL, '单个供应商采购额占总采购额比例超过70%。', 1, 20, 'system', '真实规则识别单个供应商采购额占比超过70%的员工对象'),
('ABNORMAL_BEHAVIOR', '异常行为', 'SALARY_QUICK_TRANSFER', '工资快速转出', NULL, 'OBJECT', NULL, '工资发放后24小时内转出超过80%的资金。', 1, 40, 'system', '真实规则识别工资入账24小时内快速转出的员工对象'),
('ABNORMAL_BEHAVIOR', '异常行为', 'SALARY_UNUSED', '工资无使用记录', NULL, 'OBJECT', NULL, '工资发放后除代扣项目外连续30天无消费或转账支出记录。', 1, 50, 'system', '真实规则识别工资入账后30天内无消费或转账支出的员工对象')
-- On conflict, refresh every descriptive column from the incoming row; the
-- original create_by/create_time are intentionally left untouched, while
-- update_by/update_time record that this migration touched the row.
ON DUPLICATE KEY UPDATE
model_code = VALUES(model_code),
model_name = VALUES(model_name),
rule_name = VALUES(rule_name),
indicator_code = VALUES(indicator_code),
result_type = VALUES(result_type),
risk_level = VALUES(risk_level),
business_caliber = VALUES(business_caliber),
enabled = VALUES(enabled),
sort_order = VALUES(sort_order),
update_by = 'system',
update_time = NOW(),
remark = VALUES(remark);
COMMIT;