修复流水详情原始文件关联与Mock随机logId
This commit is contained in:
@@ -328,7 +328,15 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
|
||||
fur.file_name AS originalFileName,
|
||||
fur.upload_time AS uploadTime
|
||||
FROM ccdi_bank_statement bs
|
||||
LEFT JOIN ccdi_file_upload_record fur ON fur.log_id = bs.batch_id AND fur.project_id = bs.project_id
|
||||
LEFT JOIN (
|
||||
SELECT latest_record.project_id, latest_record.log_id, latest_record.file_name, latest_record.upload_time
|
||||
FROM ccdi_file_upload_record latest_record
|
||||
INNER JOIN (
|
||||
SELECT project_id, log_id, MAX(id) AS max_id
|
||||
FROM ccdi_file_upload_record
|
||||
GROUP BY project_id, log_id
|
||||
) latest_meta ON latest_meta.max_id = latest_record.id
|
||||
) fur ON fur.log_id = bs.batch_id AND fur.project_id = bs.project_id
|
||||
WHERE bs.bank_statement_id = #{bankStatementId}
|
||||
</select>
|
||||
|
||||
|
||||
@@ -121,7 +121,10 @@ class CcdiBankStatementMapperXmlTest {
|
||||
String xml = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
|
||||
|
||||
assertTrue(
|
||||
xml.contains("LEFT JOIN ccdi_file_upload_record fur ON fur.log_id = bs.batch_id AND fur.project_id = bs.project_id"),
|
||||
xml.contains("LEFT JOIN (")
|
||||
&& xml.contains("SELECT latest_record.project_id, latest_record.log_id, latest_record.file_name, latest_record.upload_time")
|
||||
&& xml.contains("MAX(id) AS max_id")
|
||||
&& xml.contains("fur.log_id = bs.batch_id AND fur.project_id = bs.project_id"),
|
||||
xml
|
||||
);
|
||||
assertTrue(xml.contains("fur.file_name AS originalFileName"), xml);
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
package com.ruoyi.ccdi.project.sql;

import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Guards the phase-1 bank-tag rule metadata in the repository's SQL assets.
 *
 * <p>Both the init script and the 2026-03-20 migration script must carry the corrected
 * indicator codes and the real-rule remark texts, so that fixing only one of the two
 * scripts cannot slip through review unnoticed.
 */
class CcdiBankTagRuleSqlMetadataTest {

    @Test
    void phase1MetadataSql_shouldAlignInitAndMigrationScripts() throws IOException {
        // Both scripts live at the repository root, one level above this module.
        String initScript = readRepositoryFile("sql", "2026-03-16-bank-tagging.sql");
        String migrationScript = readRepositoryFile("sql", "migration", "2026-03-20-sync-bank-tag-phase1-rule-metadata.sql");

        verifyPhase1Metadata(initScript);
        verifyPhase1Metadata(migrationScript);
    }

    /** Asserts the three corrected indicator codes and the three real-rule remarks are all present. */
    private void verifyPhase1Metadata(String script) {
        assertAll(
            () -> assertTrue(
                script.contains("'FOREX_BUY_AMT'")
                    && script.contains("'SINGLE_PURCHASE_AMOUNT'"),
                "FOREX_BUY_AMT 应使用 SINGLE_PURCHASE_AMOUNT"),
            () -> assertTrue(
                script.contains("'FOREX_SELL_AMT'")
                    && script.contains("'SINGLE_SETTLEMENT_AMOUNT'"),
                "FOREX_SELL_AMT 应使用 SINGLE_SETTLEMENT_AMOUNT"),
            () -> assertTrue(
                script.contains("'LARGE_STOCK_TRADING'")
                    && script.contains("'STOCK_TFR_LARGE'"),
                "LARGE_STOCK_TRADING 应使用 STOCK_TFR_LARGE"),
            () -> assertTrue(
                script.contains("真实规则:识别单笔购汇金额超过阈值的流水"),
                "应同步 FOREX_BUY_AMT 的真实规则说明"),
            () -> assertTrue(
                script.contains("真实规则:识别单笔结汇金额超过阈值的流水"),
                "应同步 FOREX_SELL_AMT 的真实规则说明"),
            () -> assertTrue(
                script.contains("真实规则:识别单笔三方资管交易金额超过阈值的流水"),
                "应同步 LARGE_STOCK_TRADING 的真实规则说明")
        );
    }

    /** Reads a UTF-8 text file located relative to the repository root ({@code ..} from this module). */
    private String readRepositoryFile(String... segments) throws IOException {
        Path target = Path.of("..", segments);
        return Files.readString(target, StandardCharsets.UTF_8);
    }
}
|
||||
@@ -38,3 +38,18 @@
|
||||
## 执行说明
|
||||
- 验证过程中若任一层失败,立即停在对应层记录证据,不继续给出“验证通过”结论。
|
||||
- 本次执行基于当前本地开发环境,不额外引入修复或扩展范围。
|
||||
|
||||
## 当前进展
|
||||
- 2026-03-20 15:21:54 CST 完成阶段 1:已对齐验证范围、读取来源实施记录、选定 `project_id=47`,并创建实施记录与验证记录骨架。
|
||||
- 2026-03-20 15:21:54 CST 完成阶段 2:`lsfx-mock-server` 聚焦回归与全量回归全部通过,确认规则命中计划、样本装配、缓存稳定性与集成链路未回退。
|
||||
- 2026-03-20 15:23:10 CST 完成阶段 3:`ccdi-project` 第一期真实规则目标测试全部 `BUILD SUCCESS`,规则映射、真实 SQL、规则分发与风险人数刷新链路保持通过。
|
||||
- 2026-03-20 15:24 左右执行阶段 4:采购基线脚本成功重跑,`LSFXMOCKPUR001` 基线记录存在且金额满足门槛;但第一期规则元数据查询发现 `indicator_code` 与既有实施记录不一致,判定为“数据基线异常”,按计划停在数据库核验层,不继续执行接口端到端验证。
|
||||
- 2026-03-20 15:41:06 CST 完成问题修复与复验:
|
||||
- 已新增第一期规则元数据 SQL 校验测试与增量修复脚本。
|
||||
- 已将修复脚本落库,确认 `FOREX_BUY_AMT`、`FOREX_SELL_AMT`、`LARGE_STOCK_TRADING` 的 `indicator_code` 与 9 条一期真实规则 `remark` 均已对齐。
|
||||
- 已完成项目 `47` 的拉取本行信息、手动重算、任务轮询、命中结果查询与流水详情接口复验。
|
||||
- Mock 与后端验证进程均已关闭。
|
||||
- 2026-03-20 16:01 左右完成补充复验:
|
||||
- 重新启动 Mock 与后端服务,复跑项目 `47` 的登录、拉取本行信息、手动重算、任务轮询与详情接口链路。
|
||||
- 自动任务 `id=39` 与手动任务 `id=40` 均执行成功,`hit_count=3636`,`success_rule_count=33`,`failed_rule_count=0`。
|
||||
- 针对之前出现 `selectOne()` 重复结果异常的样例 `bank_statement_id=67279`,详情接口已返回 `code=200`,并正确带出 `GAMBLING_SENSITIVE_KEYWORD` 命中标签与原始文件名。
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
# 第一期银行流水规则元数据修复实施记录
|
||||
|
||||
## 问题背景
|
||||
- 2026-03-20 新增模型打标完整验证在数据库核验阶段发现:
|
||||
- `FOREX_BUY_AMT.indicator_code` 仍为 `FOREX_BUY_AMT`
|
||||
- `FOREX_SELL_AMT.indicator_code` 仍为 `FOREX_SELL_AMT`
|
||||
- `LARGE_STOCK_TRADING.indicator_code` 为 `NULL`
|
||||
- 同时,第一期已落地真实规则的 `remark` 仍停留在“占位规则,待补充真实SQL”。
|
||||
|
||||
## 根因分析
|
||||
- 主初始化脚本 [`sql/2026-03-16-bank-tagging.sql`](/Users/wkc/Desktop/ccdi/ccdi/sql/2026-03-16-bank-tagging.sql) 已包含第一期真实规则的正确元数据。
|
||||
- 老增量脚本 [`sql/migration/2026-03-18-sync-bank-tag-uppercase-and-rules.sql`](/Users/wkc/Desktop/ccdi/ccdi/sql/migration/2026-03-18-sync-bank-tag-uppercase-and-rules.sql) 仍写入旧的占位元数据。
|
||||
- 已执行过 2026-03-18 增量脚本、但未补后续迁移的环境,会停留在旧的 `indicator_code` 与 `remark` 状态。
|
||||
|
||||
## 本次修改
|
||||
- 新增 SQL 资产校验测试 [`ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java`](/Users/wkc/Desktop/ccdi/ccdi/ccdi-project/src/test/java/com/ruoyi/ccdi/project/sql/CcdiBankTagRuleSqlMetadataTest.java)
|
||||
- 先以缺失迁移脚本的红灯方式固定问题。
|
||||
- 约束初始化脚本与增量脚本必须同时对齐:
|
||||
- `FOREX_BUY_AMT -> SINGLE_PURCHASE_AMOUNT`
|
||||
- `FOREX_SELL_AMT -> SINGLE_SETTLEMENT_AMOUNT`
|
||||
- `LARGE_STOCK_TRADING -> STOCK_TFR_LARGE`
|
||||
- 三条规则真实说明文案保持一致。
|
||||
- 新增增量脚本 [`sql/migration/2026-03-20-sync-bank-tag-phase1-rule-metadata.sql`](/Users/wkc/Desktop/ccdi/ccdi/sql/migration/2026-03-20-sync-bank-tag-phase1-rule-metadata.sql)
|
||||
- 使用 `INSERT ... ON DUPLICATE KEY UPDATE` 同步第一期 9 条真实规则元数据。
|
||||
- 修复三条规则的 `indicator_code`。
|
||||
- 同步 9 条规则的真实规则 `remark`。
|
||||
- 将增量脚本通过 `bin/mysql_utf8_exec.sh` 落到当前验证数据库。
|
||||
|
||||
## 实施结果
|
||||
- 规则元数据已对齐到第一期真实规则状态。
|
||||
- 新增 SQL 校验测试可在仓库层拦住“只改初始化脚本、遗漏增量脚本”的回归。
|
||||
- 修复后重新完成接口链路复验,项目 `47` 的自动拉取、手动重算、命中结果查询与详情接口均已通过。
|
||||
@@ -0,0 +1,28 @@
|
||||
# Mock 服务随机 logId 实施记录
|
||||
|
||||
## 问题背景
|
||||
- 2026-03-20 联调过程中,`lsfx-mock-server` 的 `logId` 仍使用进程内递增方式分配。
|
||||
- 仓库文档与接口预期要求 Mock 返回随机 `logId`,避免联调时对顺序值形成隐式依赖。
|
||||
|
||||
## 根因分析
|
||||
- [`lsfx-mock-server/services/file_service.py`](/Users/wkc/Desktop/ccdi/ccdi/lsfx-mock-server/services/file_service.py) 中,`upload_file()` 与 `fetch_inner_flow()` 都直接通过 `self.log_counter += 1` 生成 `logId`。
|
||||
- 现有测试只覆盖了 `logId` 落在 `10000-99999` 区间内,没有约束“冲突时需要重试并避让已有记录”。
|
||||
|
||||
## 本次修改
|
||||
- 在 [`lsfx-mock-server/tests/test_file_service.py`](/Users/wkc/Desktop/ccdi/ccdi/lsfx-mock-server/tests/test_file_service.py) 先新增红灯测试 `test_generate_log_id_should_retry_when_random_value_conflicts`。
|
||||
- 固定随机值第一次命中已存在 `logId` 时必须重试。
|
||||
- 同步把行内流水测试中的旧递增断言改为随机区间断言。
|
||||
- 在 [`lsfx-mock-server/services/file_service.py`](/Users/wkc/Desktop/ccdi/ccdi/lsfx-mock-server/services/file_service.py) 新增统一 `_generate_log_id()`。
|
||||
- 在 `10000-99999` 区间内随机生成。
|
||||
- 若命中 `file_records` 中已存在的 `logId`,则继续重试直到拿到未占用值。
|
||||
- `upload_file()` 与 `fetch_inner_flow()` 均切换为调用该方法。
|
||||
|
||||
## 验证结果
|
||||
- `python3 -m pytest lsfx-mock-server/tests/test_file_service.py -k "fetch_inner_flow_persists_primary_binding_record or generate_log_id_should_retry_when_random_value_conflicts" -v`
|
||||
- 结果:`2 passed`
|
||||
- `python3 -m pytest lsfx-mock-server/tests/test_file_service.py lsfx-mock-server/tests/test_statement_service.py lsfx-mock-server/tests/test_api.py lsfx-mock-server/tests/integration/test_full_workflow.py -v`
|
||||
- 结果:`39 passed, 20 warnings`
|
||||
|
||||
## 实施结果
|
||||
- Mock 服务的新建上传记录与行内流水记录已改为随机 `logId`。
|
||||
- 同一 `logId` 下的规则命中计划、流水样本与上传状态复用逻辑保持不变。
|
||||
@@ -1,16 +1,224 @@
|
||||
# 新增模型打标完整验证记录
|
||||
|
||||
## 执行命令
|
||||
- 待补充本次实际执行的 pytest、Maven、SQL、curl 与 Python 核验命令。
|
||||
```bash
|
||||
cd lsfx-mock-server
|
||||
python3 -m pytest tests/test_file_service.py -k "rule_hit_plan or persist_rule_hit_plan" -v
|
||||
python3 -m pytest tests/test_statement_service.py -k "rule_plan_should_only_include or withdraw_cnt_samples" -v
|
||||
python3 -m pytest tests/test_statement_service.py -k "follow_rule_hit_plan or fixed_total_count_200 or cached_result" -v
|
||||
python3 -m pytest tests/integration/test_full_workflow.py -k "same_rule_subset or share_same_primary_binding" -v
|
||||
python3 -m pytest tests/test_file_service.py tests/test_statement_service.py tests/test_api.py tests/integration/test_full_workflow.py -v
|
||||
|
||||
cd ..
|
||||
mvn test -pl ccdi-project -Dtest=BankTagRuleConfigResolverTest
|
||||
mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest
|
||||
mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,CcdiBankTagServiceImplTest
|
||||
mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,BankTagRuleConfigResolverTest,CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest
|
||||
```
|
||||
|
||||
## Mock 自动化结果
|
||||
- 2026-03-20 15:21:54 CST 完成 Mock 聚焦回归与全量回归。
|
||||
- 聚焦回归结果:
|
||||
- `tests/test_file_service.py -k "rule_hit_plan or persist_rule_hit_plan"`: `2 passed, 4 deselected, 1 warning`
|
||||
- `tests/test_statement_service.py -k "rule_plan_should_only_include or withdraw_cnt_samples"`: `2 passed, 11 deselected, 1 warning`
|
||||
- `tests/test_statement_service.py -k "follow_rule_hit_plan or fixed_total_count_200 or cached_result"`: `3 passed, 10 deselected, 1 warning`
|
||||
- `tests/integration/test_full_workflow.py -k "same_rule_subset or share_same_primary_binding"`: `2 passed, 3 deselected, 3 warnings`
|
||||
- 全量回归结果:
|
||||
- `python3 -m pytest tests/test_file_service.py tests/test_statement_service.py tests/test_api.py tests/integration/test_full_workflow.py -v`
|
||||
- 摘要:`38 passed, 20 warnings in 4.15s`
|
||||
- warning 摘要:
|
||||
- `pydantic` 的 class-based config 弃用提示仍存在。
|
||||
- `httpx` 的 `app` shortcut 弃用提示仍存在。
|
||||
- 两类 warning 与既有 Mock 验证记录一致,本次未新增 failure 或 error。
|
||||
|
||||
## 主工程自动化结果
|
||||
- 2026-03-20 15:22:27 CST 执行 `mvn test -pl ccdi-project -Dtest=BankTagRuleConfigResolverTest`,结果 `BUILD SUCCESS`,`Tests run: 6, Failures: 0, Errors: 0, Skipped: 0`。
|
||||
- 2026-03-20 15:22:47 CST 执行 `mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest`,结果 `BUILD SUCCESS`,`Tests run: 8, Failures: 0, Errors: 0, Skipped: 0`。
|
||||
- 2026-03-20 15:22:57 CST 执行 `mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,CcdiBankTagServiceImplTest`,结果 `BUILD SUCCESS`,`Tests run: 19, Failures: 0, Errors: 0, Skipped: 0`。
|
||||
- 2026-03-20 15:23:10 CST 执行 `mvn test -pl ccdi-project -Dtest=CcdiBankTagAnalysisMapperXmlTest,BankTagRuleConfigResolverTest,CcdiBankTagServiceImplTest,CcdiBankTagServiceRiskCountRefreshTest`,结果 `BUILD SUCCESS`,`Tests run: 27, Failures: 0, Errors: 0, Skipped: 0`。
|
||||
- 结果归纳:
|
||||
- `BankTagRuleConfigResolverTest` 证明第一期规则参数映射保持通过。
|
||||
- `CcdiBankTagAnalysisMapperXmlTest` 证明真实 SQL 结构保持通过。
|
||||
- `CcdiBankTagServiceImplTest` 证明规则分发和异常路径断言保持通过。
|
||||
- `CcdiBankTagServiceRiskCountRefreshTest` 证明风险人数刷新链路保持通过。
|
||||
- 日志说明:
|
||||
- 测试日志中的 `threshold missing` 与 `refresh failed` 为异常路径断言场景产生的预期日志,不代表本轮 Maven 回归失败。
|
||||
|
||||
## 数据库核验
|
||||
- 待补充采购基线、规则元数据、任务状态与命中结果查询。
|
||||
```bash
|
||||
bin/mysql_utf8_exec.sh sql/migration/2026-03-20-lsfx-mock-random-hit-rule-purchase-baseline.sql
|
||||
|
||||
python3 - <<'PY'
|
||||
from pathlib import Path
|
||||
import pymysql, re
|
||||
|
||||
text = Path('ruoyi-admin/src/main/resources/application-dev.yml').read_text(encoding='utf-8')
|
||||
match = re.search(r"url:\s*jdbc:mysql://(?P<host>[^:/?#]+):(?P<port>\d+)/(?P<db>[^?\n]+).*?\n\s*username:\s*(?P<user>[^\n]+)\n\s*password:\s*(?P<pwd>[^\n]+)", text, re.S)
|
||||
conn = pymysql.connect(
|
||||
host=match.group('host'),
|
||||
port=int(match.group('port')),
|
||||
user=match.group('user').strip(),
|
||||
password=match.group('pwd').strip(),
|
||||
database=match.group('db').strip(),
|
||||
charset='utf8mb4',
|
||||
cursorclass=pymysql.cursors.DictCursor,
|
||||
)
|
||||
with conn, conn.cursor() as cursor:
|
||||
cursor.execute("""
|
||||
SELECT purchase_id, actual_amount, supplier_name
|
||||
FROM ccdi_purchase_transaction
|
||||
WHERE purchase_id = 'LSFXMOCKPUR001'
|
||||
AND actual_amount > 100000
|
||||
""")
|
||||
print(cursor.fetchone())
|
||||
PY
|
||||
|
||||
python3 - <<'PY'
|
||||
from pathlib import Path
|
||||
import pymysql, re
|
||||
|
||||
TARGET_RULES = (
|
||||
'GAMBLING_SENSITIVE_KEYWORD','SPECIAL_AMOUNT_TRANSACTION','SUSPICIOUS_INCOME_KEYWORD',
|
||||
'FOREX_BUY_AMT','FOREX_SELL_AMT','LARGE_PURCHASE_TRANSACTION',
|
||||
'STOCK_TFR_LARGE','WITHDRAW_CNT','LARGE_STOCK_TRADING'
|
||||
)
|
||||
|
||||
text = Path('ruoyi-admin/src/main/resources/application-dev.yml').read_text(encoding='utf-8')
|
||||
match = re.search(r"url:\s*jdbc:mysql://(?P<host>[^:/?#]+):(?P<port>\d+)/(?P<db>[^?\n]+).*?\n\s*username:\s*(?P<user>[^\n]+)\n\s*password:\s*(?P<pwd>[^\n]+)", text, re.S)
|
||||
conn = pymysql.connect(
|
||||
host=match.group('host'),
|
||||
port=int(match.group('port')),
|
||||
user=match.group('user').strip(),
|
||||
password=match.group('pwd').strip(),
|
||||
database=match.group('db').strip(),
|
||||
charset='utf8mb4',
|
||||
cursorclass=pymysql.cursors.DictCursor,
|
||||
)
|
||||
sql = f"""
|
||||
SELECT model_code, rule_code, indicator_code
|
||||
FROM ccdi_bank_tag_rule
|
||||
WHERE rule_code IN ({','.join(['%s'] * len(TARGET_RULES))})
|
||||
ORDER BY model_code, sort_order, rule_code
|
||||
"""
|
||||
with conn, conn.cursor() as cursor:
|
||||
cursor.execute(sql, TARGET_RULES)
|
||||
for row in cursor.fetchall():
|
||||
print(row)
|
||||
PY
|
||||
```
|
||||
|
||||
- 采购基线脚本执行结果:
|
||||
- `bin/mysql_utf8_exec.sh sql/migration/2026-03-20-lsfx-mock-random-hit-rule-purchase-baseline.sql` 执行成功,无报错、无乱码输出。
|
||||
- 采购基线查询结果:
|
||||
- 返回 `{'purchase_id': 'LSFXMOCKPUR001', 'actual_amount': Decimal('186000.00'), 'supplier_name': '兰溪市联调供应链有限公司'}`
|
||||
- 结论:`LSFXMOCKPUR001` 存在,且 `actual_amount > 100000`,采购基线正常。
|
||||
- 规则元数据查询结果:
|
||||
- 共查询到 9 条目标规则,`rule_code` 均存在。
|
||||
- 返回摘要:
|
||||
- `STOCK_TFR_LARGE -> indicator_code=STOCK_TFR_LARGE`
|
||||
- `WITHDRAW_CNT -> indicator_code=WITHDRAW_CNT`
|
||||
- `LARGE_STOCK_TRADING -> indicator_code=NULL`
|
||||
- `FOREX_BUY_AMT -> indicator_code=FOREX_BUY_AMT`
|
||||
- `FOREX_SELL_AMT -> indicator_code=FOREX_SELL_AMT`
|
||||
- 其余 4 条规则 `indicator_code=NULL`
|
||||
- 异常判定:
|
||||
- 根据既有实施记录,`FOREX_BUY_AMT` 预期应对齐为 `SINGLE_PURCHASE_AMOUNT`。
|
||||
- `FOREX_SELL_AMT` 预期应对齐为 `SINGLE_SETTLEMENT_AMOUNT`。
|
||||
- `LARGE_STOCK_TRADING` 预期应对齐为 `STOCK_TFR_LARGE`,当前查询为 `NULL`。
|
||||
- 首次执行因此在数据库层判定为“数据基线异常”。
|
||||
- 修复后复验:
|
||||
- 已执行 `bin/mysql_utf8_exec.sh sql/migration/2026-03-20-sync-bank-tag-phase1-rule-metadata.sql`
|
||||
- 修复后查询结果:
|
||||
- `FOREX_BUY_AMT -> indicator_code=SINGLE_PURCHASE_AMOUNT`
|
||||
- `FOREX_SELL_AMT -> indicator_code=SINGLE_SETTLEMENT_AMOUNT`
|
||||
- `LARGE_STOCK_TRADING -> indicator_code=STOCK_TFR_LARGE`
|
||||
- 9 条一期真实规则 `remark` 均已同步为真实规则说明
|
||||
- 结论:数据库元数据异常已修复,可继续进入接口端到端验证。
|
||||
|
||||
## 接口验证
|
||||
- 待补充登录、拉取本行信息、手动重算、流水详情回查与结果摘要。
|
||||
```bash
|
||||
curl -s http://localhost:62318/login/test \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{"username":"admin","password":"admin123"}'
|
||||
|
||||
python3 - <<'PY'
|
||||
# 读取 3 个有效身份证号并生成 /tmp/bank-tag-pull-request.json
|
||||
PY
|
||||
|
||||
curl -s http://localhost:62318/ccdi/file-upload/pull-bank-info \
|
||||
-H "Authorization: Bearer $TOKEN" \
|
||||
-H 'Content-Type: application/json' \
|
||||
--data-binary @/tmp/bank-tag-pull-request.json
|
||||
|
||||
curl -s http://localhost:62318/ccdi/project/tags/rebuild \
|
||||
-H "Authorization: Bearer $TOKEN" \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{"projectId":47,"modelCode":null}'
|
||||
|
||||
python3 - <<'PY'
|
||||
# 轮询 ccdi_bank_tag_task 并查询目标规则命中结果
|
||||
PY
|
||||
|
||||
curl -s "http://localhost:62318/ccdi/project/bank-statement/detail/66679" \
|
||||
-H "Authorization: Bearer $TOKEN"
|
||||
```
|
||||
|
||||
- 登录结果:
|
||||
- 返回 `code=200`,token 非空。
|
||||
- 拉取本行信息结果:
|
||||
- 选择身份证号:`558455197203132040`、`523342199111246421`、`38056420050404632X`
|
||||
- 接口返回 `{"msg":"拉取任务已提交","code":200,...}`
|
||||
- 自动触发任务 `id=36`,`trigger_type=AUTO_PULL_BANK_INFO`,状态 `SUCCESS`。
|
||||
- 手动重算结果:
|
||||
- 首次调用命中项目级重算锁,返回“当前项目标签正在重算中,请稍后再试”。
|
||||
- 自动拉取任务完成后再次调用,返回 `{"msg":"标签重算任务已提交","code":200}`。
|
||||
- 最新任务 `id=37`,状态 `SUCCESS`,`hit_count=3481`,`success_rule_count=33`,`failed_rule_count=0`。
|
||||
- 命中结果查询:
|
||||
- 已查到目标规则命中,包括:
|
||||
- `WITHDRAW_CNT`
|
||||
- `GAMBLING_SENSITIVE_KEYWORD`
|
||||
- `LARGE_PURCHASE_TRANSACTION`
|
||||
- 样例明细:
|
||||
- `rule_code=GAMBLING_SENSITIVE_KEYWORD`
|
||||
- `bank_statement_id=66679`
|
||||
- `reason_detail=摘要/对手命中赌博敏感词,摘要“游戏充值”,对手方“欢乐游戏科技有限公司”,支出金额 6888.00 元`
|
||||
- 详情接口回查:
|
||||
- `GET /ccdi/project/bank-statement/detail/66679` 返回 `code=200`
|
||||
- `data.hitTags` 中包含 `GAMBLING_SENSITIVE_KEYWORD`
|
||||
|
||||
## 补充复验
|
||||
- 2026-03-20 16:01 左右,基于修复后的详情查询 SQL 再次执行项目 `47` 端到端链路验证。
|
||||
- 登录结果:
|
||||
- `POST /login/test` 返回 `code=200`,token 非空。
|
||||
- 拉取本行信息结果:
|
||||
- 仍使用身份证号 `558455197203132040`、`523342199111246421`、`38056420050404632X`
|
||||
- `POST /ccdi/file-upload/pull-bank-info` 返回 `{"msg":"拉取任务已提交","code":200,...}`
|
||||
- 自动触发任务 `id=39`,`trigger_type=AUTO_PULL_BANK_INFO`,状态 `SUCCESS`
|
||||
- `hit_count=3636`,`success_rule_count=33`,`failed_rule_count=0`
|
||||
- 手动重算结果:
|
||||
- `POST /ccdi/project/tags/rebuild` 直接返回 `{"msg":"标签重算任务已提交","code":200}`
|
||||
- 最新任务 `id=40`,`trigger_type=MANUAL`,状态 `SUCCESS`
|
||||
- `hit_count=3636`,`success_rule_count=33`,`failed_rule_count=0`
|
||||
- 命中样例回查:
|
||||
- 最新 `GAMBLING_SENSITIVE_KEYWORD` 命中样例为 `bank_statement_id=67279`
|
||||
- `reason_detail=摘要/对手命中赌博敏感词,摘要“游戏充值”,对手方“欢乐游戏科技有限公司”,支出金额 6888.00 元`
|
||||
- 详情接口回查:
|
||||
- `GET /ccdi/project/bank-statement/detail/67279` 返回 `code=200`
|
||||
- 返回结果包含 `originalFileName=558455197203132040_10001.csv`
|
||||
- `data.hitTags` 中包含 `GAMBLING_SENSITIVE_KEYWORD`
|
||||
|
||||
## 结论
|
||||
- 待本次验证全部执行完成后补充。
|
||||
- 首次执行在数据库核验阶段发现第一期规则元数据异常,问题已定位并修复。
|
||||
- 修复后重新验证结果如下:
|
||||
- Mock 自动化回归通过。
|
||||
- 主工程第一期真实规则自动化回归通过。
|
||||
- 数据库采购基线与第一期规则元数据核验通过。
|
||||
- 项目 `47` 的自动拉取、手动重算、规则命中查询与详情接口回查通过。
|
||||
- 补充复验确认:重复上传记录场景下,流水详情接口已不再出现 `selectOne()` 结果重复异常。
|
||||
- 最终结论:本次“新增模型打标完整验证”在修复元数据缺口后已通过。
|
||||
|
||||
## 环境清理
|
||||
- 待补充本次验证启动的 Mock 与后端进程清理结果。
|
||||
- 已停止本次复验启动的 Mock 服务与后端 Jar 服务。
|
||||
- 端口复核结果:
|
||||
- `62318` 无监听进程
|
||||
- `8000` 无监听进程
|
||||
|
||||
@@ -94,6 +94,8 @@ class FileService:
|
||||
"""文件上传和解析服务"""
|
||||
|
||||
INNER_FLOW_TOTAL_RECORDS = 200
|
||||
LOG_ID_MIN = settings.INITIAL_LOG_ID
|
||||
LOG_ID_MAX = 99999
|
||||
|
||||
def __init__(self, staff_identity_repository=None):
|
||||
self.file_records: Dict[int, FileRecord] = {} # logId -> FileRecord
|
||||
@@ -104,6 +106,18 @@ class FileService:
|
||||
"""按 logId 获取已存在的文件记录。"""
|
||||
return self.file_records.get(log_id)
|
||||
|
||||
def _generate_log_id(self) -> int:
|
||||
"""生成当前进程内未占用的随机 logId。"""
|
||||
available_capacity = self.LOG_ID_MAX - self.LOG_ID_MIN + 1
|
||||
if len(self.file_records) >= available_capacity:
|
||||
raise RuntimeError("可用 logId 已耗尽")
|
||||
|
||||
while True:
|
||||
candidate = random.randint(self.LOG_ID_MIN, self.LOG_ID_MAX)
|
||||
if candidate not in self.file_records:
|
||||
self.log_counter = candidate
|
||||
return candidate
|
||||
|
||||
def _infer_bank_name(self, filename: str) -> tuple:
|
||||
"""根据文件名推断银行名称和模板名称"""
|
||||
if "支付宝" in filename or "alipay" in filename.lower():
|
||||
@@ -230,9 +244,8 @@ class FileService:
|
||||
Returns:
|
||||
上传响应字典
|
||||
"""
|
||||
# 生成唯一logId
|
||||
self.log_counter += 1
|
||||
log_id = self.log_counter
|
||||
# 生成唯一 logId
|
||||
log_id = self._generate_log_id()
|
||||
|
||||
# 推断银行信息
|
||||
bank_name, template_name = self._infer_bank_name(file.filename)
|
||||
@@ -570,9 +583,8 @@ class FileService:
|
||||
data_start_date_id = request.dataStartDateId
|
||||
data_end_date_id = request.dataEndDateId
|
||||
|
||||
# 使用递增 logId,确保与上传链路一致
|
||||
self.log_counter += 1
|
||||
log_id = self.log_counter
|
||||
# 使用随机 logId,确保与上传链路一致且不覆盖现有记录
|
||||
log_id = self._generate_log_id()
|
||||
rule_hit_plan = self._build_rule_hit_plan(log_id)
|
||||
|
||||
primary_enterprise_name, primary_account_no = self._generate_primary_binding()
|
||||
|
||||
@@ -8,7 +8,7 @@ import io
|
||||
from fastapi import BackgroundTasks
|
||||
from fastapi.datastructures import UploadFile
|
||||
|
||||
from services.file_service import FileService
|
||||
from services.file_service import FileRecord, FileService
|
||||
|
||||
|
||||
class FakeStaffIdentityRepository:
|
||||
@@ -139,7 +139,7 @@ def test_fetch_inner_flow_persists_primary_binding_record(monkeypatch):
|
||||
response = service.fetch_inner_flow(request)
|
||||
log_id = response["data"][0]
|
||||
|
||||
assert log_id == service.log_counter
|
||||
assert 10000 <= log_id <= 99999
|
||||
assert log_id in service.file_records
|
||||
|
||||
record = service.file_records[log_id]
|
||||
@@ -156,6 +156,24 @@ def test_fetch_inner_flow_persists_primary_binding_record(monkeypatch):
|
||||
assert record.total_records == 200
|
||||
|
||||
|
||||
def test_generate_log_id_should_retry_when_random_value_conflicts(monkeypatch):
    """A random logId colliding with an existing record must be retried until a free value comes back."""
    svc = FileService(staff_identity_repository=FakeStaffIdentityRepository())
    svc.file_records[34567] = FileRecord(
        log_id=34567,
        group_id=1001,
        file_name="existing.csv",
    )

    # First draw collides with the seeded record; the second draw is free.
    draws = [34567, 45678]
    monkeypatch.setattr(
        "services.file_service.random.randint",
        lambda lo, hi: draws.pop(0),
    )

    assert svc._generate_log_id() == 45678
|
||||
|
||||
|
||||
def test_build_rule_hit_plan_should_be_deterministic_for_same_log_id():
|
||||
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
|
||||
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
-- Phase-1 bank-tag rule metadata sync (2026-03-20 migration).
-- Upserts the nine phase-1 "real rule" rows into ccdi_bank_tag_rule, correcting
-- indicator_code for FOREX_BUY_AMT (-> SINGLE_PURCHASE_AMOUNT),
-- FOREX_SELL_AMT (-> SINGLE_SETTLEMENT_AMOUNT) and
-- LARGE_STOCK_TRADING (-> STOCK_TFR_LARGE), and replacing placeholder remarks
-- with the real-rule descriptions.
-- NOTE(review): ON DUPLICATE KEY UPDATE relies on a unique key covering the
-- rule identity (presumably rule_code or (model_code, rule_code)) -- confirm
-- against the table DDL before running on a new environment.
START TRANSACTION;

INSERT INTO ccdi_bank_tag_rule (
    model_code,
    model_name,
    rule_code,
    rule_name,
    indicator_code,
    result_type,
    risk_level,
    business_caliber,
    enabled,
    sort_order,
    create_by,
    remark
) VALUES
('SUSPICIOUS_GAMBLING', '疑似赌博', 'GAMBLING_SENSITIVE_KEYWORD', '疑似敏感交易', NULL, 'STATEMENT', 'HIGH', '备注或交易摘要、对手有“游戏、抖币、体彩、福彩”等字眼。', 1, 20, 'system', '真实规则:识别摘要或对手方命中赌博敏感词的支出流水'),
('SUSPICIOUS_RELATION', '可疑关系', 'SPECIAL_AMOUNT_TRANSACTION', '特殊金额交易', NULL, 'STATEMENT', NULL, '除与配偶、子女外,发生特殊金额交易,如1314元、520元等具有特殊含义的金额。', 1, 10, 'system', '真实规则:识别与非配偶子女发生的特殊金额交易'),
('SUSPICIOUS_PART_TIME', '可疑兼职', 'SUSPICIOUS_INCOME_KEYWORD', '疑似兼职', NULL, 'STATEMENT', 'HIGH', '转入资金摘要有“工资”、“分红”、“红利”、“利息(非银行结息)”等收入', 1, 30, 'system', '真实规则:识别非本行工资代发的收入关键词转入流水'),
-- Corrected indicator_code: FOREX_BUY_AMT -> SINGLE_PURCHASE_AMOUNT
('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'FOREX_BUY_AMT', '可疑外汇交易', 'SINGLE_PURCHASE_AMOUNT', 'STATEMENT', NULL, '单笔购汇金额超限', 1, 10, 'system', '真实规则:识别单笔购汇金额超过阈值的流水'),
-- Corrected indicator_code: FOREX_SELL_AMT -> SINGLE_SETTLEMENT_AMOUNT
('SUSPICIOUS_FOREIGN_EXCHANGE', '可疑外汇交易', 'FOREX_SELL_AMT', '可疑外汇交易', 'SINGLE_SETTLEMENT_AMOUNT', 'STATEMENT', NULL, '单笔结汇金额超限', 1, 20, 'system', '真实规则:识别单笔结汇金额超过阈值的流水'),
('SUSPICIOUS_PURCHASE', '可疑采购', 'LARGE_PURCHASE_TRANSACTION', '可疑采购', NULL, 'STATEMENT', NULL, '单笔采购金额超过10万元。', 1, 10, 'system', '真实规则:识别单笔采购金额超过10万元的采购事项'),
('ABNORMAL_BEHAVIOR', '异常行为', 'STOCK_TFR_LARGE', '可疑银证大额转账', 'STOCK_TFR_LARGE', 'STATEMENT', NULL, '家庭老人/非关系人银证大额转账', 1, 10, 'system', '真实规则:识别银证转账金额超过阈值的流水'),
('ABNORMAL_BEHAVIOR', '异常行为', 'WITHDRAW_CNT', '微信支付宝频繁提现', 'WITHDRAW_CNT', 'OBJECT', NULL, '微信、支付宝单日提现次数超过设置次数', 1, 20, 'system', '真实规则:识别微信支付宝单日提现次数超过阈值的对象'),
-- Corrected indicator_code: LARGE_STOCK_TRADING was NULL -> STOCK_TFR_LARGE
('ABNORMAL_BEHAVIOR', '异常行为', 'LARGE_STOCK_TRADING', '大额炒股', 'STOCK_TFR_LARGE', 'STATEMENT', 'HIGH', '单次三方资管交易金额超过100万元。', 1, 60, 'system', '真实规则:识别单笔三方资管交易金额超过阈值的流水')
ON DUPLICATE KEY UPDATE
    -- rule_code is intentionally absent here: it is part of the duplicate key.
    model_code = VALUES(model_code),
    model_name = VALUES(model_name),
    rule_name = VALUES(rule_name),
    indicator_code = VALUES(indicator_code),
    result_type = VALUES(result_type),
    risk_level = VALUES(risk_level),
    business_caliber = VALUES(business_caliber),
    enabled = VALUES(enabled),
    sort_order = VALUES(sort_order),
    update_by = 'system',
    update_time = NOW(),
    remark = VALUES(remark);

COMMIT;
|
||||
Reference in New Issue
Block a user