统一Mock上传状态主体账号绑定优先级

This commit is contained in:
wkc
2026-03-18 15:50:28 +08:00
parent 6fb728709e
commit 0a85c098e8
3 changed files with 96 additions and 10 deletions

View File

@@ -288,9 +288,19 @@ class FileService:
bank_name, template_name = rng.choice(bank_options)
# 生成交易日期范围
end_date = datetime.now()
# 生成基于种子的稳定时间范围,确保同一 logId 重复查询完全一致
base_datetime = datetime(2024, 1, 1, 8, 0, 0)
end_date = base_datetime + timedelta(days=rng.randint(180, 540))
start_date = end_date - timedelta(days=rng.randint(90, 365))
file_upload_time = (
base_datetime
+ timedelta(
days=rng.randint(0, 540),
hours=rng.randint(0, 23),
minutes=rng.randint(0, 59),
seconds=rng.randint(0, 59),
)
)
# 生成账号和主体
primary_enterprise_name, primary_account_no = self._generate_primary_binding_from_rng(rng)
@@ -306,7 +316,7 @@ class FileService:
"fileSize": rng.randint(10000, 100000),
"fileUploadBy": 448,
"fileUploadByUserName": "admin@support.com",
"fileUploadTime": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
"fileUploadTime": file_upload_time.strftime("%Y-%m-%d %H:%M:%S"),
"isSplit": 0,
"leId": 10000 + rng.randint(0, 9999),
"logId": log_id,
@@ -326,6 +336,11 @@ class FileService:
"uploadStatusDesc": "data.wait.confirm.newaccount"
}
def _build_deterministic_log_detail(self, log_id: int, group_id: int) -> dict:
    """Build one deterministic-fallback log detail entry.

    The RNG is seeded with the log id, so repeated queries for the same
    logId always produce an identical record.
    """
    seeded_rng = random.Random(log_id)
    return self._generate_deterministic_record(log_id, group_id, seeded_rng)
def _build_log_detail(self, record: FileRecord) -> dict:
"""构建日志详情对象"""
return {
@@ -406,16 +421,13 @@ class FileService:
"""
logs = []
if log_id:
if log_id is not None:
if log_id in self.file_records:
logs.append(self._build_log_detail(self.file_records[log_id]))
log_detail = self._build_log_detail(self.file_records[log_id])
else:
# 使用局部随机源,避免污染全局随机状态
rng = random.Random(log_id)
log_detail = self._build_deterministic_log_detail(log_id, group_id)
# 生成确定性的文件记录
record = self._generate_deterministic_record(log_id, group_id, rng)
logs.append(record)
logs.append(log_detail)
# 返回响应
return {

View File

@@ -126,6 +126,37 @@ def test_fetch_inner_flow_followed_by_upload_status(client):
assert log["accountNoList"][0] == record.primary_account_no
def test_upload_file_followed_by_upload_status_reads_real_record(client, monkeypatch):
    """After an upload, the status query must return the real stored record
    rather than the deterministic fallback."""
    # Pin the generated binding so the expected values are known exactly.
    monkeypatch.setattr(
        file_service,
        "_generate_primary_binding",
        lambda: ("上传主体", "6222777788889999"),
    )

    upload_response = client.post(
        "/watson/api/project/remoteUploadSplitFile",
        data={"groupId": 1001},
        files={"files": ("测试文件.csv", b"mock", "text/csv")},
    )
    assert upload_response.status_code == 200
    upload_log = upload_response.json()["data"]["uploadLogList"][0]
    log_id = upload_log["logId"]

    status_response = client.get(f"/watson/api/project/bs/upload?groupId=1001&logId={log_id}")
    assert status_response.status_code == 200
    status_log = status_response.json()["data"]["logs"][0]

    # The status record must mirror the upload record exactly.
    assert status_log["enterpriseNameList"] == upload_log["enterpriseNameList"]
    assert status_log["accountNoList"] == upload_log["accountNoList"]
    assert status_log["enterpriseNameList"] == ["上传主体"]
    assert status_log["accountNoList"] == ["6222777788889999"]
    assert len(status_log["enterpriseNameList"]) == 1
    assert len(status_log["accountNoList"]) == 1
def test_fetch_inner_flow_marks_pending_complete(client):
"""拉取行内流水后getpendings 应返回未解析状态。"""
response = client.post(
@@ -218,9 +249,12 @@ def test_deterministic_data_generation(client):
assert log1["bankName"] == log2["bankName"]
assert log1["accountNoList"] == log2["accountNoList"]
assert log1["enterpriseNameList"] == log2["enterpriseNameList"]
assert len(log1["accountNoList"]) == 1
assert len(log1["enterpriseNameList"]) == 1
assert log1["status"] == log2["status"]
assert log1["logMeta"] == log2["logMeta"]
assert log1["templateName"] == log2["templateName"]
assert log1["fileUploadTime"] == log2["fileUploadTime"]
assert log1["trxDateStartId"] == log2["trxDateStartId"]
assert log1["trxDateEndId"] == log2["trxDateEndId"]

View File

@@ -40,6 +40,46 @@ def test_upload_file_primary_binding_response(monkeypatch):
assert record.account_no_list == ["6222021234567890"]
def test_upload_file_then_upload_status_reads_same_record(monkeypatch):
    """After uploading, the status endpoint must read the very same real record."""
    service = FileService()
    # Pin the generated binding so the expected values are known exactly.
    monkeypatch.setattr(
        service,
        "_generate_primary_binding",
        lambda: ("测试主体B", "6222333344445555"),
    )

    background_tasks = BackgroundTasks()
    upload = UploadFile(filename="测试文件.csv", file=io.BytesIO(b"mock"))
    upload_response = asyncio.run(service.upload_file(1001, upload, background_tasks))
    log = upload_response["data"]["uploadLogList"][0]

    # Any call into the deterministic fallback means the real record was ignored.
    def _fail_fallback(*_args, **_kwargs):
        raise AssertionError("真实记录存在时不应走 deterministic fallback")

    monkeypatch.setattr(service, "_build_deterministic_log_detail", _fail_fallback)

    status_response = service.get_upload_status(1001, log["logId"])
    status_log = status_response["data"]["logs"][0]

    # The status record must mirror the uploaded record field by field.
    for field in (
        "enterpriseNameList",
        "accountNoList",
        "bankName",
        "templateName",
        "uploadFileName",
        "trxDateStartId",
        "trxDateEndId",
    ):
        assert status_log[field] == log[field]
    assert status_log["enterpriseNameList"] == ["测试主体B"]
    assert status_log["accountNoList"] == ["6222333344445555"]
    assert len(status_log["enterpriseNameList"]) == 1
    assert len(status_log["accountNoList"]) == 1
def test_fetch_inner_flow_persists_primary_binding_record(monkeypatch):
"""拉取行内流水必须创建并保存绑定记录。"""
service = FileService()