修复流水详情原始文件关联与Mock随机logId
This commit is contained in:
@@ -94,6 +94,8 @@ class FileService:
|
||||
"""文件上传和解析服务"""
|
||||
|
||||
INNER_FLOW_TOTAL_RECORDS = 200
|
||||
LOG_ID_MIN = settings.INITIAL_LOG_ID
|
||||
LOG_ID_MAX = 99999
|
||||
|
||||
def __init__(self, staff_identity_repository=None):
|
||||
self.file_records: Dict[int, FileRecord] = {} # logId -> FileRecord
|
||||
@@ -104,6 +106,18 @@ class FileService:
|
||||
"""按 logId 获取已存在的文件记录。"""
|
||||
return self.file_records.get(log_id)
|
||||
|
||||
def _generate_log_id(self) -> int:
    """Pick a random logId that is not yet held by this process.

    Draws uniformly from [LOG_ID_MIN, LOG_ID_MAX] and redraws on any
    collision with ``self.file_records``.

    Returns:
        An unused logId within the configured range.

    Raises:
        RuntimeError: when every id in the range is already taken, so
            no draw could ever succeed.
    """
    capacity = self.LOG_ID_MAX - self.LOG_ID_MIN + 1
    # Guard first: without this, a fully-occupied range would spin forever.
    if len(self.file_records) >= capacity:
        raise RuntimeError("可用 logId 已耗尽")

    candidate = random.randint(self.LOG_ID_MIN, self.LOG_ID_MAX)
    while candidate in self.file_records:
        candidate = random.randint(self.LOG_ID_MIN, self.LOG_ID_MAX)

    # Mirror the issued id on the legacy counter field — presumably other
    # call sites still read self.log_counter (TODO confirm against callers).
    self.log_counter = candidate
    return candidate
|
||||
|
||||
def _infer_bank_name(self, filename: str) -> tuple:
|
||||
"""根据文件名推断银行名称和模板名称"""
|
||||
if "支付宝" in filename or "alipay" in filename.lower():
|
||||
@@ -230,9 +244,8 @@ class FileService:
|
||||
Returns:
|
||||
上传响应字典
|
||||
"""
|
||||
# 生成唯一logId
|
||||
self.log_counter += 1
|
||||
log_id = self.log_counter
|
||||
# 生成唯一 logId
|
||||
log_id = self._generate_log_id()
|
||||
|
||||
# 推断银行信息
|
||||
bank_name, template_name = self._infer_bank_name(file.filename)
|
||||
@@ -570,9 +583,8 @@ class FileService:
|
||||
data_start_date_id = request.dataStartDateId
|
||||
data_end_date_id = request.dataEndDateId
|
||||
|
||||
# 使用递增 logId,确保与上传链路一致
|
||||
self.log_counter += 1
|
||||
log_id = self.log_counter
|
||||
# 使用随机 logId,确保与上传链路一致且不覆盖现有记录
|
||||
log_id = self._generate_log_id()
|
||||
rule_hit_plan = self._build_rule_hit_plan(log_id)
|
||||
|
||||
primary_enterprise_name, primary_account_no = self._generate_primary_binding()
|
||||
|
||||
@@ -8,7 +8,7 @@ import io
|
||||
from fastapi import BackgroundTasks
|
||||
from fastapi.datastructures import UploadFile
|
||||
|
||||
from services.file_service import FileService
|
||||
from services.file_service import FileRecord, FileService
|
||||
|
||||
|
||||
class FakeStaffIdentityRepository:
|
||||
@@ -139,7 +139,7 @@ def test_fetch_inner_flow_persists_primary_binding_record(monkeypatch):
|
||||
response = service.fetch_inner_flow(request)
|
||||
log_id = response["data"][0]
|
||||
|
||||
assert log_id == service.log_counter
|
||||
assert 10000 <= log_id <= 99999
|
||||
assert log_id in service.file_records
|
||||
|
||||
record = service.file_records[log_id]
|
||||
@@ -156,6 +156,24 @@ def test_fetch_inner_flow_persists_primary_binding_record(monkeypatch):
|
||||
assert record.total_records == 200
|
||||
|
||||
|
||||
def test_generate_log_id_should_retry_when_random_value_conflicts(monkeypatch):
    """When the random logId hits an existing record, retry until a free one is found."""
    service = FileService(staff_identity_repository=FakeStaffIdentityRepository())

    # Pre-occupy one id so the first scripted draw collides.
    occupied_id = 34567
    service.file_records[occupied_id] = FileRecord(
        log_id=occupied_id,
        group_id=1001,
        file_name="existing.csv",
    )

    # First draw collides with the occupied id; second is free.
    scripted_draws = iter([occupied_id, 45678])
    monkeypatch.setattr(
        "services.file_service.random.randint",
        lambda start, end: next(scripted_draws),
    )

    assert service._generate_log_id() == 45678
|
||||
|
||||
|
||||
def test_build_rule_hit_plan_should_be_deterministic_for_same_log_id():
|
||||
service = FileService(staff_identity_repository=FakeStaffIdentityRepository())
|
||||
|
||||
|
||||
Reference in New Issue
Block a user