合并账户库为单表

This commit is contained in:
wkc
2026-04-17 10:18:13 +08:00
parent cc1a4538af
commit 4c6ca52e7e
24 changed files with 1285 additions and 377 deletions

4
.gitignore vendored
View File

@@ -79,4 +79,6 @@ output/
logs/
.DS_Store
.DS_Store
ruoyi-ui/vue.config.js

View File

@@ -57,6 +57,12 @@
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.ruoyi</groupId>
<artifactId>ccdi-lsfx</artifactId>
<version>3.9.1</version>
<scope>compile</scope>
</dependency>
</dependencies>

View File

@@ -9,6 +9,7 @@ import lombok.Data;
import java.io.Serial;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
/**
@@ -56,6 +57,42 @@ public class CcdiAccountInfo implements Serializable {
/** 币种 */
private String currency;
/** 是否实控账户0-否 1-是 */
@TableField("is_self_account")
private Integer isActualControl;
/** 月均交易笔数 */
@TableField("monthly_avg_trans_count")
private Integer avgMonthTxnCount;
/** 月均交易金额 */
@TableField("monthly_avg_trans_amount")
private BigDecimal avgMonthTxnAmount;
/** 交易频率等级 */
@TableField("trans_freq_type")
private String txnFrequencyLevel;
/** 借方单笔最高额 */
@TableField("dr_max_single_amount")
private BigDecimal debitSingleMaxAmount;
/** 贷方单笔最高额 */
@TableField("cr_max_single_amount")
private BigDecimal creditSingleMaxAmount;
/** 借方日累计最高额 */
@TableField("dr_max_daily_amount")
private BigDecimal debitDailyMaxAmount;
/** 贷方日累计最高额 */
@TableField("cr_max_daily_amount")
private BigDecimal creditDailyMaxAmount;
/** 风险等级 */
@TableField("trans_risk_level")
private String txnRiskLevel;
/** 状态1-正常 2-已销户 */
private Integer status;

View File

@@ -1,86 +0,0 @@
package com.ruoyi.info.collection.domain;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.io.Serial;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
/**
 * Account analysis result entity mapped to table {@code ccdi_account_result}.
 *
 * <p>Stores per-account transaction statistics and risk classification.
 * Java property names differ from the column names, so each mapped field
 * carries an explicit {@code @TableField} annotation.</p>
 *
 * @author ruoyi
 * @date 2026-04-13
 */
@Data
@TableName("ccdi_account_result")
public class CcdiAccountResult implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    /** Primary key (auto-increment). */
    @TableId(value = "result_id", type = IdType.AUTO)
    private Long resultId;
    /** Account number. */
    private String accountNo;
    /** Actually-controlled account flag: 0 = no, 1 = yes. */
    @TableField("is_self_account")
    private Integer isActualControl;
    /** Average monthly transaction count. */
    @TableField("monthly_avg_trans_count")
    private Integer avgMonthTxnCount;
    /** Average monthly transaction amount. */
    @TableField("monthly_avg_trans_amount")
    private BigDecimal avgMonthTxnAmount;
    /** Transaction frequency level. */
    @TableField("trans_freq_type")
    private String txnFrequencyLevel;
    /** Maximum single debit amount. */
    @TableField("dr_max_single_amount")
    private BigDecimal debitSingleMaxAmount;
    /** Maximum single credit amount. */
    @TableField("cr_max_single_amount")
    private BigDecimal creditSingleMaxAmount;
    /** Maximum daily cumulative debit amount. */
    @TableField("dr_max_daily_amount")
    private BigDecimal debitDailyMaxAmount;
    /** Maximum daily cumulative credit amount. */
    @TableField("cr_max_daily_amount")
    private BigDecimal creditDailyMaxAmount;
    /** Transaction risk level. */
    @TableField("trans_risk_level")
    private String txnRiskLevel;
    /** Record creator, filled automatically on insert. */
    @TableField(fill = FieldFill.INSERT)
    private String createBy;
    /** Creation time, filled automatically on insert. */
    @TableField(fill = FieldFill.INSERT)
    private Date createTime;
    /** Last updater, filled on insert and update. */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    private String updateBy;
    /** Last update time, filled on insert and update. */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    private Date updateTime;
}

View File

@@ -1,13 +0,0 @@
package com.ruoyi.info.collection.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.ruoyi.info.collection.domain.CcdiAccountResult;
/**
 * Data access layer for account analysis results ({@code ccdi_account_result}).
 *
 * <p>Inherits the standard MyBatis-Plus CRUD operations from
 * {@link BaseMapper}; no custom statements are declared.</p>
 *
 * @author ruoyi
 * @date 2026-04-13
 */
public interface CcdiAccountResultMapper extends BaseMapper<CcdiAccountResult> {
}

View File

@@ -4,7 +4,6 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.info.collection.domain.CcdiAccountInfo;
import com.ruoyi.info.collection.domain.CcdiAccountResult;
import com.ruoyi.info.collection.domain.CcdiBaseStaff;
import com.ruoyi.info.collection.domain.CcdiStaffFmyRelation;
import com.ruoyi.info.collection.domain.dto.CcdiAccountInfoAddDTO;
@@ -16,7 +15,6 @@ import com.ruoyi.info.collection.domain.vo.CcdiAccountInfoVO;
import com.ruoyi.info.collection.domain.vo.CcdiAccountRelationOptionVO;
import com.ruoyi.info.collection.domain.vo.ImportResult;
import com.ruoyi.info.collection.mapper.CcdiAccountInfoMapper;
import com.ruoyi.info.collection.mapper.CcdiAccountResultMapper;
import com.ruoyi.info.collection.mapper.CcdiBaseStaffMapper;
import com.ruoyi.info.collection.mapper.CcdiStaffFmyRelationMapper;
import com.ruoyi.info.collection.service.ICcdiAccountInfoService;
@@ -56,9 +54,6 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
@Resource
private CcdiAccountInfoMapper accountInfoMapper;
@Resource
private CcdiAccountResultMapper accountResultMapper;
@Resource
private CcdiBaseStaffMapper baseStaffMapper;
@@ -87,9 +82,8 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
CcdiAccountInfo accountInfo = new CcdiAccountInfo();
BeanUtils.copyProperties(addDTO, accountInfo);
int result = accountInfoMapper.insert(accountInfo);
syncAccountResult(accountInfo.getBankScope(), null, accountInfo.getAccountNo(), addDTO);
return result;
prepareAnalysisFields(accountInfo);
return accountInfoMapper.insert(accountInfo);
}
@Override
@@ -110,26 +104,13 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
CcdiAccountInfo accountInfo = new CcdiAccountInfo();
BeanUtils.copyProperties(editDTO, accountInfo);
int result = accountInfoMapper.updateById(accountInfo);
syncAccountResult(accountInfo.getBankScope(), existing, accountInfo.getAccountNo(), editDTO);
return result;
prepareAnalysisFields(accountInfo);
return accountInfoMapper.updateById(accountInfo);
}
@Override
@Transactional
public int deleteAccountInfoByIds(Long[] ids) {
List<CcdiAccountInfo> accountList = accountInfoMapper.selectBatchIds(Arrays.asList(ids));
if (!accountList.isEmpty()) {
List<String> accountNos = accountList.stream()
.map(CcdiAccountInfo::getAccountNo)
.filter(StringUtils::isNotEmpty)
.toList();
if (!accountNos.isEmpty()) {
LambdaQueryWrapper<CcdiAccountResult> resultWrapper = new LambdaQueryWrapper<>();
resultWrapper.in(CcdiAccountResult::getAccountNo, accountNos);
accountResultMapper.delete(resultWrapper);
}
}
return accountInfoMapper.deleteBatchIds(Arrays.asList(ids));
}
@@ -250,51 +231,38 @@ public class CcdiAccountInfoServiceImpl implements ICcdiAccountInfoService {
}
}
private void syncAccountResult(String newBankScope, CcdiAccountInfo existing, String accountNo, Object dto) {
String oldBankScope = existing == null ? null : existing.getBankScope();
String oldAccountNo = existing == null ? null : existing.getAccountNo();
if (existing != null && "EXTERNAL".equals(oldBankScope)
&& (!"EXTERNAL".equals(newBankScope) || !StringUtils.equals(oldAccountNo, accountNo))) {
LambdaQueryWrapper<CcdiAccountResult> deleteWrapper = new LambdaQueryWrapper<>();
deleteWrapper.eq(CcdiAccountResult::getAccountNo, oldAccountNo);
accountResultMapper.delete(deleteWrapper);
}
if (!"EXTERNAL".equals(newBankScope)) {
private void prepareAnalysisFields(CcdiAccountInfo accountInfo) {
if (!"EXTERNAL".equals(accountInfo.getBankScope())) {
clearAnalysisFields(accountInfo);
return;
}
if (accountInfo.getIsActualControl() == null) {
accountInfo.setIsActualControl(1);
}
if (accountInfo.getAvgMonthTxnCount() == null) {
accountInfo.setAvgMonthTxnCount(0);
}
if (accountInfo.getAvgMonthTxnAmount() == null) {
accountInfo.setAvgMonthTxnAmount(BigDecimal.ZERO);
}
if (StringUtils.isEmpty(accountInfo.getTxnFrequencyLevel())) {
accountInfo.setTxnFrequencyLevel("MEDIUM");
}
if (StringUtils.isEmpty(accountInfo.getTxnRiskLevel())) {
accountInfo.setTxnRiskLevel("LOW");
}
}
LambdaQueryWrapper<CcdiAccountResult> wrapper = new LambdaQueryWrapper<>();
wrapper.eq(CcdiAccountResult::getAccountNo, accountNo);
CcdiAccountResult existingResult = accountResultMapper.selectOne(wrapper);
CcdiAccountResult accountResult = new CcdiAccountResult();
BeanUtils.copyProperties(dto, accountResult);
accountResult.setAccountNo(accountNo);
if (accountResult.getIsActualControl() == null) {
accountResult.setIsActualControl(1);
}
if (accountResult.getAvgMonthTxnCount() == null) {
accountResult.setAvgMonthTxnCount(0);
}
if (accountResult.getAvgMonthTxnAmount() == null) {
accountResult.setAvgMonthTxnAmount(BigDecimal.ZERO);
}
if (StringUtils.isEmpty(accountResult.getTxnFrequencyLevel())) {
accountResult.setTxnFrequencyLevel("MEDIUM");
}
if (StringUtils.isEmpty(accountResult.getTxnRiskLevel())) {
accountResult.setTxnRiskLevel("LOW");
}
if (existingResult == null) {
accountResultMapper.insert(accountResult);
return;
}
accountResult.setResultId(existingResult.getResultId());
accountResultMapper.updateById(accountResult);
private void clearAnalysisFields(CcdiAccountInfo accountInfo) {
accountInfo.setIsActualControl(null);
accountInfo.setAvgMonthTxnCount(null);
accountInfo.setAvgMonthTxnAmount(null);
accountInfo.setTxnFrequencyLevel(null);
accountInfo.setDebitSingleMaxAmount(null);
accountInfo.setCreditSingleMaxAmount(null);
accountInfo.setDebitDailyMaxAmount(null);
accountInfo.setCreditDailyMaxAmount(null);
accountInfo.setTxnRiskLevel(null);
}
private void validateAmount(BigDecimal amount, String fieldLabel) {

View File

@@ -67,15 +67,15 @@
ai.status AS status,
ai.effective_date AS effectiveDate,
ai.invalid_date AS invalidDate,
ar.is_self_account AS isActualControl,
ar.monthly_avg_trans_count AS avgMonthTxnCount,
ar.monthly_avg_trans_amount AS avgMonthTxnAmount,
ar.trans_freq_type AS txnFrequencyLevel,
ar.dr_max_single_amount AS debitSingleMaxAmount,
ar.cr_max_single_amount AS creditSingleMaxAmount,
ar.dr_max_daily_amount AS debitDailyMaxAmount,
ar.cr_max_daily_amount AS creditDailyMaxAmount,
ar.trans_risk_level AS txnRiskLevel,
ai.is_self_account AS isActualControl,
ai.monthly_avg_trans_count AS avgMonthTxnCount,
ai.monthly_avg_trans_amount AS avgMonthTxnAmount,
ai.trans_freq_type AS txnFrequencyLevel,
ai.dr_max_single_amount AS debitSingleMaxAmount,
ai.cr_max_single_amount AS creditSingleMaxAmount,
ai.dr_max_daily_amount AS debitDailyMaxAmount,
ai.cr_max_daily_amount AS creditDailyMaxAmount,
ai.trans_risk_level AS txnRiskLevel,
ai.create_by AS createBy,
ai.create_time AS createTime,
ai.update_by AS updateBy,
@@ -107,10 +107,10 @@
AND ai.account_type = #{query.accountType}
</if>
<if test="query.isActualControl != null">
AND ar.is_self_account = #{query.isActualControl}
AND ai.is_self_account = #{query.isActualControl}
</if>
<if test="query.riskLevel != null and query.riskLevel != ''">
AND ar.trans_risk_level = #{query.riskLevel}
AND ai.trans_risk_level = #{query.riskLevel}
</if>
<if test="query.status != null">
AND ai.status = #{query.status}
@@ -121,7 +121,6 @@
SELECT
<include refid="AccountInfoSelectColumns"/>
FROM ccdi_account_info ai
LEFT JOIN ccdi_account_result ar ON ai.account_no = ar.account_no
LEFT JOIN ccdi_base_staff bs ON ai.owner_type = 'EMPLOYEE' AND ai.owner_id = bs.id_card
LEFT JOIN ccdi_staff_fmy_relation fr ON ai.owner_type = 'RELATION' AND ai.owner_id = fr.relation_cert_no
LEFT JOIN ccdi_base_staff bsRel ON fr.person_id = bsRel.id_card
@@ -133,7 +132,6 @@
SELECT
<include refid="AccountInfoSelectColumns"/>
FROM ccdi_account_info ai
LEFT JOIN ccdi_account_result ar ON ai.account_no = ar.account_no
LEFT JOIN ccdi_base_staff bs ON ai.owner_type = 'EMPLOYEE' AND ai.owner_id = bs.id_card
LEFT JOIN ccdi_staff_fmy_relation fr ON ai.owner_type = 'RELATION' AND ai.owner_id = fr.relation_cert_no
LEFT JOIN ccdi_base_staff bsRel ON fr.person_id = bsRel.id_card
@@ -145,7 +143,6 @@
SELECT
<include refid="AccountInfoSelectColumns"/>
FROM ccdi_account_info ai
LEFT JOIN ccdi_account_result ar ON ai.account_no = ar.account_no
LEFT JOIN ccdi_base_staff bs ON ai.owner_type = 'EMPLOYEE' AND ai.owner_id = bs.id_card
LEFT JOIN ccdi_staff_fmy_relation fr ON ai.owner_type = 'RELATION' AND ai.owner_id = fr.relation_cert_no
LEFT JOIN ccdi_base_staff bsRel ON fr.person_id = bsRel.id_card

View File

@@ -0,0 +1,109 @@
package com.ruoyi.info.collection.mapper;
import com.ruoyi.info.collection.domain.dto.CcdiAccountInfoQueryDTO;
import org.apache.ibatis.builder.xml.XMLMapperBuilder;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.scripting.xmltags.XMLLanguageDriver;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
import org.apache.ibatis.type.TypeAliasRegistry;
import org.junit.jupiter.api.Test;
import javax.sql.DataSource;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
class CcdiAccountInfoMapperTest {

    /** Classpath location of the mapper XML under test. */
    private static final String RESOURCE = "mapper/info/collection/CcdiAccountInfoMapper.xml";

    /**
     * After merging ccdi_account_result into ccdi_account_info, the page query
     * must read every analysis column from the {@code ai} alias and must not
     * reference the legacy result table at all.
     */
    @Test
    void selectAccountInfoPage_shouldReadAnalysisColumnsFromAccountInfoTableOnly() throws Exception {
        MappedStatement mappedStatement = loadMappedStatement(
                "com.ruoyi.info.collection.mapper.CcdiAccountInfoMapper.selectAccountInfoPage");
        String sql = renderSql(mappedStatement, Map.of("query", new CcdiAccountInfoQueryDTO())).toLowerCase();
        assertTrue(sql.contains("from ccdi_account_info ai"), sql);
        assertFalse(sql.contains("ccdi_account_result"), sql);
        assertTrue(sql.contains("ai.is_self_account as isactualcontrol"), sql);
        assertTrue(sql.contains("ai.monthly_avg_trans_count as avgmonthtxncount"), sql);
        assertTrue(sql.contains("ai.trans_risk_level as txnrisklevel"), sql);
    }

    /**
     * Builds a standalone MyBatis {@link Configuration} (no real database),
     * parses the mapper XML and returns the requested statement.
     *
     * @throws NullPointerException if the mapper XML is missing from the test
     *         classpath, with a message naming the resource (previously a null
     *         stream surfaced as an obscure failure inside XMLMapperBuilder)
     */
    private MappedStatement loadMappedStatement(String statementId) throws Exception {
        Configuration configuration = new Configuration();
        configuration.setEnvironment(new Environment("test", new JdbcTransactionFactory(), new NoOpDataSource()));
        registerTypeAliases(configuration.getTypeAliasRegistry());
        configuration.getLanguageRegistry().register(XMLLanguageDriver.class);
        configuration.addMapper(CcdiAccountInfoMapper.class);
        // Fail fast with a clear message when the XML resource cannot be found.
        try (InputStream inputStream = java.util.Objects.requireNonNull(
                getClass().getClassLoader().getResourceAsStream(RESOURCE),
                "Mapper XML not found on classpath: " + RESOURCE)) {
            XMLMapperBuilder xmlMapperBuilder =
                    new XMLMapperBuilder(inputStream, configuration, RESOURCE, configuration.getSqlFragments());
            xmlMapperBuilder.parse();
        }
        return configuration.getMappedStatement(statementId);
    }

    /** Renders the bound SQL for the given parameters and collapses whitespace. */
    private String renderSql(MappedStatement mappedStatement, Map<String, Object> params) {
        BoundSql boundSql = mappedStatement.getBoundSql(new HashMap<>(params));
        return boundSql.getSql().replaceAll("\\s+", " ").trim();
    }

    /** Registers the "map" alias the mapper XML relies on. */
    private void registerTypeAliases(TypeAliasRegistry typeAliasRegistry) {
        typeAliasRegistry.registerAlias("map", Map.class);
    }

    /** DataSource stub: SQL rendering never opens a real connection. */
    private static class NoOpDataSource implements DataSource {
        @Override
        public java.sql.Connection getConnection() {
            throw new UnsupportedOperationException("Not required for SQL rendering tests");
        }
        @Override
        public java.sql.Connection getConnection(String username, String password) {
            throw new UnsupportedOperationException("Not required for SQL rendering tests");
        }
        @Override
        public java.io.PrintWriter getLogWriter() {
            return null;
        }
        @Override
        public void setLogWriter(java.io.PrintWriter out) {
        }
        @Override
        public void setLoginTimeout(int seconds) {
        }
        @Override
        public int getLoginTimeout() {
            return 0;
        }
        @Override
        public java.util.logging.Logger getParentLogger() {
            return java.util.logging.Logger.getGlobal();
        }
        @Override
        public <T> T unwrap(Class<T> iface) {
            throw new UnsupportedOperationException("Not supported");
        }
        @Override
        public boolean isWrapperFor(Class<?> iface) {
            return false;
        }
    }
}

View File

@@ -0,0 +1,124 @@
package com.ruoyi.info.collection.service;
import com.ruoyi.info.collection.domain.CcdiAccountInfo;
import com.ruoyi.info.collection.domain.CcdiBaseStaff;
import com.ruoyi.info.collection.domain.dto.CcdiAccountInfoAddDTO;
import com.ruoyi.info.collection.mapper.CcdiAccountInfoMapper;
import com.ruoyi.info.collection.mapper.CcdiBaseStaffMapper;
import com.ruoyi.info.collection.mapper.CcdiStaffFmyRelationMapper;
import com.ruoyi.info.collection.service.impl.CcdiAccountInfoServiceImpl;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.BeanWrapperImpl;
import java.math.BigDecimal;
import java.util.Date;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code CcdiAccountInfoServiceImpl} after the account tables
 * were merged into one: analysis fields are persisted on
 * {@code CcdiAccountInfo} itself rather than a separate ccdi_account_result row.
 */
@ExtendWith(MockitoExtension.class)
class CcdiAccountInfoServiceImplTest {

    /** Service under test; the mocks below are injected by Mockito. */
    @InjectMocks
    private CcdiAccountInfoServiceImpl service;

    @Mock
    private CcdiAccountInfoMapper accountInfoMapper;

    @Mock
    private CcdiBaseStaffMapper baseStaffMapper;

    @Mock
    private CcdiStaffFmyRelationMapper staffFmyRelationMapper;

    /**
     * EXTERNAL-scope accounts keep the analysis values supplied in the DTO and
     * persist them on the single inserted CcdiAccountInfo row.
     */
    @Test
    void insertExternalAccount_shouldPersistAnalysisFieldsOnAccountInfo() {
        CcdiAccountInfoAddDTO dto = buildBaseAddDto();
        dto.setOwnerType("EXTERNAL");
        dto.setOwnerId("330101199001010011");
        dto.setBankScope("EXTERNAL");
        dto.setIsActualControl(0);
        dto.setAvgMonthTxnCount(6);
        dto.setAvgMonthTxnAmount(new BigDecimal("1234.56"));
        dto.setTxnFrequencyLevel("HIGH");
        dto.setDebitSingleMaxAmount(new BigDecimal("100.00"));
        dto.setCreditSingleMaxAmount(new BigDecimal("200.00"));
        dto.setDebitDailyMaxAmount(new BigDecimal("300.00"));
        dto.setCreditDailyMaxAmount(new BigDecimal("400.00"));
        dto.setTxnRiskLevel("MEDIUM");
        // Stubs: no duplicate account exists and the insert succeeds.
        when(accountInfoMapper.selectCount(any())).thenReturn(0L);
        when(accountInfoMapper.insert(any(CcdiAccountInfo.class))).thenReturn(1);
        service.insertAccountInfo(dto);
        ArgumentCaptor<CcdiAccountInfo> captor = ArgumentCaptor.forClass(CcdiAccountInfo.class);
        verify(accountInfoMapper).insert(captor.capture());
        // BeanWrapperImpl reads properties reflectively, keeping the assertions
        // independent of the entity's generated accessors.
        BeanWrapperImpl wrapper = new BeanWrapperImpl(captor.getValue());
        assertEquals(0, wrapper.getPropertyValue("isActualControl"));
        assertEquals(6, wrapper.getPropertyValue("avgMonthTxnCount"));
        assertEquals(new BigDecimal("1234.56"), wrapper.getPropertyValue("avgMonthTxnAmount"));
        assertEquals("HIGH", wrapper.getPropertyValue("txnFrequencyLevel"));
        assertEquals("MEDIUM", wrapper.getPropertyValue("txnRiskLevel"));
    }

    /**
     * Non-EXTERNAL (internal) accounts must have every analysis field cleared
     * before insert, even when the DTO carries values for them.
     */
    @Test
    void insertInternalAccount_shouldClearAnalysisFieldsOnAccountInfo() {
        CcdiAccountInfoAddDTO dto = buildBaseAddDto();
        dto.setOwnerType("EMPLOYEE");
        dto.setOwnerId("330101199001010022");
        dto.setBankScope("INTERNAL");
        dto.setIsActualControl(1);
        dto.setAvgMonthTxnCount(8);
        dto.setAvgMonthTxnAmount(new BigDecimal("9988.66"));
        dto.setTxnFrequencyLevel("HIGH");
        dto.setDebitSingleMaxAmount(new BigDecimal("111.11"));
        dto.setCreditSingleMaxAmount(new BigDecimal("222.22"));
        dto.setDebitDailyMaxAmount(new BigDecimal("333.33"));
        dto.setCreditDailyMaxAmount(new BigDecimal("444.44"));
        dto.setTxnRiskLevel("HIGH");
        // NOTE(review): the staff lookup stub suggests EMPLOYEE owners are
        // validated against ccdi_base_staff — confirm against the service impl.
        CcdiBaseStaff staff = new CcdiBaseStaff();
        staff.setIdCard(dto.getOwnerId());
        when(baseStaffMapper.selectOne(any())).thenReturn(staff);
        when(accountInfoMapper.selectCount(any())).thenReturn(0L);
        when(accountInfoMapper.insert(any(CcdiAccountInfo.class))).thenReturn(1);
        service.insertAccountInfo(dto);
        ArgumentCaptor<CcdiAccountInfo> captor = ArgumentCaptor.forClass(CcdiAccountInfo.class);
        verify(accountInfoMapper).insert(captor.capture());
        BeanWrapperImpl wrapper = new BeanWrapperImpl(captor.getValue());
        assertNull(wrapper.getPropertyValue("isActualControl"));
        assertNull(wrapper.getPropertyValue("avgMonthTxnCount"));
        assertNull(wrapper.getPropertyValue("avgMonthTxnAmount"));
        assertNull(wrapper.getPropertyValue("txnFrequencyLevel"));
        assertNull(wrapper.getPropertyValue("debitSingleMaxAmount"));
        assertNull(wrapper.getPropertyValue("creditSingleMaxAmount"));
        assertNull(wrapper.getPropertyValue("debitDailyMaxAmount"));
        assertNull(wrapper.getPropertyValue("creditDailyMaxAmount"));
        assertNull(wrapper.getPropertyValue("txnRiskLevel"));
    }

    /** Builds a DTO carrying the non-analysis fields every test case needs. */
    private CcdiAccountInfoAddDTO buildBaseAddDto() {
        CcdiAccountInfoAddDTO dto = new CcdiAccountInfoAddDTO();
        dto.setAccountNo("6222024000000001");
        dto.setAccountType("BANK");
        dto.setAccountName("测试账户");
        dto.setOpenBank("中国银行");
        dto.setBankCode("BOC");
        dto.setCurrency("CNY");
        dto.setStatus(1);
        dto.setEffectiveDate(new Date());
        return dto;
    }
}

View File

@@ -1,90 +0,0 @@
package com.ruoyi.lsfx.client;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.ruoyi.lsfx.domain.response.CreditParseResponse;
import com.ruoyi.lsfx.exception.LsfxApiException;
import com.ruoyi.lsfx.util.HttpUtil;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.test.util.ReflectionTestUtils;
import java.io.File;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code CreditParseClient}: response JSON binding, multipart
 * upload parameters, and propagation of HTTP-layer failures.
 */
@ExtendWith(MockitoExtension.class)
class CreditParseClientTest {

    // Plain Jackson mapper used only to verify the response model bindings.
    private final ObjectMapper objectMapper = new ObjectMapper();

    @Mock
    private HttpUtil httpUtil;

    @InjectMocks
    private CreditParseClient client;

    @BeforeEach
    void setUp() {
        // The endpoint URL is normally injected from configuration; set it directly.
        ReflectionTestUtils.setField(client, "creditParseUrl", "http://credit-host/xfeature-mngs/conversation/htmlEval");
    }

    /** snake_case payload keys must bind onto the camelCase response model. */
    @Test
    void shouldDeserializeCreditParseResponse() throws Exception {
        String json = """
                {
                "message": "成功",
                "status_code": "0",
                "payload": {
                "lx_header": {"query_cert_no": "3301"},
                "lx_debt": {"uncle_bank_house_bal": "12.00"},
                "lx_publictype": {"civil_cnt": 1}
                }
                }
                """;
        CreditParseResponse response = objectMapper.readValue(json, CreditParseResponse.class);
        assertEquals("0", response.getStatusCode());
        assertEquals("3301", response.getPayload().getLxHeader().get("query_cert_no"));
    }

    /** The client must call the configured URL with model/hType/file multipart params. */
    @Test
    void shouldCallConfiguredUrlWithMultipartParams() {
        File file = new File("sample.html");
        CreditParseResponse response = new CreditParseResponse();
        response.setStatusCode("0");
        when(httpUtil.uploadFile(eq("http://credit-host/xfeature-mngs/conversation/htmlEval"), anyMap(), isNull(), eq(CreditParseResponse.class)))
                .thenReturn(response);
        CreditParseResponse actual = client.parse("LXCUSTALL", "PERSON", file);
        assertEquals("0", actual.getStatusCode());
        verify(httpUtil).uploadFile(eq("http://credit-host/xfeature-mngs/conversation/htmlEval"), argThat(params ->
                "LXCUSTALL".equals(params.get("model"))
                        && "PERSON".equals(params.get("hType"))
                        && file.equals(params.get("file"))
        ), isNull(), eq(CreditParseResponse.class));
    }

    /** Failures from the HTTP layer must surface to callers as LsfxApiException. */
    @Test
    void shouldWrapHttpErrorsAsLsfxApiException() {
        when(httpUtil.uploadFile(anyString(), anyMap(), isNull(), eq(CreditParseResponse.class)))
                .thenThrow(new LsfxApiException("网络失败"));
        assertThrows(LsfxApiException.class,
                () -> client.parse("LXCUSTALL", "PERSON", new File("sample.html")));
    }
}

View File

@@ -0,0 +1,33 @@
package com.ruoyi.ccdi.project.sql;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertTrue;
class CcdiAccountInfoMergeSqlTest {
@Test
void accountInfoMergeSql_shouldAddColumnsMigrateDataAndDropLegacyTable() throws IOException {
Path path = Path.of("..", "sql", "migration",
"2026-04-16-merge-ccdi-account-result-into-info.sql");
assertTrue(Files.exists(path), "账户库合表迁移脚本应存在");
String sql = Files.readString(path, StandardCharsets.UTF_8).toLowerCase();
assertAll(
() -> assertTrue(sql.contains("bin/mysql_utf8_exec.sh")),
() -> assertTrue(sql.contains("ccdi_account_info")),
() -> assertTrue(sql.contains("add column `is_self_account`")),
() -> assertTrue(sql.contains("monthly_avg_trans_count")),
() -> assertTrue(sql.contains("update `ccdi_account_info` ai")),
() -> assertTrue(sql.contains("join `ccdi_account_result` ar")),
() -> assertTrue(sql.contains("drop table `ccdi_account_result`"))
);
}
}

101
deploy/deploy-to-nas-tongweb.sh Executable file
View File

@@ -0,0 +1,101 @@
#!/bin/bash
# Build the backend war and deploy it to a NAS-hosted TongWeb instance.
#
# Usage:
#   deploy-to-nas-tongweb.sh [host] [port] [username] [password] \
#       [remoteRoot] [tongwebHome] [appName] [--dry-run]
#
# Connection settings may also come from environment variables
# (DEPLOY_HOST / DEPLOY_PORT / DEPLOY_USERNAME / DEPLOY_PASSWORD /
# DEPLOY_REMOTE_ROOT); positional arguments override them.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"

# Security fix: the SSH password is no longer hard-coded in the repository.
# Supply it via DEPLOY_PASSWORD or the 4th positional argument.
SERVER_HOST="${DEPLOY_HOST:-116.62.17.81}"
SERVER_PORT="${DEPLOY_PORT:-9444}"
SERVER_USERNAME="${DEPLOY_USERNAME:-wkc}"
SERVER_PASSWORD="${DEPLOY_PASSWORD:-}"
REMOTE_ROOT="${DEPLOY_REMOTE_ROOT:-/volume1/webapp/ccdi}"
TONGWEB_HOME="${TONGWEB_HOME:-/opt/TongWeb}"
APP_NAME="${APP_NAME:-ruoyi-admin}"
DRY_RUN="false"

# Abort with a clear message when a required local command is missing.
ensure_command() {
    local command_name="$1"
    if ! command -v "${command_name}" >/dev/null 2>&1; then
        echo "缺少命令: ${command_name}" >&2
        exit 1
    fi
}

# Install paramiko for the current user unless it is already importable.
ensure_paramiko() {
    if python3 - <<'PY'
import importlib.util
import sys
sys.exit(0 if importlib.util.find_spec("paramiko") else 1)
PY
    then
        return
    fi
    python3 -m pip install --user paramiko
}

# Positional arguments override the defaults above; --dry-run may appear anywhere.
POSITION=0
for arg in "$@"; do
    if [[ "${arg}" == "--dry-run" ]]; then
        DRY_RUN="true"
        continue
    fi
    POSITION=$((POSITION + 1))
    case "${POSITION}" in
        1) SERVER_HOST="${arg}" ;;
        2) SERVER_PORT="${arg}" ;;
        3) SERVER_USERNAME="${arg}" ;;
        4) SERVER_PASSWORD="${arg}" ;;
        5) REMOTE_ROOT="${arg}" ;;
        6) TONGWEB_HOME="${arg}" ;;
        7) APP_NAME="${arg}" ;;
        *)
            echo "仅支持 [host] [port] [username] [password] [remoteRoot] [tongwebHome] [appName] [--dry-run]" >&2
            exit 1
            ;;
    esac
done

if [[ "${DRY_RUN}" == "true" ]]; then
    echo "[DryRun] TongWeb NAS 部署参数预览"
    echo "Host: ${SERVER_HOST}"
    echo "Port: ${SERVER_PORT}"
    echo "Username: ${SERVER_USERNAME}"
    echo "RemoteRoot: ${REMOTE_ROOT}"
    echo "TongWebHome: ${TONGWEB_HOME}"
    echo "AppName: ${APP_NAME}"
    exit 0
fi

# Real deployments need the password; dry runs above do not.
if [[ -z "${SERVER_PASSWORD}" ]]; then
    echo "缺少部署密码: 请设置 DEPLOY_PASSWORD 环境变量或传入第 4 个参数" >&2
    exit 1
fi

echo "[1/4] 检查本地环境"
ensure_command "mvn"
ensure_command "python3"

echo "[2/4] 打包后端 war"
(
    cd "${REPO_ROOT}"
    mvn -pl ruoyi-admin -am package -DskipTests
)
WAR_PATH="${REPO_ROOT}/ruoyi-admin/target/ruoyi-admin.war"
if [[ ! -f "${WAR_PATH}" ]]; then
    echo "未找到后端 war 包: ${WAR_PATH}" >&2
    exit 1
fi

echo "[3/4] 检查远端执行依赖"
ensure_paramiko

echo "[4/4] 上传 war 并重启 TongWeb"
python3 "${SCRIPT_DIR}/remote-deploy-tongweb.py" \
    --host "${SERVER_HOST}" \
    --port "${SERVER_PORT}" \
    --username "${SERVER_USERNAME}" \
    --password "${SERVER_PASSWORD}" \
    --local-war "${WAR_PATH}" \
    --remote-root "${REMOTE_ROOT}" \
    --tongweb-home "${TONGWEB_HOME}" \
    --app-name "${APP_NAME}"

View File

@@ -0,0 +1,136 @@
import argparse
import posixpath
import shlex
import sys
from pathlib import Path
import paramiko
def parse_args():
    """Parse command-line options for the TongWeb deploy helper.

    Every option is required; values are normally supplied by
    deploy-to-nas-tongweb.sh.
    """
    parser = argparse.ArgumentParser(description="Upload backend war to NAS and restart TongWeb.")
    parser.add_argument("--host", required=True)
    parser.add_argument("--port", type=int, required=True)
    parser.add_argument("--username", required=True)
    parser.add_argument("--password", required=True)
    parser.add_argument("--local-war", required=True)
    parser.add_argument("--remote-root", required=True)
    parser.add_argument("--tongweb-home", required=True)
    parser.add_argument("--app-name", required=True)
    return parser.parse_args()
def run_command(ssh, command):
    """Run *command* over SSH and return (exit_code, stdout_text, stderr_text).

    Blocks until the remote command finishes; stream contents are decoded as
    UTF-8 with undecodable bytes ignored.
    """
    # stdin is never written to; name it as unused instead of holding a ref.
    _stdin, stdout, stderr = ssh.exec_command(command)
    exit_code = stdout.channel.recv_exit_status()
    output = stdout.read().decode("utf-8", errors="ignore")
    error = stderr.read().decode("utf-8", errors="ignore")
    return exit_code, output, error
def sudo_prefix(password):
    """Build a shell prefix that pipes *password* into ``sudo -S``."""
    quoted = shlex.quote(password)
    return "printf '%s\\n' " + quoted + " | sudo -S -p '' "
def detect_command_prefix(ssh, password, command):
    """Return '' when *command* succeeds as-is, or a sudo prefix when it
    only succeeds under sudo.

    Raises RuntimeError when the command fails both ways.
    """
    exit_code, _, _ = run_command(ssh, f"{command} >/dev/null 2>&1")
    if exit_code == 0:
        return ""
    prefix = sudo_prefix(password)
    exit_code, _, _ = run_command(ssh, f"{prefix}{command} >/dev/null 2>&1")
    if exit_code == 0:
        return prefix
    raise RuntimeError(f"Remote command is not accessible: {command}")
def ensure_remote_path(ssh, prefix, remote_path):
    """Create *remote_path* on the remote host (mkdir -p), honoring *prefix*."""
    mkdir_command = f"{prefix}mkdir -p {shlex.quote(remote_path)}"
    code, out, err = run_command(ssh, mkdir_command)
    if code != 0:
        raise RuntimeError(f"Failed to create remote directory {remote_path}:\n{out}\n{err}")
def upload_file(sftp, local_file, remote_file):
    """Upload *local_file* to *remote_file*, verifying the target directory exists.

    Raises RuntimeError (chained to the underlying OSError) when the remote
    parent directory is missing, so deploy failures stay diagnosable.
    """
    parent_dir = posixpath.dirname(remote_file)
    try:
        sftp.listdir(parent_dir)
    except OSError as exc:
        # Preserve the original cause instead of silently discarding it.
        raise RuntimeError(f"SFTP remote directory not found: {parent_dir}") from exc
    sftp.put(str(local_file), remote_file)
def build_deploy_command(args, prefix):
    """Compose the single remote shell command that redeploys the war on TongWeb.

    Steps: sanity-check TongWeb paths, stop the server, wipe the previously
    deployed app, copy the uploaded war into autodeploy, restart, and list the
    autodeploy directory for the caller's log.
    """
    tongweb_home = args.tongweb_home.rstrip("/")
    war_name = f"{args.app_name}.war"
    uploaded_war = posixpath.join(args.remote_root.rstrip("/"), "backend", war_name)
    autodeploy_dir = posixpath.join(tongweb_home, "autodeploy")
    target_war = posixpath.join(autodeploy_dir, war_name)
    target_dir = posixpath.join(autodeploy_dir, args.app_name)
    stop_script = posixpath.join(tongweb_home, "bin", "stopserver.sh")
    start_script = posixpath.join(tongweb_home, "bin", "startservernohup.sh")
    steps = [
        "set -e",
        f"test -d {shlex.quote(args.tongweb_home)}",
        f"test -x {shlex.quote(stop_script)}",
        f"test -x {shlex.quote(start_script)}",
        f"{prefix}mkdir -p {shlex.quote(autodeploy_dir)}",
        f"{prefix}sh {shlex.quote(stop_script)} >/dev/null 2>&1 || true",
        f"{prefix}rm -rf {shlex.quote(target_dir)}",
        f"{prefix}rm -f {shlex.quote(target_war)}",
        f"{prefix}cp {shlex.quote(uploaded_war)} {shlex.quote(target_war)}",
        f"{prefix}sh {shlex.quote(start_script)}",
        "sleep 5",
        f"ls -l {shlex.quote(autodeploy_dir)}",
    ]
    return ";".join(steps) + ";"
def main():
    """Upload the war to the NAS over SFTP, then redeploy it on TongWeb.

    Flow: validate the local war, connect over SSH, ensure the remote staging
    directories exist, upload the war, detect whether remote commands need
    sudo, then run the combined redeploy command and echo its output.
    """
    args = parse_args()
    local_war = Path(args.local_war).resolve()
    if not local_war.exists():
        raise FileNotFoundError(f"Local war does not exist: {local_war}")
    ssh = paramiko.SSHClient()
    # NOTE(review): AutoAddPolicy skips host-key verification; acceptable for a
    # trusted internal NAS, but consider pinning the host key.
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(
        hostname=args.host,
        port=args.port,
        username=args.username,
        password=args.password,
        timeout=20,
    )
    sftp = ssh.open_sftp()
    try:
        remote_root = args.remote_root.rstrip("/")
        remote_backend_dir = posixpath.join(remote_root, "backend")
        remote_war_path = posixpath.join(remote_backend_dir, f"{args.app_name}.war")
        # Staging directories are created without a sudo prefix — presumably
        # they are user-writable so SFTP can upload into them; confirm on NAS.
        ensure_remote_path(ssh, "", remote_root)
        ensure_remote_path(ssh, "", remote_backend_dir)
        upload_file(sftp, local_war, remote_war_path)
        command_prefix = detect_command_prefix(ssh, args.password, f"test -d {shlex.quote(args.tongweb_home)}")
        deploy_command = build_deploy_command(args, command_prefix)
        exit_code, output, error = run_command(ssh, deploy_command)
        if exit_code != 0:
            raise RuntimeError(f"Remote TongWeb deploy failed:\n{output}\n{error}")
        print("=== DEPLOY OUTPUT ===")
        print(output.strip())
        if error.strip():
            print("=== DEPLOY STDERR ===")
            print(error.strip())
    finally:
        # Always release the SFTP session and SSH connection.
        sftp.close()
        ssh.close()
if __name__ == "__main__":
    # Entry point: report any failure on stderr and exit non-zero so the
    # calling deploy shell script aborts.
    try:
        main()
    except Exception as exc:
        print(str(exc), file=sys.stderr)
        sys.exit(1)

View File

@@ -0,0 +1,94 @@
# 2026-04-14 后端运行与打包约定实施记录
## 1. 改动目标
- 固化本地后端继续走 `ruoyi-admin.jar + 内嵌 Tomcat` 启动链路
- 固化 `mvn -pl ruoyi-admin -am package -DskipTests` 同时产出 `jar``war`
- 固化部署脚本统一消费 `ruoyi-admin.war`
- 固化 `bin/restart_java_backend.sh` 默认跟随后端日志,并支持 `FOLLOW_LOGS=false`
## 2. 实施内容
### 2.1 Maven 打包链路
涉及文件:
- `ruoyi-admin/pom.xml`
实施内容:
- 保持 `ruoyi-admin``<packaging>jar</packaging>` 不变,确保本地运行仍使用可执行 `jar`
-`maven-war-plugin` 增加 `package` 阶段显式执行 `war` 目标,确保执行 `mvn -pl ruoyi-admin -am package -DskipTests` 时额外生成 `ruoyi-admin.war`
- 保留 `spring-boot-maven-plugin repackage`,继续生成可执行 `ruoyi-admin.jar`
### 2.2 本地后端重启脚本
涉及文件:
- `bin/restart_java_backend.sh`
实施内容:
- 新增 `FOLLOW_LOGS="${FOLLOW_LOGS:-true}"` 默认开关
- `start``restart` 成功后默认执行 `tail -F` 持续输出后端日志
- 当外部传入 `FOLLOW_LOGS=false` 时,仅启动后端,不进入日志跟随
### 2.3 部署产物切换
涉及文件:
- `deploy/deploy-to-nas.sh`
- `deploy/deploy.ps1`
- `docker/backend/Dockerfile`
实施内容:
- 部署目录组装时由复制 `ruoyi-admin.jar` 改为复制 `ruoyi-admin.war`
- Docker 后端镜像改为消费 `ruoyi-admin.war`
- 保证部署脚本不再把 `ruoyi-admin.jar` 当作生产部署产物
### 2.4 项目约定同步
涉及文件:
- `AGENTS.md`
实施内容:
- 补充本地运行、双产物打包、部署使用 `war``FOLLOW_LOGS` 开关等仓库级约定
- 在 Build / Run / Test Commands 中补充主应用定向打包命令
## 3. 验证记录
### 3.1 脚本检查
执行:
```bash
sh docs/tests/scripts/test-restart-java-backend.sh
sh docs/tests/scripts/test-backend-package-and-deploy-conventions.sh
```
结果:
- 两个脚本均通过
### 3.2 Maven 双产物验证
执行:
```bash
mvn -pl ruoyi-admin -am package -DskipTests
```
结果:
- 构建成功
- 生成 `ruoyi-admin/target/ruoyi-admin.jar`
- 生成 `ruoyi-admin/target/ruoyi-admin.war`
## 4. 结论
- 本地开发链路继续保持 `jar + 内嵌 Tomcat`
- 部署链路统一切换为 `war`
- 后端重启脚本默认跟日志,且支持显式关闭

View File

@@ -0,0 +1,62 @@
# 2026-04-14 NAS TongWeb 部署脚本实施记录
## 1. 目标
- 新增一套独立于 Docker 的 NAS 部署脚本
- 部署链路固定使用 `ruoyi-admin.war`
- 远端通过 `TongWeb` 自动部署目录发布应用,并使用 `stopserver.sh` / `startservernohup.sh` 重启服务
## 2. 实施内容
### 2.1 新增 TongWeb NAS 部署入口
涉及文件:
- `deploy/deploy-to-nas-tongweb.sh`
实施内容:
- 提供与现有 NAS 脚本一致的 SSH 连接参数风格
- 默认执行 `mvn -pl ruoyi-admin -am package -DskipTests`
- 本地仅校验并上传 `ruoyi-admin/target/ruoyi-admin.war`
- 支持 `--dry-run` 预览参数
### 2.2 新增 TongWeb 远端执行器
涉及文件:
- `deploy/remote-deploy-tongweb.py`
实施内容:
- 通过 SFTP 将 `war` 上传到 NAS 临时目录 `${remoteRoot}/backend/`
- 远端复制 `war``${TONGWEB_HOME}/autodeploy/${appName}.war`
- 清理 `${TONGWEB_HOME}/autodeploy/${appName}` 旧解压目录
- 依次执行 `stopserver.sh``startservernohup.sh`
### 2.3 新增脚本回归测试
涉及文件:
- `tests/deploy/test_deploy_to_nas_tongweb.py`
实施内容:
- 覆盖默认参数 `dry-run`
- 覆盖自定义参数 `dry-run`
- 校验部署入口已调用 `remote-deploy-tongweb.py`
- 校验远端执行器包含 `autodeploy``stopserver.sh``startservernohup.sh`
## 3. 验证命令
```bash
python3 -m pytest tests/deploy/test_deploy_to_nas_tongweb.py -q
bash -n deploy/deploy-to-nas-tongweb.sh
bash deploy/deploy-to-nas-tongweb.sh --dry-run
```
## 4. 说明
- 默认 `TongWebHome``/opt/TongWeb`,可通过第 6 个位置参数或环境变量 `TONGWEB_HOME` 覆盖
- 默认应用名为 `ruoyi-admin`,可通过第 7 个位置参数或环境变量 `APP_NAME` 覆盖
- 本次只新增 `TongWeb` 后端部署链路,不改动现有 Docker NAS 部署脚本

View File

@@ -0,0 +1,26 @@
# LSFX Mock Server `ccdi_account_info` 异常账户字段补迁移后端实施文档
## 背景
- `lsfx-mock-server` 上传接口 `/watson/api/project/remoteUploadSplitFile` 在写入 `ccdi_account_info` 时使用了 `is_self_account``trans_risk_level` 字段。
- 当前开发库中的 `ccdi_account_info` 为历史表结构,不包含这两列,导致 `AbnormalAccountBaselineService.apply(...)` 执行 upsert 时依次抛出 `Unknown column 'is_self_account' in 'field list'``Unknown column 'trans_risk_level' in 'field list'`,上传接口直接返回 500。
## 本次修改
- 新增增量脚本 `sql/migration/2026-04-15-sync-ccdi-account-info-abnormal-account-columns.sql`
- 脚本以最短路径为已有 `ccdi_account_info` 表补齐异常账户同步当前必需的字段,并保持可重复执行:
- 使用 `information_schema.columns` 判断字段是否已存在
- 通过 `PREPARE / EXECUTE` 仅在缺列时执行 `ALTER TABLE`
- 补齐 `is_self_account``trans_risk_level`
- 列位置与当前写库 SQL 保持一致
- 新增回归测试 `lsfx-mock-server/tests/test_schema_migration_scripts.py`,锁定该增量脚本必须存在且包含两条补列语句。
## 验证
- `python3 -m pytest /Users/wkc/Desktop/ccdi/ccdi/lsfx-mock-server/tests/test_schema_migration_scripts.py -q`
- 使用 `bin/mysql_utf8_exec.sh` 执行增量脚本后,复查 `SHOW COLUMNS FROM ccdi_account_info`,确认存在 `is_self_account``trans_risk_level` 字段。
## 影响范围
- 仅影响 `lsfx-mock-server` 依赖的 `ccdi_account_info` 历史表结构补齐。
- 不修改接口协议,不改动前端。

View File

@@ -0,0 +1,53 @@
# 账户库双表合单表后端实施计划
## 1. 目标
将账户库由 `ccdi_account_info` + `ccdi_account_result` 双表结构收敛为单表 `ccdi_account_info`,迁移完成后删除旧表,同时保持现有账户库接口、字段名和前端交互不变。
## 2. 实施范围
- 数据库增量迁移脚本
- 账户库后端实体、Mapper XML、服务层
- 外部场景种子脚本
- 账户库相关回归测试
本次不调整前端页面、接口路径和接口字段名。
## 3. 实施步骤
### 3.1 数据库迁移
1. 新增 `sql/migration/2026-04-16-merge-ccdi-account-result-into-info.sql`
2. 在脚本中先校验 `ccdi_account_info.account_no` 无重复
3.`ccdi_account_info` 补齐分析字段
4.`account_no``ccdi_account_result` 回填数据
5. 回填完成后删除 `ccdi_account_result`
### 3.2 后端代码调整
1. `CcdiAccountInfo` 实体吸收分析字段映射
2. 删除 `CcdiAccountResult` 实体与 `CcdiAccountResultMapper`
3. `CcdiAccountInfoMapper.xml` 去掉对 `ccdi_account_result` 的联表
4. `CcdiAccountInfoServiceImpl` 去掉结果表双写逻辑
5. 保持原有业务语义:
- `bankScope = EXTERNAL` 时补齐默认分析字段
- `bankScope != EXTERNAL` 时清空分析字段,避免误写
### 3.3 配套脚本与测试
1.`2026-04-13` 外部账户场景种子脚本改为单表写入
2. 新增 SQL 脚本文本断言测试
3. 新增账户库服务层与 Mapper SQL 结构测试
## 4. 验证要点
- 迁移脚本包含“补字段、回填、删旧表”三步
- 账户库列表/详情/导出查询均只读 `ccdi_account_info`
- 行外账户保存时补齐默认分析字段
- 行内账户清空分析字段
- 外部场景种子脚本不再写入 `ccdi_account_result`
## 5. 风险说明
- 仓库当前 `ccdi-info-collection` 模块存在既有依赖缺失问题,可能影响常规 Maven 全量编译与测试执行
- 本次需要将“账户库改动验证结果”和“仓库原有构建阻塞”分开记录

View File

@@ -0,0 +1,62 @@
# 账户库双表合单表实施记录
## 1. 本次实施内容
### 1.1 单表模型收敛
-`CcdiAccountInfo` 中补齐以下分析字段映射:
- `is_self_account`
- `monthly_avg_trans_count`
- `monthly_avg_trans_amount`
- `trans_freq_type`
- `dr_max_single_amount`
- `cr_max_single_amount`
- `dr_max_daily_amount`
- `cr_max_daily_amount`
- `trans_risk_level`
- 删除 `CcdiAccountResult` 实体与 `CcdiAccountResultMapper`
### 1.2 查询与写入逻辑调整
- `CcdiAccountInfoMapper.xml` 已移除 `ccdi_account_result` 联表
- 账户库列表、详情、导出统一从 `ccdi_account_info` 读取分析字段
- `CcdiAccountInfoServiceImpl` 已移除结果表双写逻辑
- 新增单表分析字段处理规则:
- 行外账户默认补齐分析字段缺省值
- 行内账户统一清空分析字段
### 1.3 数据迁移与种子脚本
- 新增增量脚本:
- `sql/migration/2026-04-16-merge-ccdi-account-result-into-info.sql`
- 更新外部场景种子脚本:
- `sql/migration/2026-04-13-seed-ccdi-account-info-external-scenarios.sql`
- 种子脚本已改为直接写入 `ccdi_account_info`,不再依赖旧表
### 1.4 测试补充
- 新增 `CcdiAccountInfoServiceImplTest`
- 新增 `CcdiAccountInfoMapperTest`
- 新增 `CcdiAccountInfoMergeSqlTest`
## 2. 验证记录
### 2.1 已完成验证
- `ccdi-project` 模块执行 `mvn -pl ccdi-project -DskipTests compile` 成功
- 文件级检查确认:
- 账户库主链路代码已无 `CcdiAccountResult` / `accountResultMapper` 引用
- `CcdiAccountInfoMapper.xml` 已无 `ccdi_account_result` 联表
- 新增迁移脚本包含补字段、按 `account_no` 回填、删除旧表逻辑
### 2.2 现存仓库阻塞
- `ccdi-info-collection` 模块常规编译失败,失败原因为仓库已有依赖/类缺失,与本次账户库改动不直接相关
- 典型阻塞包括:
- `com.ruoyi.common.annotation` 下若干注解类缺失
- 多个服务类依赖 `org.springframework.data.redis.core`,当前模块未解析
- 既有测试代码与当前依赖版本存在不一致
## 3. 结论
本次账户库已按方案完成“双表合单表”代码与 SQL 收敛,后续若要做完整 Maven 回归,需要先处理仓库当前已有的模块依赖与测试编译问题。

View File

@@ -0,0 +1,70 @@
#!/bin/sh
# Regression guard for backend packaging conventions:
#   - ruoyi-admin must build both an executable jar (local run) and a war (deploy)
#   - every deploy artifact consumer must reference ruoyi-admin.war only
set -eu

ROOT_DIR=$(CDPATH= cd -- "$(dirname -- "$0")/../../.." && pwd)
POM_FILE="$ROOT_DIR/ruoyi-admin/pom.xml"
DEPLOY_SH="$ROOT_DIR/deploy/deploy-to-nas.sh"
DEPLOY_PS1="$ROOT_DIR/deploy/deploy.ps1"
DOCKERFILE="$ROOT_DIR/docker/backend/Dockerfile"
TARGET_DIR="$ROOT_DIR/ruoyi-admin/target"

# fail MESSAGE -- report the reason and abort the whole check.
fail() {
    echo "$1"
    exit 1
}

# must_contain FILE PATTERN MESSAGE -- FILE must contain the literal PATTERN.
must_contain() {
    grep -Fq "$2" "$1" || fail "$3"
}

# must_not_contain FILE PATTERN MESSAGE -- FILE must NOT contain the literal PATTERN.
must_not_contain() {
    if grep -Fq "$2" "$1"; then
        fail "$3"
    fi
}

echo "[检查] 后端打包必须同时产出 jar 与 war部署脚本只能使用 war"
must_contain "$POM_FILE" '<packaging>jar</packaging>' '失败: ruoyi-admin 仍需保持 jar 打包类型以支持本地内嵌 Tomcat 运行'
must_contain "$POM_FILE" '<goal>war</goal>' '失败: 未显式执行 war 打包目标'
must_contain "$DEPLOY_SH" 'ruoyi-admin.war' '失败: deploy-to-nas.sh 未改为使用 ruoyi-admin.war'
must_not_contain "$DEPLOY_SH" 'ruoyi-admin.jar' '失败: deploy-to-nas.sh 仍引用 ruoyi-admin.jar'
must_contain "$DEPLOY_PS1" 'ruoyi-admin.war' '失败: deploy.ps1 未改为使用 ruoyi-admin.war'
must_not_contain "$DEPLOY_PS1" 'ruoyi-admin.jar' '失败: deploy.ps1 仍引用 ruoyi-admin.jar'
must_contain "$DOCKERFILE" 'COPY backend/ruoyi-admin.war /app/ruoyi-admin.war' '失败: Dockerfile 未改为复制 ruoyi-admin.war'
must_not_contain "$DOCKERFILE" 'ruoyi-admin.jar' '失败: Dockerfile 仍引用 ruoyi-admin.jar'

echo "[检查] 执行 Maven 打包产物校验"
(
    cd "$ROOT_DIR"
    mvn -pl ruoyi-admin -am package -DskipTests
)

# Both artifacts must exist after a single package invocation.
for artifact in ruoyi-admin.jar ruoyi-admin.war; do
    [ -f "$TARGET_DIR/$artifact" ] || fail "失败: 未生成 $TARGET_DIR/$artifact"
done

echo "通过"

View File

@@ -0,0 +1,19 @@
from pathlib import Path
def test_ccdi_account_info_should_have_incremental_migration_for_abnormal_account_columns():
    """The abnormal-account migration script must exist and add both guarded columns."""
    repo_root = Path(__file__).resolve().parents[2]
    script = (
        repo_root
        / "sql"
        / "migration"
        / "2026-04-15-sync-ccdi-account-info-abnormal-account-columns.sql"
    )
    assert script.exists(), "缺少 ccdi_account_info 异常账户字段补迁移脚本"
    content = script.read_text(encoding="utf-8")
    for needle in (
        "information_schema.columns",
        "ALTER TABLE `ccdi_account_info` ADD COLUMN `is_self_account`",
        "ALTER TABLE `ccdi_account_info` ADD COLUMN `trans_risk_level`",
    ):
        assert needle in content

View File

@@ -11,6 +11,15 @@ INSERT INTO ccdi_account_info (
bank,
bank_code,
currency,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
status,
effective_date,
invalid_date,
@@ -28,32 +37,6 @@ SELECT
'HZLH001',
'CNY',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_info WHERE account_no = '622202440000010001'
);
INSERT INTO ccdi_account_result (
account_no,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
create_by,
update_by
)
SELECT
'622202440000010001',
1,
12,
28600.00,
'MEDIUM',
@@ -62,11 +45,14 @@ SELECT
16000.00,
22000.00,
'MEDIUM',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_result WHERE account_no = '622202440000010001'
SELECT 1 FROM ccdi_account_info WHERE account_no = '622202440000010001'
);
INSERT INTO ccdi_account_info (
@@ -79,6 +65,15 @@ INSERT INTO ccdi_account_info (
bank,
bank_code,
currency,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
status,
effective_date,
invalid_date,
@@ -95,32 +90,6 @@ SELECT
'国泰君安杭州营业部',
'GTJAHZ01',
'CNY',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_info WHERE account_no = 'ZQ330101199104010101'
);
INSERT INTO ccdi_account_result (
account_no,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
create_by,
update_by
)
SELECT
'ZQ330101199104010101',
0,
6,
152000.00,
@@ -130,11 +99,14 @@ SELECT
98000.00,
116000.00,
'HIGH',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_result WHERE account_no = 'ZQ330101199104010101'
SELECT 1 FROM ccdi_account_info WHERE account_no = 'ZQ330101199104010101'
);
INSERT INTO ccdi_account_info (
@@ -147,6 +119,15 @@ INSERT INTO ccdi_account_info (
bank,
bank_code,
currency,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
status,
effective_date,
invalid_date,
@@ -163,32 +144,6 @@ SELECT
'支付宝',
'ALIPAY',
'CNY',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_info WHERE account_no = '13700000035'
);
INSERT INTO ccdi_account_result (
account_no,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
create_by,
update_by
)
SELECT
'13700000035',
0,
18,
46800.00,
@@ -198,11 +153,14 @@ SELECT
18800.00,
21600.00,
'LOW',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_result WHERE account_no = '13700000035'
SELECT 1 FROM ccdi_account_info WHERE account_no = '13700000035'
);
INSERT INTO ccdi_account_info (
@@ -215,6 +173,15 @@ INSERT INTO ccdi_account_info (
bank,
bank_code,
currency,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
status,
effective_date,
invalid_date,
@@ -231,32 +198,6 @@ SELECT
'微信支付',
'WXPAY',
'CNY',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_info WHERE account_no = 'wx-ext-20260413-001'
);
INSERT INTO ccdi_account_result (
account_no,
is_self_account,
monthly_avg_trans_count,
monthly_avg_trans_amount,
trans_freq_type,
dr_max_single_amount,
cr_max_single_amount,
dr_max_daily_amount,
cr_max_daily_amount,
trans_risk_level,
create_by,
update_by
)
SELECT
'wx-ext-20260413-001',
0,
9,
9800.00,
@@ -266,9 +207,12 @@ SELECT
5600.00,
7000.00,
'LOW',
1,
'2026-04-13',
NULL,
'system',
'system'
FROM dual
WHERE NOT EXISTS (
SELECT 1 FROM ccdi_account_result WHERE account_no = 'wx-ext-20260413-001'
SELECT 1 FROM ccdi_account_info WHERE account_no = 'wx-ext-20260413-001'
);

View File

@@ -0,0 +1,31 @@
-- Idempotent guard: add ccdi_account_info.is_self_account only when the column
-- is still missing (checked via information_schema), so reruns are harmless.
SET @add_is_self_account_ddl = IF(
    NOT EXISTS(
        SELECT 1
        FROM information_schema.columns
        WHERE table_schema = DATABASE()
          AND table_name = 'ccdi_account_info'
          AND column_name = 'is_self_account'
    ),
    'ALTER TABLE `ccdi_account_info` ADD COLUMN `is_self_account` TINYINT NOT NULL DEFAULT 1 COMMENT ''是否本人账户'' AFTER `currency`',
    'SELECT 1'
);
PREPARE add_is_self_account_stmt FROM @add_is_self_account_ddl;
EXECUTE add_is_self_account_stmt;
DEALLOCATE PREPARE add_is_self_account_stmt;

-- Same guarded pattern for trans_risk_level, placed right after is_self_account.
SET @add_trans_risk_level_ddl = IF(
    NOT EXISTS(
        SELECT 1
        FROM information_schema.columns
        WHERE table_schema = DATABASE()
          AND table_name = 'ccdi_account_info'
          AND column_name = 'trans_risk_level'
    ),
    'ALTER TABLE `ccdi_account_info` ADD COLUMN `trans_risk_level` VARCHAR(32) DEFAULT NULL COMMENT ''交易风险等级'' AFTER `is_self_account`',
    'SELECT 1'
);
PREPARE add_trans_risk_level_stmt FROM @add_trans_risk_level_ddl;
EXECUTE add_trans_risk_level_stmt;
DEALLOCATE PREPARE add_trans_risk_level_stmt;

View File

@@ -0,0 +1,156 @@
-- Merge the account-library tables: fold the ccdi_account_result analysis
-- columns back into ccdi_account_info, then drop the legacy result table.
-- Execution note: run through bin/mysql_utf8_exec.sh when Chinese content is
-- involved, so the session character set stays utf8mb4.
DELIMITER //
DROP PROCEDURE IF EXISTS `merge_ccdi_account_result_into_info`//
CREATE PROCEDURE `merge_ccdi_account_result_into_info`()
BEGIN
DECLARE duplicate_count INT DEFAULT 0;
DECLARE result_table_exists INT DEFAULT 0;
-- Safety gate: the backfill below joins on account_no, so duplicate account_no
-- rows in ccdi_account_info would make the merge ambiguous. Abort if any exist.
SELECT COUNT(*)
INTO duplicate_count
FROM (
SELECT account_no
FROM ccdi_account_info
WHERE account_no IS NOT NULL
AND account_no <> ''
GROUP BY account_no
HAVING COUNT(*) > 1
) duplicated_accounts;
IF duplicate_count > 0 THEN
SIGNAL SQLSTATE '45000'
SET MESSAGE_TEXT = 'ccdi_account_info.account_no 存在重复数据,禁止执行账户库合表迁移。';
END IF;
-- Column backfill guards: each ALTER runs only when the column is still missing
-- (checked via information_schema), keeping the whole script re-runnable.
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'is_self_account'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `is_self_account` TINYINT NOT NULL DEFAULT 1 COMMENT '是否本人账户' AFTER `currency`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'monthly_avg_trans_count'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `monthly_avg_trans_count` INT DEFAULT NULL COMMENT '月均交易笔数' AFTER `is_self_account`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'monthly_avg_trans_amount'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `monthly_avg_trans_amount` DECIMAL(18, 2) DEFAULT NULL COMMENT '月均交易金额' AFTER `monthly_avg_trans_count`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'trans_freq_type'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `trans_freq_type` VARCHAR(32) DEFAULT NULL COMMENT '交易频率类型' AFTER `monthly_avg_trans_amount`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'dr_max_single_amount'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `dr_max_single_amount` DECIMAL(18, 2) DEFAULT NULL COMMENT '最大单笔支出金额' AFTER `trans_freq_type`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'cr_max_single_amount'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `cr_max_single_amount` DECIMAL(18, 2) DEFAULT NULL COMMENT '最大单笔收入金额' AFTER `dr_max_single_amount`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'dr_max_daily_amount'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `dr_max_daily_amount` DECIMAL(18, 2) DEFAULT NULL COMMENT '最大单日支出金额' AFTER `cr_max_single_amount`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'cr_max_daily_amount'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `cr_max_daily_amount` DECIMAL(18, 2) DEFAULT NULL COMMENT '最大单日收入金额' AFTER `dr_max_daily_amount`;
END IF;
IF NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_info'
AND column_name = 'trans_risk_level'
) THEN
ALTER TABLE `ccdi_account_info`
ADD COLUMN `trans_risk_level` VARCHAR(32) DEFAULT NULL COMMENT '交易风险等级' AFTER `cr_max_daily_amount`;
END IF;
-- Backfill from the legacy result table only if it still exists, then drop it
-- so the single-table model becomes the only source of truth.
SELECT COUNT(*)
INTO result_table_exists
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = 'ccdi_account_result';
IF result_table_exists > 0 THEN
UPDATE `ccdi_account_info` ai
JOIN `ccdi_account_result` ar
ON ai.account_no = ar.account_no
SET ai.is_self_account = ar.is_self_account,
ai.monthly_avg_trans_count = ar.monthly_avg_trans_count,
ai.monthly_avg_trans_amount = ar.monthly_avg_trans_amount,
ai.trans_freq_type = ar.trans_freq_type,
ai.dr_max_single_amount = ar.dr_max_single_amount,
ai.cr_max_single_amount = ar.cr_max_single_amount,
ai.dr_max_daily_amount = ar.dr_max_daily_amount,
ai.cr_max_daily_amount = ar.cr_max_daily_amount,
ai.trans_risk_level = ar.trans_risk_level,
-- Keep the newer audit fields when the result table carried them.
ai.update_by = COALESCE(ar.update_by, ai.update_by),
ai.update_time = COALESCE(ar.update_time, ai.update_time);
DROP TABLE `ccdi_account_result`;
END IF;
END//
CALL `merge_ccdi_account_result_into_info`()//
-- The procedure is a one-shot migration vehicle; remove it after the call.
DROP PROCEDURE IF EXISTS `merge_ccdi_account_result_into_info`//
DELIMITER ;

View File

@@ -0,0 +1,67 @@
from pathlib import Path
import subprocess

# Repository root, resolved relative to this test file (two directories up).
REPO_ROOT = Path(__file__).resolve().parents[2]
# Shell entry point under test for the TongWeb NAS deploy.
SCRIPT_PATH = REPO_ROOT / "deploy" / "deploy-to-nas-tongweb.sh"
# Remote-deploy helper script that the shell entry point is expected to call.
REMOTE_DEPLOY_PATH = REPO_ROOT / "deploy" / "remote-deploy-tongweb.py"
def test_tongweb_sh_dry_run_uses_default_target():
    """A dry-run with no arguments must print the built-in default deploy target."""
    completed = subprocess.run(
        ["bash", str(SCRIPT_PATH), "--dry-run"],
        cwd=REPO_ROOT,
        capture_output=True,
        text=True,
    )
    assert completed.returncode == 0
    # Each default connection/deploy parameter must appear in the summary.
    for line in (
        "Host: 116.62.17.81",
        "Port: 9444",
        "Username: wkc",
        "RemoteRoot: /volume1/webapp/ccdi",
        "TongWebHome: /opt/TongWeb",
        "AppName: ruoyi-admin",
    ):
        assert line in completed.stdout
def test_tongweb_sh_dry_run_accepts_override_arguments():
    """Positional overrides must flow through to the dry-run summary."""
    overrides = [
        "10.0.0.8",
        "2222",
        "deploy-user",
        "secret",
        "/volume2/custom/app",
        "/data/TongWeb7",
        "ccdi-console",
    ]
    completed = subprocess.run(
        ["bash", str(SCRIPT_PATH), *overrides, "--dry-run"],
        cwd=REPO_ROOT,
        capture_output=True,
        text=True,
    )
    assert completed.returncode == 0
    # The summary must echo every override (the password is intentionally omitted).
    for line in (
        "Host: 10.0.0.8",
        "Port: 2222",
        "Username: deploy-user",
        "RemoteRoot: /volume2/custom/app",
        "TongWebHome: /data/TongWeb7",
        "AppName: ccdi-console",
    ):
        assert line in completed.stdout
def test_tongweb_sh_script_should_call_remote_deploy_helper():
    """The shell entry point must delegate to the python helper and ship the war artifact."""
    content = SCRIPT_PATH.read_text(encoding="utf-8")
    for needle in ("remote-deploy-tongweb.py", "ruoyi-admin/target/ruoyi-admin.war"):
        assert needle in content
def test_remote_deploy_tongweb_should_use_autodeploy_and_tongweb_scripts():
    # The remote executor must publish through TongWeb's autodeploy directory and
    # restart via the stock stop/start scripts rather than ad-hoc commands.
    script_text = REMOTE_DEPLOY_PATH.read_text(encoding="utf-8")
    assert "autodeploy" in script_text
    assert "startservernohup.sh" in script_text
    assert "stopserver.sh" in script_text