fix: 修复中介导入成功条数计算错误
问题: - 导入成功条数显示为负数 - 原因:成功数量计算使用 validRecords.size() - failures.size() - 但没有使用实际的数据库操作返回值 修复: - saveBatchWithUpsert 和 saveBatch 方法现在返回 int - 累加实际的数据库影响行数 - 使用 actualSuccessCount 变量跟踪真实成功数量 影响范围: - CcdiIntermediaryPersonImportServiceImpl - CcdiIntermediaryEntityImportServiceImpl
This commit is contained in:
191
doc/test-data/purchase_transaction/node_modules/exceljs/lib/csv/csv.js
generated
vendored
Normal file
191
doc/test-data/purchase_transaction/node_modules/exceljs/lib/csv/csv.js
generated
vendored
Normal file
@@ -0,0 +1,191 @@
|
||||
const fs = require('fs');
const fastCsv = require('fast-csv');
const customParseFormat = require('dayjs/plugin/customParseFormat');
const utc = require('dayjs/plugin/utc');
const dayjs = require('dayjs').extend(customParseFormat).extend(utc);
const StreamBuf = require('../utils/stream-buf');

const {
  fs: {exists},
} = require('../utils/utils');

/* eslint-disable quote-props */
// CSV cell strings that must not be imported as plain text: booleans and the
// Excel error literals (imported as `{error: '...'}` cell-value objects).
const SpecialValues = {
  true: true,
  false: false,
  '#N/A': {error: '#N/A'},
  '#REF!': {error: '#REF!'},
  '#NAME?': {error: '#NAME?'},
  '#DIV/0!': {error: '#DIV/0!'},
  '#NULL!': {error: '#NULL!'},
  '#VALUE!': {error: '#VALUE!'},
  '#NUM!': {error: '#NUM!'},
};
/* eslint-enable quote-props */
|
||||
|
||||
/**
 * CSV adapter for an exceljs workbook: reads a CSV stream/file into a newly
 * created worksheet, and serializes a worksheet back out via fast-csv.
 */
class CSV {
  /**
   * @param {Workbook} workbook - owning exceljs workbook; rows read from CSV
   *   are added to a worksheet created on this workbook.
   */
  constructor(workbook) {
    this.workbook = workbook;
    this.worksheet = null;
  }

  /**
   * Read a CSV file into a new worksheet.
   * @param {string} filename - path to the CSV file
   * @param {Object} [options] - passed through to `read`
   * @returns {Promise<Worksheet>} resolved with the populated worksheet
   * @throws {Error} when the file does not exist
   */
  async readFile(filename, options) {
    options = options || {};
    if (!(await exists(filename))) {
      // BUG FIX: interpolate the actual filename; the previous message
      // contained a broken `$(unknown)` literal instead of `${filename}`.
      throw new Error(`File not found: ${filename}`);
    }
    const stream = fs.createReadStream(filename);
    const worksheet = await this.read(stream, options);
    stream.close();
    return worksheet;
  }

  /**
   * Parse CSV from a readable stream into a new worksheet.
   *
   * Each cell passes through `options.map`, or a default mapper that
   * recognises empty strings (-> null), numbers, dates matching
   * `options.dateFormats` (strict dayjs parsing), and the entries of
   * `SpecialValues` (booleans and Excel error literals).
   *
   * @param {stream.Readable} stream - CSV source
   * @param {Object} [options]
   * @param {string} [options.sheetName] - name for the created worksheet
   * @param {string[]} [options.dateFormats] - dayjs formats tried in order
   * @param {Function} [options.map] - per-cell value mapper
   * @param {Object} [options.parserOptions] - forwarded to fast-csv `parse`
   * @returns {Promise<Worksheet>}
   */
  read(stream, options) {
    options = options || {};

    return new Promise((resolve, reject) => {
      const worksheet = this.workbook.addWorksheet(options.sheetName);

      const dateFormats = options.dateFormats || [
        'YYYY-MM-DD[T]HH:mm:ssZ',
        'YYYY-MM-DD[T]HH:mm:ss',
        'MM-DD-YYYY',
        'YYYY-MM-DD',
      ];
      const map =
        options.map ||
        function(datum) {
          if (datum === '') {
            return null;
          }
          const datumNumber = Number(datum);
          // NOTE: only +Infinity is rejected here; '-Infinity' would still be
          // returned as a number (kept as-is for exceljs compatibility).
          if (!Number.isNaN(datumNumber) && datumNumber !== Infinity) {
            return datumNumber;
          }
          // Try each configured date format in order; strict parsing (3rd arg
          // true) so e.g. '12' does not accidentally match a date format.
          const dt = dateFormats.reduce((matchingDate, currentDateFormat) => {
            if (matchingDate) {
              return matchingDate;
            }
            const dayjsObj = dayjs(datum, currentDateFormat, true);
            if (dayjsObj.isValid()) {
              return dayjsObj;
            }
            return null;
          }, null);
          if (dt) {
            return new Date(dt.valueOf());
          }
          const special = SpecialValues[datum];
          if (special !== undefined) {
            return special;
          }
          return datum;
        };

      const csvStream = fastCsv
        .parse(options.parserOptions)
        .on('data', data => {
          worksheet.addRow(data.map(map));
        })
        .on('end', () => {
          csvStream.emit('worksheet', worksheet);
        });

      csvStream.on('worksheet', resolve).on('error', reject);

      stream.pipe(csvStream);
    });
  }

  /**
   * @deprecated since version 4.0. You should use `CSV#read` instead. Please follow upgrade instruction: https://github.com/exceljs/exceljs/blob/master/UPGRADE-4.0.md
   */
  createInputStream() {
    throw new Error(
      '`CSV#createInputStream` is deprecated. You should use `CSV#read` instead. This method will be removed in version 5.0. Please follow upgrade instruction: https://github.com/exceljs/exceljs/blob/master/UPGRADE-4.0.md'
    );
  }

  /**
   * Serialize a worksheet to CSV on the given writable stream.
   *
   * Cell values pass through `options.map`, or a default mapper that unwraps
   * hyperlink/formula cell objects, formats Dates with dayjs (optionally with
   * `options.dateFormat` / `options.dateUTC`), emits error literals, and
   * JSON-stringifies any other object.
   *
   * @param {stream.Writable} stream - destination
   * @param {Object} [options]
   * @param {string|number} [options.sheetName] - worksheet lookup key
   * @param {number} [options.sheetId] - fallback worksheet lookup key
   * @param {Object} [options.formatterOptions] - forwarded to fast-csv `format`
   * @param {boolean} [options.includeEmptyRows=true] - pad skipped row numbers
   * @returns {Promise<void>} resolved when the destination stream finishes
   */
  write(stream, options) {
    return new Promise((resolve, reject) => {
      options = options || {};
      // const encoding = options.encoding || 'utf8';
      // const separator = options.separator || ',';
      // const quoteChar = options.quoteChar || '\'';

      const worksheet = this.workbook.getWorksheet(options.sheetName || options.sheetId);

      const csvStream = fastCsv.format(options.formatterOptions);
      stream.on('finish', () => {
        resolve();
      });
      csvStream.on('error', reject);
      csvStream.pipe(stream);

      const {dateFormat, dateUTC} = options;
      const map =
        options.map ||
        (value => {
          if (value) {
            if (value.text || value.hyperlink) {
              return value.hyperlink || value.text || '';
            }
            if (value.formula || value.result) {
              return value.result || '';
            }
            if (value instanceof Date) {
              if (dateFormat) {
                return dateUTC
                  ? dayjs.utc(value).format(dateFormat)
                  : dayjs(value).format(dateFormat);
              }
              return dateUTC ? dayjs.utc(value).format() : dayjs(value).format();
            }
            if (value.error) {
              return value.error;
            }
            if (typeof value === 'object') {
              return JSON.stringify(value);
            }
          }
          return value;
        });

      const includeEmptyRows = options.includeEmptyRows === undefined || options.includeEmptyRows;
      let lastRow = 1;
      if (worksheet) {
        worksheet.eachRow((row, rowNumber) => {
          if (includeEmptyRows) {
            // emit blank CSV rows for row numbers the worksheet skipped
            while (lastRow++ < rowNumber - 1) {
              csvStream.write([]);
            }
          }
          const {values} = row;
          values.shift(); // row.values is 1-based; index 0 is unused
          csvStream.write(values.map(map));
          lastRow = rowNumber;
        });
      }
      csvStream.end();
    });
  }

  /**
   * Write a worksheet to a CSV file.
   * @param {string} filename - destination path
   * @param {Object} [options] - `encoding` plus everything `write` accepts
   * @returns {Promise<void>}
   */
  writeFile(filename, options) {
    options = options || {};

    const streamOptions = {
      encoding: options.encoding || 'utf8',
    };
    const stream = fs.createWriteStream(filename, streamOptions);

    return this.write(stream, options);
  }

  /**
   * Write a worksheet to CSV in memory.
   * @param {Object} [options] - see `write`
   * @returns {Promise<Buffer>} the CSV content
   */
  async writeBuffer(options) {
    const stream = new StreamBuf();
    await this.write(stream, options);
    return stream.read();
  }
}
|
||||
|
||||
module.exports = CSV;
|
||||
74
doc/test-data/purchase_transaction/node_modules/exceljs/lib/csv/line-buffer.js
generated
vendored
Normal file
74
doc/test-data/purchase_transaction/node_modules/exceljs/lib/csv/line-buffer.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
const {EventEmitter} = require('events');
|
||||
|
||||
/**
 * LineBuffer — accumulates written text chunks and re-emits them one line at
 * a time, buffering any trailing partial line until the next write or end().
 *
 * Events:
 *   line:  emitted with each complete line
 *   done:  all lines emitted (after end())
 *   drain: emitted by uncork() to tell the source it may resume writing
 */
class LineBuffer extends EventEmitter {
  /**
   * @param {Object} options
   * @param {string} options.encoding - stored for callers; not used internally
   */
  constructor(options) {
    super();

    this.encoding = options.encoding;

    // trailing partial line carried over between writes
    this.buffer = null;

    // part of cork/uncork: while corked, complete lines are queued
    // instead of emitted
    this.corked = false;
    this.queue = [];
  }

  /**
   * Split `chunk` into lines and emit them (or queue them while corked).
   * The text after the last newline is kept in `this.buffer`.
   * @param {string} chunk - incoming text
   * @returns {boolean} false while corked (back-pressure), true otherwise
   */
  write(chunk) {
    // find line or lines in chunk and emit them if not corked
    // or queue them if corked
    const data = this.buffer ? this.buffer + chunk : chunk;
    const lines = data.split(/\r?\n/g);

    // save the last (possibly incomplete) line
    this.buffer = lines.pop();

    // BUG FIX: must be an arrow function so `this` stays bound to the
    // LineBuffer instance; the previous plain `function` callback lost
    // `this` (undefined in strict mode), crashing on `this.corked`.
    lines.forEach(line => {
      if (this.corked) {
        this.queue.push(line);
      } else {
        this.emit('line', line);
      }
    });

    return !this.corked;
  }

  /** Pause emission: subsequent complete lines are queued. */
  cork() {
    this.corked = true;
  }

  /** Resume emission: flush queued lines and signal 'drain'. */
  uncork() {
    this.corked = false;
    this._flush();

    // tell the source I'm ready again
    this.emit('drain');
  }

  setDefaultEncoding() {
    // ?
  }

  /** Emit any buffered partial line, then signal 'done'. */
  end() {
    if (this.buffer) {
      this.emit('line', this.buffer);
      this.buffer = null;
    }
    this.emit('done');
  }

  // Emit all queued lines (no-op while still corked).
  _flush() {
    if (!this.corked) {
      this.queue.forEach(line => {
        this.emit('line', line);
      });
      this.queue = [];
    }
  }
}
|
||||
|
||||
module.exports = LineBuffer;
|
||||
135
doc/test-data/purchase_transaction/node_modules/exceljs/lib/csv/stream-converter.js
generated
vendored
Normal file
135
doc/test-data/purchase_transaction/node_modules/exceljs/lib/csv/stream-converter.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
// =======================================================================================================
// StreamConverter
//
// convert between encoding schemes in a stream
// Work in Progress - Will complete this at some point
// NOTE(review): `jconv` is declared here but never assigned anywhere in this
// file — presumably it is meant to be lazily require()'d. As written, any
// conversion between differing encodings will throw; confirm before relying
// on non-UTF8 paths.
let jconv;
|
||||
|
||||
/**
 * StreamConverter — wraps an inner stream and converts text encoding (and
 * byte-order marks) between the "outer" side (callers of this wrapper) and
 * the "inner" side (the wrapped stream).
 *
 * Work in progress (see header comment): `jconv` is never assigned, so any
 * path where innerEncoding !== outerEncoding throws at runtime.
 */
class StreamConverter {
  constructor(inner, options) {
    this.inner = inner;

    options = options || {};
    // encodings are normalised to upper case; both default to UTF8
    this.innerEncoding = (options.innerEncoding || 'UTF8').toUpperCase();
    this.outerEncoding = (options.outerEncoding || 'UTF8').toUpperCase();

    // optional BOMs for each side — assumed Buffer-like (length, copy);
    // TODO confirm callers always pass Buffers
    this.innerBOM = options.innerBOM || null;
    this.outerBOM = options.outerBOM || null;

    // set true after the first write(); gates the one-time BOM handling
    this.writeStarted = false;
  }

  // Convert data travelling outer -> inner (i.e. being written).
  // Strings are first decoded using the outer encoding.
  convertInwards(data) {
    if (data) {
      if (typeof data === 'string') {
        data = Buffer.from(data, this.outerEncoding);
      }

      if (this.innerEncoding !== this.outerEncoding) {
        // NOTE(review): jconv is never loaded in this file — this throws
        data = jconv.convert(data, this.outerEncoding, this.innerEncoding);
      }
    }

    return data;
  }

  // Convert data travelling inner -> outer (i.e. being read).
  // Unlike convertInwards, null/undefined is not guarded here.
  convertOutwards(data) {
    if (typeof data === 'string') {
      data = Buffer.from(data, this.innerEncoding);
    }

    if (this.innerEncoding !== this.outerEncoding) {
      data = jconv.convert(data, this.innerEncoding, this.outerEncoding);
    }
    return data;
  }

  // Listener management is delegated straight to the inner stream
  // (no conversion wrapping — contrast with on('data') below).
  addListener(event, handler) {
    this.inner.addListener(event, handler);
  }

  removeListener(event, handler) {
    this.inner.removeListener(event, handler);
  }

  /**
   * Write data to the inner stream, converting encoding. On the very first
   * write: emit the inner BOM (if any) and strip the outer BOM (if any) from
   * the front of `data`.
   * @param {Buffer|string} data
   * @param {string|Function} [encoding] - or the callback, Node-style
   * @param {Function} [callback]
   */
  write(data, encoding, callback) {
    // Node-style optional-argument shuffle: write(data, cb)
    if (encoding instanceof Function) {
      callback = encoding;
      encoding = undefined;
    }

    if (!this.writeStarted) {
      // if inner encoding has BOM, write it now
      if (this.innerBOM) {
        this.inner.write(this.innerBOM);
      }

      // if outer encoding has BOM, delete it now
      if (this.outerBOM) {
        // chunk no longer than the BOM itself: nothing left to forward.
        // NOTE(review): `<=` also drops a chunk exactly equal in length to
        // the BOM, and writeStarted stays false — confirm intended.
        if (data.length <= this.outerBOM.length) {
          if (callback) {
            callback();
          }
          return;
        }
        // copy everything after the BOM into a fresh buffer
        const bomless = Buffer.alloc(data.length - this.outerBOM.length);
        data.copy(bomless, 0, this.outerBOM.length, data.length);
        data = bomless;
      }

      this.writeStarted = true;
    }

    this.inner.write(
      this.convertInwards(data),
      encoding ? this.innerEncoding : undefined,
      callback
    );
  }

  read() {
    // TBD
  }

  /**
   * Pipe through to `destination`, wrapping it in a mirror-image converter
   * (inner/outer encodings and BOMs swapped) so data leaving the inner
   * stream is converted back for the destination.
   */
  pipe(destination, options) {
    const reverseConverter = new StreamConverter(destination, {
      innerEncoding: this.outerEncoding,
      outerEncoding: this.innerEncoding,
      innerBOM: this.outerBOM,
      outerBOM: this.innerBOM,
    });

    this.inner.pipe(reverseConverter, options);
  }

  close() {
    this.inner.close();
  }

  /**
   * Subscribe to inner-stream events. 'data' chunks are converted outwards
   * before reaching the callback; all other events pass through untouched.
   * @returns {StreamConverter} this, for chaining
   */
  on(type, callback) {
    switch (type) {
      case 'data':
        this.inner.on('data', chunk => {
          callback(this.convertOutwards(chunk));
        });
        return this;
      default:
        this.inner.on(type, callback);
        return this;
    }
  }

  // NOTE(review): unlike on(), once('data') does NOT convert the chunk,
  // and nothing is returned — confirm whether that asymmetry is intended.
  once(type, callback) {
    this.inner.once(type, callback);
  }

  // Final chunk is converted inwards; always passes the inner encoding.
  end(chunk, encoding, callback) {
    this.inner.end(this.convertInwards(chunk), this.innerEncoding, callback);
  }

  emit(type, value) {
    this.inner.emit(type, value);
  }
}
|
||||
|
||||
module.exports = StreamConverter;
|
||||
Reference in New Issue
Block a user