fix: 修复中介导入成功条数计算错误

问题:
- 导入成功条数显示为负数
- 原因:成功数量计算使用 validRecords.size() - failures.size()
- 但没有使用实际的数据库操作返回值

修复:
- saveBatchWithUpsert 和 saveBatch 方法现在返回 int
- 累加实际的数据库影响行数
- 使用 actualSuccessCount 变量跟踪真实成功数量

影响范围:
- CcdiIntermediaryPersonImportServiceImpl
- CcdiIntermediaryEntityImportServiceImpl
This commit is contained in:
wkc
2026-02-08 17:18:18 +08:00
parent bb0d68c41d
commit 5ec5913759
2058 changed files with 234134 additions and 269 deletions

View File

@@ -0,0 +1,15 @@
const {EventEmitter} = require('events');
// =============================================================================
// AutoDrain - a data sink in the spirit of /dev/null: anything written is
// immediately re-emitted as a 'data' event and then forgotten. Useful as a
// pipe destination when the bytes themselves don't need to be kept.
class AutoDrain extends EventEmitter {
  // Accept a chunk and pass it straight through as a 'data' event.
  write(chunk) {
    this.emit('data', chunk);
  }
  // Signal that the producer is done; emits 'end'.
  end() {
    this.emit('end');
  }
}
module.exports = AutoDrain;

View File

@@ -0,0 +1,14 @@
// eslint-disable-next-line node/no-unsupported-features/node-builtins
const textDecoder = typeof TextDecoder === 'undefined' ? null : new TextDecoder('utf-8');
/**
 * Convert a chunk to a utf-8 string.
 * Strings pass through untouched; binary chunks are decoded with TextDecoder
 * when the platform provides it, falling back to Buffer#toString otherwise.
 */
function bufferToString(chunk) {
  if (typeof chunk === 'string') {
    return chunk;
  }
  return textDecoder ? textDecoder.decode(chunk) : chunk.toString();
}
exports.bufferToString = bufferToString;

View File

@@ -0,0 +1,15 @@
// eslint-disable-next-line node/no-unsupported-features/node-builtins
const textEncoder = typeof TextEncoder === 'undefined' ? null : new TextEncoder('utf-8');
const {Buffer} = require('buffer');
/**
 * Convert a string to a utf-8 Buffer.
 * Non-string values are returned unchanged. Uses TextEncoder when available,
 * otherwise Buffer.from.
 */
function stringToBuffer(str) {
  if (typeof str !== 'string') {
    return str;
  }
  return textEncoder ? Buffer.from(textEncoder.encode(str).buffer) : Buffer.from(str);
}
exports.stringToBuffer = stringToBuffer;

View File

@@ -0,0 +1,165 @@
const _ = require('./under-dash');
const colCache = require('./col-cache');
// CellMatrix - a sparse store of cell objects organised as
// sheetName -> sparse row array -> sparse column array. Cells are created
// on demand; when a `template` is supplied, each new cell is seeded with a
// deep copy of it (see findRowCell).
class CellMatrix {
  constructor(template) {
    // optional object deep-cloned into every cell created via findRowCell
    this.template = template;
    // sheetName -> sparse array (by row) of sparse arrays (by column)
    this.sheets = {};
  }
  // Ensure the cell(s) described by the address string exist (single cell or range).
  addCell(addressStr) {
    this.addCellEx(colCache.decodeEx(addressStr));
  }
  // Return the cell at the address, creating it if necessary.
  getCell(addressStr) {
    return this.findCellEx(colCache.decodeEx(addressStr), true);
  }
  // Return the cell at the address, or undefined when it was never added.
  findCell(addressStr) {
    return this.findCellEx(colCache.decodeEx(addressStr), false);
  }
  // Direct lookup by sheet/row/col; never creates anything.
  findCellAt(sheetName, rowNumber, colNumber) {
    const sheet = this.sheets[sheetName];
    const row = sheet && sheet[rowNumber];
    return row && row[colNumber];
  }
  // Create cells for a decoded address; range addresses (those carrying
  // top/bottom/left/right) are expanded cell by cell.
  addCellEx(address) {
    if (address.top) {
      for (let row = address.top; row <= address.bottom; row++) {
        for (let col = address.left; col <= address.right; col++) {
          this.getCellAt(address.sheetName, row, col);
        }
      }
    } else {
      this.findCellEx(address, true);
    }
  }
  // Like findCellEx but always creates missing sheet/row/cell.
  getCellEx(address) {
    return this.findCellEx(address, true);
  }
  // Walk sheet -> row -> cell for a decoded address; `create` controls
  // whether missing levels are materialised along the way.
  findCellEx(address, create) {
    const sheet = this.findSheet(address, create);
    const row = this.findSheetRow(sheet, address, create);
    return this.findRowCell(row, address, create);
  }
  // Get (or create) the cell at sheet/row/col, building a plain cell object
  // with its address when absent. Note: does NOT apply the template.
  getCellAt(sheetName, rowNumber, colNumber) {
    const sheet = this.sheets[sheetName] || (this.sheets[sheetName] = []);
    const row = sheet[rowNumber] || (sheet[rowNumber] = []);
    const cell =
      row[colNumber] ||
      (row[colNumber] = {
        sheetName,
        address: colCache.n2l(colNumber) + rowNumber,
        row: rowNumber,
        col: colNumber,
      });
    return cell;
  }
  // Delete the cell at the decoded address, if present.
  removeCellEx(address) {
    const sheet = this.findSheet(address);
    if (!sheet) {
      return;
    }
    const row = this.findSheetRow(sheet, address);
    if (!row) {
      return;
    }
    delete row[address.col];
  }
  // Invoke callback(cell, rowNumber, colNumber) for every existing cell of
  // one sheet, skipping sparse holes.
  forEachInSheet(sheetName, callback) {
    const sheet = this.sheets[sheetName];
    if (sheet) {
      sheet.forEach((row, rowNumber) => {
        if (row) {
          row.forEach((cell, colNumber) => {
            if (cell) {
              callback(cell, rowNumber, colNumber);
            }
          });
        }
      });
    }
  }
  // Invoke callback for every cell across all sheets.
  forEach(callback) {
    _.each(this.sheets, (sheet, sheetName) => {
      this.forEachInSheet(sheetName, callback);
    });
  }
  // Map every cell through callback, collecting the results in an array.
  map(callback) {
    const results = [];
    this.forEach(cell => {
      results.push(callback(cell));
    });
    return results;
  }
  // Find (or create) the sheet row-array for the address's sheet name.
  findSheet(address, create) {
    const name = address.sheetName;
    if (this.sheets[name]) {
      return this.sheets[name];
    }
    if (create) {
      return (this.sheets[name] = []);
    }
    return undefined;
  }
  // Find (or create) the row cell-array within a sheet.
  // Assumes `sheet` is defined whenever create is true (callers guarantee this).
  findSheetRow(sheet, address, create) {
    const {row} = address;
    if (sheet && sheet[row]) {
      return sheet[row];
    }
    if (create) {
      return (sheet[row] = []);
    }
    return undefined;
  }
  // Find (or create) the cell within a row. On create, the decoded address
  // object itself becomes the cell (intentionally mutated/aliased), merged
  // with a deep copy of the template when one was provided.
  findRowCell(row, address, create) {
    const {col} = address;
    if (row && row[col]) {
      return row[col];
    }
    if (create) {
      return (row[col] = this.template
        ? Object.assign(address, JSON.parse(JSON.stringify(this.template)))
        : address);
    }
    return undefined;
  }
  // Remove `numDelete` rows at `start` and insert `numInsert` empty rows,
  // shifting later rows accordingly.
  spliceRows(sheetName, start, numDelete, numInsert) {
    const sheet = this.sheets[sheetName];
    if (sheet) {
      const inserts = [];
      for (let i = 0; i < numInsert; i++) {
        inserts.push([]);
      }
      sheet.splice(start, numDelete, ...inserts);
    }
  }
  // Remove/insert columns across every row of the sheet (inserted cells are null).
  spliceColumns(sheetName, start, numDelete, numInsert) {
    const sheet = this.sheets[sheetName];
    if (sheet) {
      const inserts = [];
      for (let i = 0; i < numInsert; i++) {
        inserts.push(null);
      }
      _.each(sheet, row => {
        row.splice(start, numDelete, ...inserts);
      });
    }
  }
}
module.exports = CellMatrix;

View File

@@ -0,0 +1,287 @@
// quick shape check for plain "A1"-style addresses (no $, no sheet prefix)
const addressRegex = /^[A-Z]+\d+$/;
// =========================================================================
// Column Letter to Number conversion
const colCache = {
  _dictionary: [
    'A',
    'B',
    'C',
    'D',
    'E',
    'F',
    'G',
    'H',
    'I',
    'J',
    'K',
    'L',
    'M',
    'N',
    'O',
    'P',
    'Q',
    'R',
    'S',
    'T',
    'U',
    'V',
    'W',
    'X',
    'Y',
    'Z',
  ],
  // highest "level" of the lookup tables populated so far (0..3)
  _l2nFill: 0,
  // column letters -> column number, filled lazily by _fill()
  _l2n: {},
  // column number -> column letters, filled lazily by _fill()
  _n2l: [],
  // number of letters needed to express column n (1: A-Z, 2: AA-ZZ, 3: AAA-XFD)
  _level(n) {
    if (n <= 26) {
      return 1;
    }
    if (n <= 26 * 26) {
      return 2;
    }
    return 3;
  },
  // Populate the l2n/n2l lookup tables up to the requested letter-count level.
  // Each level is filled at most once (_l2nFill remembers progress).
  _fill(level) {
    let c;
    let v;
    let l1;
    let l2;
    let l3;
    let n = 1;
    if (level >= 4) {
      throw new Error('Out of bounds. Excel supports columns from 1 to 16384');
    }
    // level 1: single letters A..Z (columns 1..26)
    if (this._l2nFill < 1 && level >= 1) {
      while (n <= 26) {
        c = this._dictionary[n - 1];
        this._n2l[n] = c;
        this._l2n[c] = n;
        n++;
      }
      this._l2nFill = 1;
    }
    // level 2: double letters AA..ZZ (columns 27..702)
    if (this._l2nFill < 2 && level >= 2) {
      n = 27;
      while (n <= 26 + (26 * 26)) {
        v = n - (26 + 1);
        l1 = v % 26;
        l2 = Math.floor(v / 26);
        c = this._dictionary[l2] + this._dictionary[l1];
        this._n2l[n] = c;
        this._l2n[c] = n;
        n++;
      }
      this._l2nFill = 2;
    }
    // level 3: triple letters AAA..XFD (columns 703..16384, Excel's max)
    if (this._l2nFill < 3 && level >= 3) {
      n = 26 + (26 * 26) + 1;
      while (n <= 16384) {
        v = n - ((26 * 26) + 26 + 1);
        l1 = v % 26;
        l2 = Math.floor(v / 26) % 26;
        l3 = Math.floor(v / (26 * 26));
        c = this._dictionary[l3] + this._dictionary[l2] + this._dictionary[l1];
        this._n2l[n] = c;
        this._l2n[c] = n;
        n++;
      }
      this._l2nFill = 3;
    }
  },
  // column letters -> number, e.g. 'AB' -> 28; throws on invalid letters
  l2n(l) {
    if (!this._l2n[l]) {
      this._fill(l.length);
    }
    if (!this._l2n[l]) {
      throw new Error(`Out of bounds. Invalid column letter: ${l}`);
    }
    return this._l2n[l];
  },
  // column number -> letters, e.g. 28 -> 'AB'; throws outside 1..16384
  n2l(n) {
    if (n < 1 || n > 16384) {
      throw new Error(`${n} is out of bounds. Excel supports columns from 1 to 16384`);
    }
    if (!this._n2l[n]) {
      this._fill(this._level(n));
    }
    return this._n2l[n];
  },
  // =========================================================================
  // Address processing
  // cache of decoded addresses (only small ones are stored - see decodeAddress)
  _hash: {},
  // check if value looks like an address
  validateAddress(value) {
    if (!addressRegex.test(value)) {
      throw new Error(`Invalid Address: ${value}`);
    }
    return true;
  },
  // convert address string into structure
  // Accepts optional $ markers (e.g. "$A$1"); returns {address, col, row, $col$row}.
  decodeAddress(value) {
    // short addresses (< 5 chars) may already be cached
    const addr = value.length < 5 && this._hash[value];
    if (addr) {
      return addr;
    }
    let hasCol = false;
    let col = '';
    let colNumber = 0;
    let hasRow = false;
    let row = '';
    let rowNumber = 0;
    for (let i = 0, char; i < value.length; i++) {
      char = value.charCodeAt(i);
      // col should before row
      if (!hasRow && char >= 65 && char <= 90) {
        // 65 = 'A'.charCodeAt(0)
        // 90 = 'Z'.charCodeAt(0)
        hasCol = true;
        col += value[i];
        // colNumber starts from 1
        colNumber = (colNumber * 26) + char - 64;
      } else if (char >= 48 && char <= 57) {
        // 48 = '0'.charCodeAt(0)
        // 57 = '9'.charCodeAt(0)
        hasRow = true;
        row += value[i];
        // rowNumber starts from 0
        rowNumber = (rowNumber * 10) + char - 48;
      } else if (hasRow && hasCol && char !== 36) {
        // 36 = '$'.charCodeAt(0)
        // any other character after both parts have been seen ends the address
        break;
      }
    }
    if (!hasCol) {
      colNumber = undefined;
    } else if (colNumber > 16384) {
      throw new Error(`Out of bounds. Invalid column letter: ${col}`);
    }
    if (!hasRow) {
      rowNumber = undefined;
    }
    // in case $row$col
    value = col + row;
    const address = {
      address: value,
      col: colNumber,
      row: rowNumber,
      $col$row: `$${col}$${row}`,
    };
    // mem fix - cache only the tl 100x100 square
    if (colNumber <= 100 && rowNumber <= 100) {
      this._hash[value] = address;
      this._hash[address.$col$row] = address;
    }
    return address;
  },
  // convert r,c into structure (if only 1 arg, assume r is address string)
  getAddress(r, c) {
    if (c) {
      const address = this.n2l(c) + r;
      return this.decodeAddress(address);
    }
    return this.decodeAddress(r);
  },
  // convert [address], [tl:br] into address structures
  decode(value) {
    const parts = value.split(':');
    if (parts.length === 2) {
      const tl = this.decodeAddress(parts[0]);
      const br = this.decodeAddress(parts[1]);
      // normalise so top-left really is top-left even for "B2:A1" style input
      const result = {
        top: Math.min(tl.row, br.row),
        left: Math.min(tl.col, br.col),
        bottom: Math.max(tl.row, br.row),
        right: Math.max(tl.col, br.col),
      };
      // reconstruct tl, br and dimensions
      result.tl = this.n2l(result.left) + result.top;
      result.br = this.n2l(result.right) + result.bottom;
      result.dimensions = `${result.tl}:${result.br}`;
      return result;
    }
    return this.decodeAddress(value);
  },
  // convert [sheetName!][$]col[$]row[[$]col[$]row] into address or range structures
  decodeEx(value) {
    const groups = value.match(/(?:(?:(?:'((?:[^']|'')*)')|([^'^ !]*))!)?(.*)/);
    const sheetName = groups[1] || groups[2]; // Qouted and unqouted groups
    const reference = groups[3]; // Remaining address
    const parts = reference.split(':');
    if (parts.length > 1) {
      let tl = this.decodeAddress(parts[0]);
      let br = this.decodeAddress(parts[1]);
      const top = Math.min(tl.row, br.row);
      const left = Math.min(tl.col, br.col);
      const bottom = Math.max(tl.row, br.row);
      const right = Math.max(tl.col, br.col);
      // tl/br are reused as strings from here on
      tl = this.n2l(left) + top;
      br = this.n2l(right) + bottom;
      return {
        top,
        left,
        bottom,
        right,
        sheetName,
        tl: {address: tl, col: left, row: top, $col$row: `$${this.n2l(left)}$${top}`, sheetName},
        br: {
          address: br,
          col: right,
          row: bottom,
          $col$row: `$${this.n2l(right)}$${bottom}`,
          sheetName,
        },
        dimensions: `${tl}:${br}`,
      };
    }
    // error references like #REF! are passed through rather than decoded
    if (reference.startsWith('#')) {
      return sheetName ? {sheetName, error: reference} : {error: reference};
    }
    const address = this.decodeAddress(reference);
    return sheetName ? {sheetName, ...address} : address;
  },
  // convert row,col into address string
  encodeAddress(row, col) {
    return colCache.n2l(col) + row;
  },
  // convert row,col into string address or t,l,b,r into range
  encode() {
    switch (arguments.length) {
      case 2:
        return colCache.encodeAddress(arguments[0], arguments[1]);
      case 4:
        return `${colCache.encodeAddress(arguments[0], arguments[1])}:${colCache.encodeAddress(
          arguments[2],
          arguments[3]
        )}`;
      default:
        throw new Error('Can only encode with 2 or 4 arguments');
    }
  },
  // return true if address is contained within range
  // NOTE(review): `range` is array-shaped with an unused element at index 2 —
  // confirm the exact layout against callers before changing.
  inRange(range, address) {
    const [left, top, , right, bottom] = range;
    const [col, row] = address;
    return col >= left && col <= right && row >= top && row <= bottom;
  },
};
module.exports = colCache;

View File

@@ -0,0 +1,43 @@
// Shallow-copy `obj`, additionally cloning one level deep each property named
// in `nestKeys` that is present (truthy) on `obj`.
const oneDepthCopy = (obj, nestKeys) => {
  const result = {...obj};
  for (const key of nestKeys) {
    if (obj[key]) {
      result[key] = {...obj[key]};
    }
  }
  return result;
};
// When src[key] is truthy, copy it onto dst, cloning the listed nested keys too.
const setIfExists = (src, dst, key, nestKeys = []) => {
  if (!src[key]) {
    return;
  }
  dst[key] = oneDepthCopy(src[key], nestKeys);
};
const isEmptyObj = obj => Object.keys(obj).length === 0;
/**
 * Clone a cell style object deeply enough that mutating the copy never
 * affects the original (font/alignment/protection/border/fill and their
 * color sub-objects all get their own instances).
 * Falsy input is returned as-is; an empty object yields a fresh empty object.
 */
const copyStyle = style => {
  if (!style) return style;
  if (isEmptyObj(style)) return {};
  const copied = {...style};
  setIfExists(style, copied, 'font', ['color']);
  setIfExists(style, copied, 'alignment');
  setIfExists(style, copied, 'protection');
  if (style.border) {
    setIfExists(style, copied, 'border');
    for (const side of ['top', 'left', 'bottom', 'right', 'diagonal']) {
      setIfExists(style.border, copied.border, side, ['color']);
    }
  }
  if (style.fill) {
    setIfExists(style, copied, 'fill', ['fgColor', 'bgColor', 'center']);
    if (style.fill.stops) {
      // gradient stops: clone each stop and its color
      copied.fill.stops = style.fill.stops.map(s => oneDepthCopy(s, ['color']));
    }
  }
  return copied;
};
exports.copyStyle = copyStyle;

View File

@@ -0,0 +1,55 @@
'use strict';
const crypto = require('crypto');
const Encryptor = {
  /**
   * Hash the concatenation of the supplied buffers.
   * @param {string} algorithm - Hash algorithm name, as accepted by crypto.createHash.
   * @param {...Buffer} buffers - Buffers to concatenate and digest.
   * @returns {Buffer} The digest.
   */
  hash(algorithm, ...buffers) {
    return crypto
      .createHash(algorithm)
      .update(Buffer.concat(buffers))
      .digest();
  },
  /**
   * Derive the password hash used for Excel worksheet/workbook protection.
   * @param {string} password - The plain-text password.
   * @param {string} hashAlgorithm - Hash algorithm name (case-insensitive).
   * @param {string} saltValue - Base64-encoded salt.
   * @param {number} spinCount - Number of re-hash iterations.
   * @returns {string} Base64-encoded hash.
   */
  convertPasswordToHash(password, hashAlgorithm, saltValue, spinCount) {
    hashAlgorithm = hashAlgorithm.toLowerCase();
    if (!crypto.getHashes().includes(hashAlgorithm)) {
      throw new Error(`Hash algorithm '${hashAlgorithm}' not supported!`);
    }
    // Excel hashes the password in UTF-16LE form
    const passwordBuffer = Buffer.from(password, 'utf16le');
    // initial round: H0 = H(salt + password)
    let key = this.hash(hashAlgorithm, Buffer.from(saltValue, 'base64'), passwordBuffer);
    // Hn = H(Hn-1 + LE32(n)); the little-endian iteration counter is the
    // 'special' Excel quirk that rules out using crypto.pbkdf2() here
    for (let i = 0; i < spinCount; i++) {
      const iterator = Buffer.alloc(4);
      iterator.writeUInt32LE(i, 0);
      key = this.hash(hashAlgorithm, key, iterator);
    }
    return key.toString('base64');
  },
  /**
   * Generate cryptographically strong pseudo-random data.
   * @param {number} size - Number of bytes to generate.
   * @returns {Buffer}
   */
  randomBytes(size) {
    return crypto.randomBytes(size);
  },
};
module.exports = Encryptor;

View File

@@ -0,0 +1,48 @@
// Adapt a classic evented stream into an async iterator.
// Incoming chunks are buffered in `contents`; the loop alternates between
// waiting (stream resumed) and yielding what has been buffered (stream paused)
// so the producer cannot run unboundedly ahead of the consumer.
module.exports = async function* iterateStream(stream) {
  const contents = [];
  stream.on('data', data => contents.push(data));
  let resolveStreamEndedPromise;
  const streamEndedPromise = new Promise(resolve => (resolveStreamEndedPromise = resolve));
  let ended = false;
  stream.on('end', () => {
    ended = true;
    resolveStreamEndedPromise();
  });
  let error = false;
  stream.on('error', err => {
    error = err;
    resolveStreamEndedPromise();
  });
  while (!ended || contents.length > 0) {
    if (contents.length === 0) {
      // nothing buffered: let data flow and wait for the next chunk or the end
      stream.resume();
      // eslint-disable-next-line no-await-in-loop
      await Promise.race([once(stream, 'data'), streamEndedPromise]);
    } else {
      // hold back the producer while the consumer processes a buffered chunk
      stream.pause();
      const data = contents.shift();
      yield data;
    }
    // surface stream errors to the consumer of this iterator
    if (error) throw error;
  }
  resolveStreamEndedPromise();
};
// Resolve a promise the first time `type` fires on the emitter, then detach.
function once(eventEmitter, type) {
  // TODO: Use require('events').once when node v10 is dropped
  return new Promise(resolve => {
    let fired = false;
    function handler() {
      if (fired) {
        return;
      }
      fired = true;
      eventEmitter.removeListener(type, handler);
      resolve();
    }
    eventEmitter.addListener(type, handler);
  });
}

View File

@@ -0,0 +1,30 @@
const {SaxesParser} = require('saxes');
const {PassThrough} = require('readable-stream');
const {bufferToString} = require('./browser-buffer-decode');
module.exports = async function* (iterable) {
// TODO: Remove once node v8 is deprecated
// Detect and upgrade old streams
if (iterable.pipe && !iterable[Symbol.asyncIterator]) {
iterable = iterable.pipe(new PassThrough());
}
const saxesParser = new SaxesParser();
let error;
saxesParser.on('error', err => {
error = err;
});
let events = [];
saxesParser.on('opentag', value => events.push({eventType: 'opentag', value}));
saxesParser.on('text', value => events.push({eventType: 'text', value}));
saxesParser.on('closetag', value => events.push({eventType: 'closetag', value}));
for await (const chunk of iterable) {
saxesParser.write(bufferToString(chunk));
// saxesParser.write and saxesParser.on() are synchronous,
// so we can only reach the below line once all events have been emitted
if (error) throw error;
// As a performance optimization, we gather all events instead of passing
// them one by one, which would cause each event to go through the event queue
yield events;
events = [];
}
};

View File

@@ -0,0 +1,44 @@
const colCache = require('./col-cache');
// const cellRefRegex = /(([a-z_\-0-9]*)!)?[$]?([a-z]+)[$]?([1-9][0-9]*)/i;
// candidate tokens: an optional sheet! prefix, then an identifier-ish run;
// a captured trailing '(' marks a function call, which must not be rewritten
const replacementCandidateRx = /(([a-z_\-0-9]*)!)?([a-z0-9_$]{2,})([(])?/gi;
// a single cell reference: optional $ before the column letters and/or row digits
const CRrx = /^([$])?([a-z]+)([$])?([1-9][0-9]*)$/i;
// Translate a shared formula from its origin cell (`fromCell`) to another
// cell (`toCell`) by offsetting every relative (non-$) reference by the
// row/column delta between the two cells. Absolute parts ($A, $1), function
// names and named ranges are left untouched.
function slideFormula(formula, fromCell, toCell) {
  const offset = colCache.decode(fromCell);
  const to = colCache.decode(toCell);
  return formula.replace(
    replacementCandidateRx,
    (refMatch, sheet, sheetMaybe, addrPart, trailingParen) => {
      // function call, e.g. SUM( — leave as-is
      if (trailingParen) {
        return refMatch;
      }
      const match = CRrx.exec(addrPart);
      if (match) {
        const colDollar = match[1];
        const colStr = match[2].toUpperCase();
        const rowDollar = match[3];
        const rowStr = match[4];
        if (colStr.length > 3 || (colStr.length === 3 && colStr > 'XFD')) {
          // > XFD is the highest col number in excel 2007 and beyond, so this is a named range
          return refMatch;
        }
        let col = colCache.l2n(colStr);
        let row = parseInt(rowStr, 10);
        // only relative parts (no $) are shifted
        if (!colDollar) {
          col += to.col - offset.col;
        }
        if (!rowDollar) {
          row += to.row - offset.row;
        }
        // reassemble: optional sheet prefix, $ markers preserved as found
        const res = (sheet || '') + (colDollar || '') + colCache.n2l(col) + (rowDollar || '') + row;
        return res;
      }
      return refMatch;
    }
  );
}
module.exports = {
  slideFormula,
};

View File

@@ -0,0 +1,35 @@
/**
 * Interning table for workbook shared strings: each distinct value is stored
 * once, `add` returns its index and counts one more reference to it.
 */
class SharedStrings {
  constructor() {
    this._values = [];
    this._totalRefs = 0;
    // null-prototype map so arbitrary string keys can't collide with
    // Object.prototype members (e.g. 'hasOwnProperty')
    this._hash = Object.create(null);
  }
  // number of distinct strings stored
  get count() {
    return this._values.length;
  }
  get values() {
    return this._values;
  }
  // total number of add() calls (cell references), not distinct values
  get totalRefs() {
    return this._totalRefs;
  }
  getString(index) {
    return this._values[index];
  }
  // Intern `value`, register one more reference, and return its index.
  add(value) {
    let index = this._hash[value];
    if (index === undefined) {
      this._values.push(value);
      index = this._values.length - 1;
      this._hash[value] = index;
    }
    this._totalRefs += 1;
    return index;
  }
}
module.exports = SharedStrings;

View File

@@ -0,0 +1,72 @@
const Stream = require('readable-stream');
// =============================================================================
// StreamBase64 - A utility to convert to/from base64 stream
// Note: does not buffer data, must be piped.
// NOTE(review): most of the Duplex surface below is stubbed out — confirm the
// intended behaviour before relying on this class for real data flow.
class StreamBase64 extends Stream.Duplex {
  constructor() {
    super();
    // downstream consumers registered via pipe()
    this.pipes = [];
  }
  // --- writable side ---------------------------------------------------------
  // Accepts (and currently ignores) data; always reports "safe to keep writing".
  write(/* data, encoding */) {
    return true;
  }
  cork() {}
  uncork() {}
  end(/* chunk, encoding, callback */) {}
  // --- readable side ---------------------------------------------------------
  read(/* size */) {}
  // Record the encoding that read()/'data' consumers would like strings in.
  setEncoding(encoding) {
    this.encoding = encoding;
  }
  pause() {}
  resume() {}
  isPaused() {}
  // Register a downstream consumer.
  pipe(destination) {
    this.pipes.push(destination);
  }
  // Deregister a downstream consumer.
  unpipe(destination) {
    this.pipes = this.pipes.filter(pipe => pipe !== destination);
  }
  unshift(/* chunk */) {
    // pushing data back into this stream is deliberately unsupported
    throw new Error('Not Implemented');
  }
  wrap(/* stream */) {
    // not implemented
    throw new Error('Not Implemented');
  }
}
module.exports = StreamBase64;

View File

@@ -0,0 +1,364 @@
/* eslint-disable max-classes-per-file */
const Stream = require('readable-stream');
const utils = require('./utils');
const StringBuf = require('./string-buf');
// =============================================================================
// data chunks - encapsulating incoming data
// StringChunk: wraps a string (with its encoding) and converts it to a
// Buffer only on demand, caching the result.
class StringChunk {
  constructor(data, encoding) {
    this._data = data;
    this._encoding = encoding;
  }
  // byte length of the encoded string
  get length() {
    return this.toBuffer().length;
  }
  // Copy the encoded bytes into `target` (Buffer#copy semantics).
  copy(target, targetOffset, offset, length) {
    return this.toBuffer().copy(target, targetOffset, offset, length);
  }
  // Encode lazily; repeated calls reuse the same Buffer.
  toBuffer() {
    this._buffer = this._buffer || Buffer.from(this._data, this._encoding);
    return this._buffer;
  }
}
// StringBufChunk: wraps a StringBuf so it can be treated uniformly with the
// other chunk types.
class StringBufChunk {
  constructor(data) {
    this._data = data;
  }
  // byte length as reported by the wrapped StringBuf
  get length() {
    return this._data.length;
  }
  // Copy straight out of the StringBuf's backing buffer into `target`.
  copy(target, targetOffset, offset, length) {
    // eslint-disable-next-line no-underscore-dangle
    return this._data._buf.copy(target, targetOffset, offset, length);
  }
  toBuffer() {
    return this._data.toBuffer();
  }
}
// BufferChunk: wraps a Buffer that is already in its final binary form.
class BufferChunk {
  constructor(data) {
    this._data = data;
  }
  get length() {
    return this._data.length;
  }
  // Buffer#copy semantics, delegated to the wrapped buffer.
  copy(target, targetOffset, offset, length) {
    this._data.copy(target, targetOffset, offset, length);
  }
  // Returns the wrapped buffer itself (no copy).
  toBuffer() {
    return this._data;
  }
}
// =============================================================================
// ReadWriteBuf - a single fixed-size buffer supporting simple sequential
// read-write. Bytes in [iRead, iWrite) are written-but-unread.
class ReadWriteBuf {
  constructor(size) {
    this.size = size;
    // the backing buffer
    this.buffer = Buffer.alloc(size);
    // read index: everything before this has been consumed
    this.iRead = 0;
    // write index: everything before this has been written
    this.iWrite = 0;
  }
  // Return the unread portion as a Buffer. Returns the backing buffer itself
  // (no copy) only in the untouched-and-full case; otherwise a copy of the
  // [iRead, iWrite) slice.
  toBuffer() {
    if (this.iRead === 0 && this.iWrite === this.size) {
      return this.buffer;
    }
    const buf = Buffer.alloc(this.iWrite - this.iRead);
    this.buffer.copy(buf, 0, this.iRead, this.iWrite);
    return buf;
  }
  // number of unread bytes
  get length() {
    return this.iWrite - this.iRead;
  }
  // end-of-data: everything written has been read
  get eod() {
    return this.iRead === this.iWrite;
  }
  // no more bytes can be written
  get full() {
    return this.iWrite === this.size;
  }
  /**
   * Read up to `size` bytes.
   * @param {number} [size] - Bytes to read; omit (or exceed length) to read everything unread.
   * @returns {Buffer|null} The bytes read, or null when size === 0.
   */
  read(size) {
    let buf;
    // read size bytes from buffer and return buffer
    if (size === 0) {
      // special case - return null if no data requested
      return null;
    }
    if (size === undefined || size >= this.length) {
      // if no size specified or size is at least what we have then return all of the bytes
      buf = this.toBuffer();
      this.iRead = this.iWrite;
      return buf;
    }
    // otherwise return a chunk of exactly `size` bytes.
    // Bug fix: the copy source range is [iRead, iRead + size); the previous
    // code used `size` as the end index, which returned truncated/zeroed data
    // on any read after the first partial read (i.e. once iRead > 0).
    buf = Buffer.alloc(size);
    this.buffer.copy(buf, 0, this.iRead, this.iRead + size);
    this.iRead += size;
    return buf;
  }
  /**
   * Write up to `length` bytes of `chunk` starting at source `offset`.
   * @returns {number} Bytes actually written (limited by remaining capacity).
   */
  write(chunk, offset, length) {
    // write as many bytes from data from optional source offset
    // and return number of bytes written
    const size = Math.min(length, this.size - this.iWrite);
    chunk.copy(this.buffer, this.iWrite, offset, offset + size);
    this.iWrite += size;
    return size;
  }
}
// =============================================================================
// StreamBuf - a multi-purpose read-write stream
// As MemBuf - write as much data as you like. Then call toBuffer() to consolidate
// As StreamHub - pipe to multiple writables
// As readable stream - feed data into the writable part and have some other code read from it.
// Note: Not sure why but StreamBuf does not like JS "class" sugar. It fails the
// integration tests
const StreamBuf = function(options) {
  options = options || {};
  // max bytes per internal ReadWriteBuf (default 1MiB)
  this.bufSize = options.bufSize || 1024 * 1024;
  // chain of ReadWriteBuf instances holding unread data
  this.buffers = [];
  // batch mode fills a buffer completely before passing the data on
  // to pipes or 'readable' event listeners
  this.batch = options.batch || false;
  this.corked = false;
  // where in the current writable buffer we're up to
  this.inPos = 0;
  // where in the current readable buffer we've read up to
  this.outPos = 0;
  // consuming pipe streams go here
  this.pipes = [];
  // controls emit('data')
  this.paused = false;
  this.encoding = null;
};
utils.inherits(StreamBuf, Stream.Duplex, {
  // Consolidate all buffered data into one Buffer (null when empty).
  toBuffer() {
    switch (this.buffers.length) {
      case 0:
        return null;
      case 1:
        return this.buffers[0].toBuffer();
      default:
        return Buffer.concat(this.buffers.map(rwBuf => rwBuf.toBuffer()));
    }
  },
  // writable
  // event drain - if write returns false (which it won't), indicates when safe to write again.
  // finish - end() has been called
  // pipe(src) - pipe() has been called on readable
  // unpipe(src) - unpipe() has been called on readable
  // error - duh
  // Return the last buffer if it still has room, otherwise append a fresh one.
  _getWritableBuffer() {
    if (this.buffers.length) {
      const last = this.buffers[this.buffers.length - 1];
      if (!last.full) {
        return last;
      }
    }
    const buf = new ReadWriteBuf(this.bufSize);
    this.buffers.push(buf);
    return buf;
  },
  // Write the chunk to every piped destination; resolves once all have
  // invoked their write callbacks.
  async _pipe(chunk) {
    const write = function(pipe) {
      return new Promise(resolve => {
        pipe.write(chunk.toBuffer(), () => {
          resolve();
        });
      });
    };
    await Promise.all(this.pipes.map(write));
  },
  // Append the chunk's bytes to the internal buffer chain, growing as needed.
  _writeToBuffers(chunk) {
    let inPos = 0;
    const inLen = chunk.length;
    while (inPos < inLen) {
      // find writable buffer
      const buffer = this._getWritableBuffer();
      // write some data
      inPos += buffer.write(chunk, inPos, inLen - inPos);
    }
  },
  // Accept string / Buffer / StringBuf data and route it to pipes, internal
  // buffers, or 'data' listeners depending on mode.
  async write(data, encoding, callback) {
    if (encoding instanceof Function) {
      callback = encoding;
      encoding = 'utf8';
    }
    callback = callback || utils.nop;
    // encapsulate data into a chunk
    let chunk;
    if (data instanceof StringBuf) {
      chunk = new StringBufChunk(data);
    } else if (data instanceof Buffer) {
      chunk = new BufferChunk(data);
    } else if (typeof data === 'string' || data instanceof String || data instanceof ArrayBuffer) {
      chunk = new StringChunk(data, encoding);
    } else {
      throw new Error('Chunk must be one of type String, Buffer or StringBuf.');
    }
    // now, do something with the chunk
    if (this.pipes.length) {
      if (this.batch) {
        this._writeToBuffers(chunk);
        // NOTE(review): in batch mode _pipe is not awaited and `callback` is
        // never invoked on this branch — confirm this is intentional.
        while (!this.corked && this.buffers.length > 1) {
          this._pipe(this.buffers.shift());
        }
      } else if (!this.corked) {
        await this._pipe(chunk);
        callback();
      } else {
        this._writeToBuffers(chunk);
        process.nextTick(callback);
      }
    } else {
      if (!this.paused) {
        this.emit('data', chunk.toBuffer());
      }
      this._writeToBuffers(chunk);
      this.emit('readable');
    }
    return true;
  },
  cork() {
    this.corked = true;
  },
  // Push any buffered data out to the pipes.
  // NOTE(review): the _pipe promises are not awaited here either.
  _flush(/* destination */) {
    // if we have comsumers...
    if (this.pipes.length) {
      // and there's stuff not written
      while (this.buffers.length) {
        this._pipe(this.buffers.shift());
      }
    }
  },
  uncork() {
    this.corked = false;
    this._flush();
  },
  // Finish the stream: flush remaining data, end all pipes, emit 'finish'.
  end(chunk, encoding, callback) {
    const writeComplete = error => {
      if (error) {
        callback(error);
      } else {
        this._flush();
        this.pipes.forEach(pipe => {
          pipe.end();
        });
        this.emit('finish');
      }
    };
    if (chunk) {
      this.write(chunk, encoding, writeComplete);
    } else {
      writeComplete();
    }
  },
  // readable
  // event readable - some data is now available
  // event data - switch to flowing mode - feeds chunks to handler
  // event end - no more data
  // event close - optional, indicates upstream close
  // event error - duh
  // Read up to `size` bytes (or everything buffered when size is omitted).
  read(size) {
    let buffers;
    // read min(buffer, size || infinity)
    if (size) {
      buffers = [];
      while (size && this.buffers.length && !this.buffers[0].eod) {
        const first = this.buffers[0];
        const buffer = first.read(size);
        size -= buffer.length;
        buffers.push(buffer);
        // only discard a drained buffer once it can take no more writes
        if (first.eod && first.full) {
          this.buffers.shift();
        }
      }
      return Buffer.concat(buffers);
    }
    buffers = this.buffers.map(buf => buf.toBuffer()).filter(Boolean);
    this.buffers = [];
    return Buffer.concat(buffers);
  },
  setEncoding(encoding) {
    // causes stream.read or stream.on('data) to return strings of encoding instead of Buffer objects
    this.encoding = encoding;
  },
  pause() {
    this.paused = true;
  },
  resume() {
    this.paused = false;
  },
  isPaused() {
    return !!this.paused;
  },
  pipe(destination) {
    // add destination to pipe list & write current buffer
    // NOTE(review): calling end() here finishes the whole stream — looks like
    // it may be intended as a flush; confirm before changing.
    this.pipes.push(destination);
    if (!this.paused && this.buffers.length) {
      this.end();
    }
  },
  unpipe(destination) {
    // remove destination from pipe list
    this.pipes = this.pipes.filter(pipe => pipe !== destination);
  },
  unshift(/* chunk */) {
    // some numpty has read some data that's not for them and they want to put it back!
    // Might implement this some day
    throw new Error('Not Implemented');
  },
  wrap(/* stream */) {
    // not implemented
    throw new Error('Not Implemented');
  },
});
module.exports = StreamBuf;

View File

@@ -0,0 +1,82 @@
// StringBuf - a growable byte buffer used to assemble the xml file strings
// with as few intermediate string/memory operations as possible.
class StringBuf {
  constructor(options) {
    this._buf = Buffer.alloc((options && options.size) || 16384);
    this._encoding = (options && options.encoding) || 'utf8';
    // current write position within _buf
    this._inPos = 0;
    // cached result of toBuffer(); invalidated by any mutation
    this._buffer = undefined;
  }
  // number of bytes written so far
  get length() {
    return this._inPos;
  }
  // current size of the backing buffer
  get capacity() {
    return this._buf.length;
  }
  // raw backing buffer (may contain unwritten trailing bytes)
  get buffer() {
    return this._buf;
  }
  // Snapshot the written bytes as one Buffer; cached until the next mutation.
  toBuffer() {
    if (!this._buffer) {
      this._buffer = Buffer.alloc(this.length);
      this._buf.copy(this._buffer, 0, 0, this.length);
    }
    return this._buffer;
  }
  // Rewind the write position (to `position`, or to the start).
  reset(position) {
    this._buffer = undefined;
    this._inPos = position || 0;
  }
  // Grow the backing buffer (doubling) until it can hold at least `min` bytes.
  _grow(min) {
    let size = this._buf.length * 2;
    while (size < min) {
      size *= 2;
    }
    const next = Buffer.alloc(size);
    this._buf.copy(next, 0);
    this._buf = next;
  }
  // Append encoded text, growing and retrying until it fits completely.
  addText(text) {
    this._buffer = undefined;
    let inPos = this._inPos + this._buf.write(text, this._inPos, this._encoding);
    // if we've hit (or are nearing) capacity, grow and rewrite until done
    while (inPos >= this._buf.length - 4) {
      this._grow(this._inPos + text.length);
      inPos = this._inPos + this._buf.write(text, this._inPos, this._encoding);
    }
    this._inPos = inPos;
  }
  // Append the contents of another StringBuf.
  addStringBuf(inBuf) {
    if (!inBuf.length) {
      return;
    }
    this._buffer = undefined;
    if (this.length + inBuf.length > this.capacity) {
      this._grow(this.length + inBuf.length);
    }
    // eslint-disable-next-line no-underscore-dangle
    inBuf._buf.copy(this._buf, this._inPos, 0, inBuf.length);
    this._inPos += inBuf.length;
  }
}
module.exports = StringBuf;

View File

@@ -0,0 +1,35 @@
// StringBuilder - accumulate string fragments in an array and join them once
// in toString(), avoiding repeated concatenation while building xml.
class StringBuilder {
  constructor() {
    this.reset();
  }
  // number of fragments collected so far (not the character count)
  get length() {
    return this._buf.length;
  }
  toString() {
    return this._buf.join('');
  }
  // Truncate to `position` fragments; with no (or zero) argument, clear fully.
  reset(position) {
    if (!position) {
      this._buf = [];
      return;
    }
    while (this._buf.length > position) {
      this._buf.pop();
    }
  }
  addText(text) {
    this._buf.push(text);
  }
  // Append the stringified contents of a StringBuf (or anything stringable).
  addStringBuf(inBuf) {
    this._buf.push(inBuf.toString());
  }
}
module.exports = StringBuilder;

View File

@@ -0,0 +1,67 @@
const events = require('events');
// =============================================================================
// StutteredPipe - pumps a readable into a writable in setImmediate-sized
// steps, giving the event loop (and GC) breathing room between reads.
class StutteredPipe extends events.EventEmitter {
  constructor(readable, writable, options) {
    super();
    options = options || {};
    this.readable = readable;
    this.writable = writable;
    // bytes requested per read() call
    this.bufSize = options.bufSize || 16384;
    // autoPause: pump one chunk per resume() instead of free-running
    this.autoPause = options.autoPause || false;
    this.paused = false;
    this.eod = false;
    // handle of the currently pending setImmediate, if any
    this.scheduled = null;
    readable.on('end', () => {
      // source exhausted: propagate completion downstream
      this.eod = true;
      writable.end();
    });
    // need to have some way to communicate speed of stream
    // back from the consumer
    readable.on('readable', () => {
      if (!this.paused) {
        this.resume();
      }
    });
    this._schedule();
  }
  pause() {
    this.paused = true;
  }
  // Restart pumping (cancelling any already-queued step first).
  resume() {
    if (this.eod) {
      return;
    }
    if (this.scheduled !== null) {
      clearImmediate(this.scheduled);
    }
    this._schedule();
  }
  // Queue one pump step on the next setImmediate tick.
  _schedule() {
    this.scheduled = setImmediate(() => {
      this.scheduled = null;
      if (this.eod || this.paused) {
        return;
      }
      const chunk = this.readable.read(this.bufSize);
      if (chunk && chunk.length) {
        this.writable.write(chunk);
        // keep pumping unless paused or in one-chunk-per-resume mode
        if (!this.paused && !this.autoPause) {
          this._schedule();
        }
      } else if (!this.paused) {
        // nothing available right now; poll again
        this._schedule();
      }
    });
  }
}
module.exports = StutteredPipe;

View File

@@ -0,0 +1,24 @@
// A stack restricted to instances of one type; popping from an empty stack
// manufactures a fresh instance instead of returning undefined.
class TypedStack {
  constructor(type) {
    this._type = type;
    this._stack = [];
  }
  get size() {
    return this._stack.length;
  }
  // Pop the top of stack, or construct a new instance when empty.
  pop() {
    return this._stack.pop() || new this._type();
  }
  // Push an instance, rejecting anything that is not of the configured type.
  push(instance) {
    if (!(instance instanceof this._type)) {
      throw new Error('Invalid type pushed to TypedStack');
    }
    this._stack.push(instance);
  }
}
module.exports = TypedStack;

View File

@@ -0,0 +1,184 @@
const {toString} = Object.prototype;
// First character that escapeHtml() must encode. The apostrophe is
// included: the old regex /["&<>]/ omitted it, so a string whose only
// special character was `'` (or whose apostrophes preceded the first
// other special char) slipped through unescaped even though the switch
// below already handles `'`. This also matches utils.xmlEncode's set.
const escapeHtmlRegex = /["&<>']/;
const _ = {
  // Iterate an array (cb(value, index)) or a plain object (cb(value, key)).
  each: function each(obj, cb) {
    if (obj) {
      if (Array.isArray(obj)) {
        obj.forEach(cb);
      } else {
        Object.keys(obj).forEach(key => {
          cb(obj[key], key);
        });
      }
    }
  },
  // Array.some / object-value some; false for null/undefined input.
  some: function some(obj, cb) {
    if (obj) {
      if (Array.isArray(obj)) {
        return obj.some(cb);
      }
      return Object.keys(obj).some(key => cb(obj[key], key));
    }
    return false;
  },
  // Array.every / object-value every; vacuously true for null/undefined.
  every: function every(obj, cb) {
    if (obj) {
      if (Array.isArray(obj)) {
        return obj.every(cb);
      }
      return Object.keys(obj).every(key => cb(obj[key], key));
    }
    return true;
  },
  // Array.map / object-value map; [] for null/undefined input.
  map: function map(obj, cb) {
    if (obj) {
      if (Array.isArray(obj)) {
        return obj.map(cb);
      }
      return Object.keys(obj).map(key => cb(obj[key], key));
    }
    return [];
  },
  // Index an array of objects by property p (last entry wins on collision).
  keyBy(a, p) {
    return a.reduce((o, v) => {
      o[v[p]] = v;
      return o;
    }, {});
  },
  // Deep structural equality for JSON-like values
  // (nested plain objects, arrays and primitives).
  isEqual: function isEqual(a, b) {
    const aType = typeof a;
    const bType = typeof b;
    const aArray = Array.isArray(a);
    const bArray = Array.isArray(b);
    let keys;
    if (aType !== bType) {
      return false;
    }
    switch (typeof a) {
      case 'object':
        if (aArray || bArray) {
          if (aArray && bArray) {
            return (
              a.length === b.length &&
              a.every((aValue, index) => {
                const bValue = b[index];
                return _.isEqual(aValue, bValue);
              })
            );
          }
          return false;
        }
        if (a === null || b === null) {
          return a === b;
        }
        // Same key count and same key set, then compare values recursively.
        keys = Object.keys(a);
        if (Object.keys(b).length !== keys.length) {
          return false;
        }
        for (const key of keys) {
          if (!b.hasOwnProperty(key)) {
            return false;
          }
        }
        return _.every(a, (aValue, key) => {
          const bValue = b[key];
          return _.isEqual(aValue, bValue);
        });
      default:
        // number, string, boolean, undefined, function, symbol, bigint
        return a === b;
    }
  },
  // Escape ", &, ', < and > for safe embedding in HTML/XML text.
  // Returns the input unchanged when nothing needs escaping.
  escapeHtml(html) {
    const regexResult = escapeHtmlRegex.exec(html);
    if (!regexResult) return html;
    let result = '';
    let escape = '';
    let lastIndex = 0;
    let i = regexResult.index;
    for (; i < html.length; i++) {
      switch (html.charAt(i)) {
        case '"':
          escape = '&quot;';
          break;
        case '&':
          escape = '&amp;';
          break;
        case '\'':
          escape = '&apos;';
          break;
        case '<':
          escape = '&lt;';
          break;
        case '>':
          escape = '&gt;';
          break;
        default:
          continue;
      }
      // Flush the untouched run before this special character.
      if (lastIndex !== i) result += html.substring(lastIndex, i);
      lastIndex = i + 1;
      result += escape;
    }
    if (lastIndex !== i) return result + html.substring(lastIndex, i);
    return result;
  },
  // Three-way string comparison suitable for Array.prototype.sort.
  strcmp(a, b) {
    if (a < b) return -1;
    if (a > b) return 1;
    return 0;
  },
  isUndefined(val) {
    return toString.call(val) === '[object Undefined]';
  },
  // True only for plain objects (not arrays, dates, null, ...).
  isObject(val) {
    return toString.call(val) === '[object Object]';
  },
  // jQuery-style recursive merge of all arguments into the first.
  // Plain objects and arrays are merged in place; other values overwrite;
  // undefined values are skipped.
  deepMerge() {
    const target = arguments[0] || {};
    const {length} = arguments;
    // eslint-disable-next-line one-var
    let src, clone, copyIsArray;
    function assignValue(val, key) {
      src = target[key];
      copyIsArray = Array.isArray(val);
      if (_.isObject(val) || copyIsArray) {
        if (copyIsArray) {
          copyIsArray = false;
          clone = src && Array.isArray(src) ? src : [];
        } else {
          clone = src && _.isObject(src) ? src : {};
        }
        target[key] = _.deepMerge(clone, val);
      } else if (!_.isUndefined(val)) {
        target[key] = val;
      }
    }
    for (let i = 0; i < length; i++) {
      _.each(arguments[i], assignValue);
    }
    return target;
  },
};
module.exports = _;

View File

@@ -0,0 +1,172 @@
const fs = require('fs');
// useful stuff
// Classic prototype-chain inheritance helper. Optional `statics` are
// copied onto the constructor itself; `prototype` members go onto
// cls.prototype, which is rebuilt from superCtor.prototype.
const inherits = function(cls, superCtor, statics, prototype) {
  // eslint-disable-next-line no-underscore-dangle
  cls.super_ = superCtor;
  // Three-argument form: the third argument is the prototype, not statics.
  if (!prototype) {
    prototype = statics;
    statics = null;
  }
  if (statics) {
    Object.keys(statics).forEach(i => {
      Object.defineProperty(cls, i, Object.getOwnPropertyDescriptor(statics, i));
    });
  }
  const properties = {
    constructor: {
      value: cls,
      enumerable: false,
      writable: false,
      configurable: true,
    },
  };
  if (prototype) {
    Object.keys(prototype).forEach(i => {
      properties[i] = Object.getOwnPropertyDescriptor(prototype, i);
    });
  }
  cls.prototype = Object.create(superCtor.prototype, properties);
};
// XML-special characters plus control characters illegal in XML 1.0
// (DEL and C0 controls other than TAB/LF/CR).
// eslint-disable-next-line no-control-regex
const xmlDecodeRegex = /[<>&'"\x7F\x00-\x08\x0B-\x0C\x0E-\x1F]/;
const utils = {
  nop() {},
  // Resolve `value` on the next tick - setImmediate where available,
  // otherwise a 1ms setTimeout fallback (browsers).
  promiseImmediate(value) {
    return new Promise(resolve => {
      if (global.setImmediate) {
        setImmediate(() => {
          resolve(value);
        });
      } else {
        // poorman's setImmediate - must wait at least 1ms
        setTimeout(() => {
          resolve(value);
        }, 1);
      }
    });
  },
  inherits,
  // JS Date -> Excel serial date (days since the 1900 epoch, or the
  // 1904 epoch when date1904 is set).
  dateToExcel(d, date1904) {
    return 25569 + d.getTime() / (24 * 3600 * 1000) - (date1904 ? 1462 : 0);
  },
  // Inverse of dateToExcel; rounds to the nearest millisecond.
  excelToDate(v, date1904) {
    const millisecondSinceEpoch = Math.round((v - 25569 + (date1904 ? 1462 : 0)) * 24 * 3600 * 1000);
    return new Date(millisecondSinceEpoch);
  },
  // Split 'dir/name' on the last '/' into {path, name}.
  parsePath(filepath) {
    const last = filepath.lastIndexOf('/');
    return {
      path: filepath.substring(0, last),
      name: filepath.substring(last + 1),
    };
  },
  // Relationship-file path for a part:
  // 'xl/workbook.xml' -> 'xl/_rels/workbook.xml.rels'.
  getRelsPath(filepath) {
    const path = utils.parsePath(filepath);
    return `${path.path}/_rels/${path.name}.rels`;
  },
  // Encode text for XML: escape the five special characters and drop
  // characters illegal in XML 1.0. Returns the input unchanged when no
  // encoding is needed.
  xmlEncode(text) {
    const regexResult = xmlDecodeRegex.exec(text);
    if (!regexResult) return text;
    let result = '';
    let escape = '';
    let lastIndex = 0;
    let i = regexResult.index;
    for (; i < text.length; i++) {
      const charCode = text.charCodeAt(i);
      switch (charCode) {
        case 34: // "
          escape = '&quot;';
          break;
        case 38: // &
          escape = '&amp;';
          break;
        case 39: // '
          escape = '&apos;';
          break;
        case 60: // <
          escape = '&lt;';
          break;
        case 62: // >
          escape = '&gt;';
          break;
        case 127: // DEL - stripped
          escape = '';
          break;
        default: {
          // Strip C0 control chars except TAB (9), LF (10) and CR (13).
          if (charCode <= 31 && (charCode <= 8 || (charCode >= 11 && charCode !== 13))) {
            escape = '';
            break;
          }
          continue;
        }
      }
      if (lastIndex !== i) result += text.substring(lastIndex, i);
      lastIndex = i + 1;
      if (escape) result += escape;
    }
    if (lastIndex !== i) return result + text.substring(lastIndex, i);
    return result;
  },
  // Decode the five XML character entities; unknown entities pass through.
  xmlDecode(text) {
    return text.replace(/&([a-z]*);/g, c => {
      switch (c) {
        case '&lt;':
          return '<';
        case '&gt;':
          return '>';
        case '&amp;':
          return '&';
        case '&apos;':
          return '\'';
        case '&quot;':
          return '"';
        default:
          return c;
      }
    });
  },
  // Parse an integer, falling back to 0 on NaN.
  validInt(value) {
    const i = parseInt(value, 10);
    return !Number.isNaN(i) ? i : 0;
  },
  // Heuristic: does this number-format string render a date/time?
  isDateFmt(fmt) {
    if (!fmt) {
      return false;
    }
    // must remove all chars inside quotes and []
    fmt = fmt.replace(/\[[^\]]*]/g, '');
    fmt = fmt.replace(/"[^"]*"/g, '');
    // then check for date formatting chars
    const result = fmt.match(/[ymdhMsb]+/) !== null;
    return result;
  },
  fs: {
    // Promise-flavoured fs.access existence check; never rejects.
    exists(path) {
      return new Promise(resolve => {
        fs.access(path, fs.constants.F_OK, err => {
          resolve(!err);
        });
      });
    },
  },
  // UTC date portion (YYYY-MM-DD) of a Date.
  // Fix: was `dt.toIsoString().subsstr(0, 10)` - both method names were
  // misspelled, so this always threw a TypeError.
  toIsoDateString(dt) {
    return dt.toISOString().slice(0, 10);
  },
  // Loose boolean parse: accepts true, 'true', 1 and '1'.
  parseBoolean(value) {
    return value === true || value === 'true' || value === 1 || value === '1';
  },
};
module.exports = utils;

View File

@@ -0,0 +1,169 @@
const _ = require('./under-dash');
const utils = require('./utils');
// constants
// Tokens used when assembling the XML character stream.
const OPEN_ANGLE = '<';
const CLOSE_ANGLE = '>';
const OPEN_ANGLE_SLASH = '</';
const CLOSE_SLASH_ANGLE = '/>';

// Append a single encoded attribute (` name="value"`) to the xml array.
function pushAttribute(xml, name, value) {
  xml.push(` ${name}="${utils.xmlEncode(value.toString())}"`);
}

// Append every defined attribute of `attributes` to the xml array.
function pushAttributes(xml, attributes) {
  if (!attributes) {
    return;
  }
  const pieces = [];
  _.each(attributes, (value, name) => {
    if (value !== undefined) {
      pushAttribute(pieces, name, value);
    }
  });
  xml.push(pieces.join(''));
}

/**
 * Incremental XML writer. Nodes are opened and closed explicitly;
 * the document text is accumulated as an array of string fragments
 * and joined on demand via the `xml` getter. Supports checkpoint /
 * rollback so speculative output can be discarded.
 */
class XmlStream {
  constructor() {
    this._xml = []; // accumulated document fragments
    this._stack = []; // names of currently open nodes
    this._rollbacks = []; // checkpoints created by addRollback()
  }

  // Name of the innermost open node, or undefined at document level.
  get tos() {
    const depth = this._stack.length;
    return depth ? this._stack[depth - 1] : undefined;
  }

  // Fragment count so far - a cheap marker for "has anything been added".
  get cursor() {
    return this._xml.length;
  }

  // Write the `<?xml ... ?>` document declaration.
  openXml(docAttributes) {
    const xml = this._xml;
    xml.push('<?xml');
    pushAttributes(xml, docAttributes);
    xml.push('?>\n');
  }

  // Begin `<name attr="...">`; the tag remains open for addAttribute().
  openNode(name, attributes) {
    // Close the parent's start tag before nesting into it.
    if (this.tos && this.open) {
      this._xml.push(CLOSE_ANGLE);
    }
    this._stack.push(name);
    this._xml.push(OPEN_ANGLE);
    this._xml.push(name);
    pushAttributes(this._xml, attributes);
    this.leaf = true;
    this.open = true;
  }

  // Add one attribute to the still-open start tag.
  addAttribute(name, value) {
    if (!this.open) {
      throw new Error('Cannot write attributes to node if it is not open');
    }
    if (value !== undefined) {
      pushAttribute(this._xml, name, value);
    }
  }

  // Add several attributes to the still-open start tag.
  addAttributes(attrs) {
    if (!this.open) {
      throw new Error('Cannot write attributes to node if it is not open');
    }
    pushAttributes(this._xml, attrs);
  }

  // Write XML-encoded character data inside the current node.
  writeText(text) {
    if (this.open) {
      this._xml.push(CLOSE_ANGLE);
      this.open = false;
    }
    this.leaf = false;
    this._xml.push(utils.xmlEncode(text.toString()));
  }

  // Write a pre-built XML fragment verbatim inside the current node.
  writeXml(xml) {
    if (this.open) {
      this._xml.push(CLOSE_ANGLE);
      this.open = false;
    }
    this.leaf = false;
    this._xml.push(xml);
  }

  // End the current node - `/>` when it had no content, `</name>` otherwise.
  closeNode() {
    const name = this._stack.pop();
    if (this.leaf) {
      this._xml.push(CLOSE_SLASH_ANGLE);
    } else {
      this._xml.push(OPEN_ANGLE_SLASH, name, CLOSE_ANGLE);
    }
    this.open = false;
    this.leaf = false;
  }

  // Convenience: open a node, optionally write text, close it.
  leafNode(name, attributes, text) {
    this.openNode(name, attributes);
    if (text !== undefined) {
      // zeros need to be written
      this.writeText(text);
    }
    this.closeNode();
  }

  // Close every node still open.
  closeAll() {
    while (this._stack.length) {
      this.closeNode();
    }
  }

  // Record a checkpoint that rollback() can restore; returns the cursor.
  addRollback() {
    this._rollbacks.push({
      xml: this._xml.length,
      stack: this._stack.length,
      leaf: this.leaf,
      open: this.open,
    });
    return this.cursor;
  }

  // Discard the most recent checkpoint, keeping the output.
  commit() {
    this._rollbacks.pop();
  }

  // Restore state to the most recent checkpoint, discarding output
  // and open nodes added since.
  rollback() {
    const mark = this._rollbacks.pop();
    if (this._xml.length > mark.xml) {
      this._xml.length = mark.xml;
    }
    if (this._stack.length > mark.stack) {
      this._stack.length = mark.stack;
    }
    this.leaf = mark.leaf;
    this.open = mark.open;
  }

  // The finished document: closes anything still open and joins fragments.
  get xml() {
    this.closeAll();
    return this._xml.join('');
  }
}
// Standard attributes for the `<?xml ...?>` declaration.
XmlStream.StdDocAttributes = {
  version: '1.0',
  encoding: 'UTF-8',
  standalone: 'yes',
};
module.exports = XmlStream;

View File

@@ -0,0 +1,87 @@
const events = require('events');
const JSZip = require('jszip');
const StreamBuf = require('./stream-buf');
const {stringToBuffer} = require('./browser-buffer-encode');
// =============================================================================
// The ZipWriter class
// Packs streamed data into an output zip stream
/**
 * Collects file entries into a JSZip archive and exposes the generated
 * zip through a readable-stream-like interface backed by a StreamBuf.
 * Emits 'finish' once the archive has been generated.
 */
class ZipWriter extends events.EventEmitter {
  // `options` is forwarded to JSZip.generateAsync(); by default a
  // DEFLATE-compressed nodebuffer is produced.
  constructor(options) {
    super();
    this.options = {
      type: 'nodebuffer',
      compression: 'DEFLATE',
      ...options,
    };
    this.zip = new JSZip();
    this.stream = new StreamBuf();
  }

  // Add one entry. `options.name` is the path inside the archive; when
  // options carries its own truthy `base64` flag, JSZip decodes the data.
  append(data, options) {
    if (Object.prototype.hasOwnProperty.call(options, 'base64') && options.base64) {
      this.zip.file(options.name, data, {base64: true});
      return;
    }
    // https://www.npmjs.com/package/process
    if (process.browser && typeof data === 'string') {
      // use TextEncoder in browser
      data = stringToBuffer(data);
    }
    this.zip.file(options.name, data);
  }

  // Generate the archive, flush it into the internal stream and
  // signal completion via 'finish'.
  async finalize() {
    const content = await this.zip.generateAsync(this.options);
    this.stream.end(content);
    this.emit('finish');
  }

  // ==========================================================================
  // Stream.Readable interface - everything below delegates to this.stream.
  read(size) {
    return this.stream.read(size);
  }

  setEncoding(encoding) {
    return this.stream.setEncoding(encoding);
  }

  pause() {
    return this.stream.pause();
  }

  resume() {
    return this.stream.resume();
  }

  isPaused() {
    return this.stream.isPaused();
  }

  pipe(destination, options) {
    return this.stream.pipe(destination, options);
  }

  unpipe(destination) {
    return this.stream.unpipe(destination);
  }

  unshift(chunk) {
    return this.stream.unshift(chunk);
  }

  wrap(stream) {
    return this.stream.wrap(stream);
  }
}
// =============================================================================
module.exports = {
ZipWriter,
};