fix: correct the success-count calculation for intermediary imports

Problem:
- The reported number of successfully imported records could be negative.
- Cause: the success count was computed as validRecords.size() - failures.size(),
- instead of using the return values of the actual database operations.

Fix:
- saveBatchWithUpsert and saveBatch now return an int.
- The actual number of database-affected rows is accumulated (see the sketch below).
- An actualSuccessCount variable tracks the true number of successes.

Affected scope:
- CcdiIntermediaryPersonImportServiceImpl
- CcdiIntermediaryEntityImportServiceImpl
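
A minimal sketch of the counting change, assuming a MyBatis-Plus-style batch service; apart from saveBatchWithUpsert, actualSuccessCount, and the two ServiceImpl class names above, every identifier below is hypothetical:

```java
import java.util.List;

// Sketch only; the real logic lives in the two ServiceImpl classes listed above.
class ImportCountSketch {

    // Previously void; now returns the number of rows the database actually
    // affected. The body here is a hypothetical stand-in for the real upsert.
    int saveBatchWithUpsert(List<Object> batch) {
        int affectedRows = 0;
        // ... execute the batch upsert and sum the driver's update counts ...
        return affectedRows;
    }

    int importRecords(List<List<Object>> batches) {
        int actualSuccessCount = 0;
        for (List<Object> batch : batches) {
            // Accumulate rows the database actually wrote, instead of deriving
            // success as validRecords.size() - failures.size(), which can go
            // negative when writes fail after validation has already passed.
            actualSuccessCount += saveBatchWithUpsert(batch);
        }
        return actualSuccessCount;
    }
}
```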
Author: wkc
Date: 2026-02-08 17:18:18 +08:00
Parent: bb0d68c41d
Commit: 5ec5913759
2058 changed files with 234134 additions and 269 deletions


@@ -0,0 +1,16 @@
---
engines:
  duplication:
    enabled: true
    config:
      languages:
        - javascript
  eslint:
    enabled: true
  fixme:
    enabled: true
ratings:
  paths:
    - "lib/*.js"
exclude_paths:
  - "dist/*"


@@ -0,0 +1,8 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = space
indent_size = 4


@@ -0,0 +1,43 @@
"use strict";
module.exports = {
"env": {
"browser": true,
"commonjs": true,
"es2021": true,
"node": true
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": "latest"
},
"ignorePatterns": ["vendor/*.js", "dist/*.js", "test/jquery-1.8.3.min.js"],
"rules": {
"indent": [
"error",
4
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"double"
],
"semi": [
"error",
"always"
],
"curly": "error",
"eqeqeq": "error",
"no-new": "error",
"no-caller": "error",
"guard-for-in": "error",
"no-extend-native": "error",
"strict": [
"error",
"global"
],
}
};


@@ -0,0 +1,58 @@
name: pull-request

on:
  pull_request:
    branches: [ main ]
  push:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-node@v2
      with:
        node-version: 'lts/*'
        cache: 'npm'
    - name: Cache Node modules
      uses: actions/cache@v3
      id: npm-cache
      with:
        path: ~/.npm
        key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
        restore-keys: |
          ${{ runner.os }}-node-
    - name: Install dependencies
      run: |
        npm install
        sudo npx playwright install-deps
    - name: Lint
      run: npm run lint
    - name: Test
      run: npm test
    - name: Benchmark
      run: npm run benchmark | tee benchmark.txt
    - name: Download previous benchmark data
      uses: actions/cache@v3
      with:
        path: ./cache
        key: ${{ runner.os }}-benchmark
    - name: Store benchmark result
      uses: benchmark-action/github-action-benchmark@v1
      with:
        tool: 'benchmarkjs'
        output-file-path: benchmark.txt
        external-data-json-path: ./cache/benchmark-data.json
        github-token: ${{ secrets.GITHUB_TOKEN }}
        alert-threshold: '150%'
        comment-on-alert: true
        fail-on-alert: true
        alert-comment-cc-users: '@Stuk'

Binary file not shown.


@@ -0,0 +1,17 @@
language: node_js
sudo: false
matrix:
  include:
    - node_js: "stable"
      env: COMMAND=lint
    - node_js: "lts/*"
      env: COMMAND=test-node
    - node_js: "stable"
      env: COMMAND=test-node
    - node_js: "stable"
      env: COMMAND=test-browser
env:
  global:
    - secure: MhA8GHU42X3GWTUMaqdZVvarx4BMjhQCUGNi3kvuD/iCmKVb7gMwj4jbds7AcJdsCRsRk8bBGzZs/E7HidBJMPDa5DhgLKy9EV1s42JlHq8lVzbJeWIGgrtyJvhVUkGRy2OJjnDSgh3U6elkQmvDn74jreSQc6m/yGoPFF1nqq8=
    - secure: qREw6aUu2DnB+2reMuHgygSkumRiJvt7Z5Fz4uEVoraqbe65e4PGhtzypr9uIgCN43vxS2D5tAIeDbfid5VQrWFUQnrC9O5Z5qgVPsKN94zZ1tvYurXI4wRlAg58nNjkfGXWhLI3VUjjDTp5gYcMqgfe5hpEFYUPnUQkKGnaqAk=
script: npm run $COMMAND


@@ -0,0 +1,204 @@
---
title: Changelog
layout: default
section: main
---
### v3.10.1 2022-08-02
- Add sponsorship files.
  + If you appreciate the time spent maintaining JSZip then I would really appreciate [your sponsorship](https://github.com/sponsors/Stuk).
- Consolidate metadata types and expose OnUpdateCallback [#851](https://github.com/Stuk/jszip/pull/851) and [#852](https://github.com/Stuk/jszip/pull/852)
- Use `const` instead of `var` in the example in README.markdown [#828](https://github.com/Stuk/jszip/pull/828)
- Switch manual download link to HTTPS [#839](https://github.com/Stuk/jszip/pull/839)
Internals:
- Replace jshint with eslint [#842](https://github.com/Stuk/jszip/pull/842)
- Add performance tests [#834](https://github.com/Stuk/jszip/pull/834)
### v3.10.0 2022-05-20
- Change the setimmediate dependency to a more efficient one. Fixes https://github.com/Stuk/jszip/issues/617 (see [#829](https://github.com/Stuk/jszip/pull/829))
- Update types of `currentFile` metadata to include `null` (see [#826](https://github.com/Stuk/jszip/pull/826))
### v3.9.1 2022-04-06
- Fix recursive definition of `InputFileFormat` introduced in 3.9.0.
### v3.9.0 2022-04-04
- Update types JSZip#loadAsync to accept a promise for data, and remove arguments from `new JSZip()` (see [#752](https://github.com/Stuk/jszip/pull/752))
- Update types for `compressionOptions` to JSZipFileOptions and JSZipGeneratorOptions (see [#722](https://github.com/Stuk/jszip/pull/722))
- Add types for `generateInternalStream` (see [#774](https://github.com/Stuk/jszip/pull/774))
### v3.8.0 2022-03-30
- Sanitize filenames when files are loaded with `loadAsync`, to avoid ["zip slip" attacks](https://snyk.io/research/zip-slip-vulnerability). The original filename is available on each zip entry as `unsafeOriginalName`. See the [documentation](https://stuk.github.io/jszip/documentation/api_jszip/load_async.html). Many thanks to McCaulay Hudson for reporting.
### v3.7.1 2021-08-05
- Fix build of `dist` files.
  + Note: this version ensures the changes from 3.7.0 are actually included in the `dist` files. Thanks to Evan W for reporting.
### v3.7.0 2021-07-23
- Fix: Use a null prototype object for this.files (see [#766](https://github.com/Stuk/jszip/pull/766))
  + This change might break existing code if it uses prototype methods on the `.files` property of a zip object, for example `zip.files.toString()`. This approach is taken to prevent files in the zip overriding object methods that would exist on a normal object.
### v3.6.0 2021-02-09
- Fix: redirect main to dist on browsers (see [#742](https://github.com/Stuk/jszip/pull/742))
- Fix duplicate require DataLengthProbe, utils (see [#734](https://github.com/Stuk/jszip/pull/734))
- Fix small error in read_zip.md (see [#703](https://github.com/Stuk/jszip/pull/703))
### v3.5.0 2020-05-31
- Fix 'End of data reached' error when file extra field is invalid (see [#544](https://github.com/Stuk/jszip/pull/544)).
- Typescript definitions: Add null to return types of functions that may return null (see [#669](https://github.com/Stuk/jszip/pull/669)).
- Typescript definitions: Correct nodeStream's type (see [#682](https://github.com/Stuk/jszip/pull/682))
- Typescript definitions: Add string output type (see [#666](https://github.com/Stuk/jszip/pull/666))
### v3.4.0 2020-04-19
- Add Typescript type definitions (see [#601](https://github.com/Stuk/jszip/pull/601)).
### v3.3.0 2020-04-01
- Change browser module resolution to support Angular packager (see [#614](https://github.com/Stuk/jszip/pull/614)).
### v3.2.2 2019-07-04
- No public changes, but a number of testing dependencies have been updated.
- Tested browsers are now: Internet Explorer 11, Chrome (most recent) and Firefox (most recent). Other browsers (specifically Safari) are still supported however testing them on Saucelabs is broken and so they were removed from the test matrix.
### v3.2.1 2019-03-22
- Corrected built dist files
### v3.2.0 2019-02-21
- Update dependencies to reduce bundle size (see [#532](https://github.com/Stuk/jszip/pull/532)).
- Fix deprecated Buffer constructor usage and add safeguards (see [#506](https://github.com/Stuk/jszip/pull/506)).
### v3.1.5 2017-11-09
- Fix IE11 memory leak (see [#429](https://github.com/Stuk/jszip/pull/429)).
- Handle 2 nodejs deprecations (see [#459](https://github.com/Stuk/jszip/pull/459)).
- Improve the "unsupported format" error message (see [#461](https://github.com/Stuk/jszip/pull/461)).
- Improve webworker compatibility (see [#468](https://github.com/Stuk/jszip/pull/468)).
- Fix nodejs 0.10 compatibility (see [#480](https://github.com/Stuk/jszip/pull/480)).
- Improve the error without type in async() (see [#481](https://github.com/Stuk/jszip/pull/481)).
### v3.1.4 2017-08-24
- consistently use our own utils object for inheritance (see [#395](https://github.com/Stuk/jszip/pull/395)).
- lower the memory consumption in `generate*` with a lot of files (see [#449](https://github.com/Stuk/jszip/pull/449)).
### v3.1.3 2016-10-06
- instanceof failing in window / iframe contexts (see [#350](https://github.com/Stuk/jszip/pull/350)).
- remove a copy with blob output (see [#357](https://github.com/Stuk/jszip/pull/357)).
- fix crc32 check for empty entries (see [#358](https://github.com/Stuk/jszip/pull/358)).
- fix the base64 error message with data uri (see [#359](https://github.com/Stuk/jszip/pull/359)).
### v3.1.2 2016-08-23
- fix support of nodejs `process.platform` in `generate*` methods (see [#335](https://github.com/Stuk/jszip/pull/335)).
- improve browserify/webpack support (see [#333](https://github.com/Stuk/jszip/pull/333)).
- partial support of a promise of text (see [#337](https://github.com/Stuk/jszip/pull/337)).
- fix streamed zip files containing folders (see [#342](https://github.com/Stuk/jszip/pull/342)).
### v3.1.1 2016-08-08
- Use a hard-coded JSZip.version, fix an issue with webpack (see [#328](https://github.com/Stuk/jszip/pull/328)).
### v3.1.0 2016-08-03
- utils.delay: use macro tasks instead of micro tasks (see [#288](https://github.com/Stuk/jszip/pull/288)).
- Harden base64 decode (see [#316](https://github.com/Stuk/jszip/pull/316)).
- Add JSZip.version and the version in the header (see [#317](https://github.com/Stuk/jszip/pull/317)).
- Support Promise(Blob) (see [#318](https://github.com/Stuk/jszip/pull/318)).
- Change JSZip.external.Promise implementation (see [#321](https://github.com/Stuk/jszip/pull/321)).
- Update pako to v1.0.2 to fix a DEFLATE bug (see [#322](https://github.com/Stuk/jszip/pull/322)).
### v3.0.0 2016-04-13
This release changes a lot of methods, please see [the upgrade guide](http://stuk.github.io/jszip/documentation/upgrade_guide.html).
- replace sync getters and `generate()` with async methods (see [#195](https://github.com/Stuk/jszip/pull/195)).
- support nodejs streams (in `file()` and `generateAsync()`).
- support Blob and Promise in `file()` and `loadAsync()` (see [#275](https://github.com/Stuk/jszip/pull/275)).
- add `support.nodestream`.
- zip.filter: remove the defensive copy.
- remove the deprecated API (see [#253](https://github.com/Stuk/jszip/pull/253)).
- `type` is now mandatory in `generateAsync()`.
- change the createFolders default value (now `true`).
- Dates: use UTC instead of the local timezone.
- Add `base64` and `array` as possible output type.
- Add a forEach method.
- Drop node 0.8 support (see [#270](https://github.com/Stuk/jszip/pull/270)).
### v2.6.1 2016-07-28
- update pako to v1.0.2 to fix a DEFLATE bug (see [#322](https://github.com/Stuk/jszip/pull/322)).
### v2.6.0 2016-03-23
- publish `dist/` files in the npm package (see [#225](https://github.com/Stuk/jszip/pull/225)).
- update pako to v1.0.0 (see [#261](https://github.com/Stuk/jszip/pull/261)).
- add support of Array in JSZip#load (see [#252](https://github.com/Stuk/jszip/pull/252)).
- improve file name / comment encoding support (see [#211](https://github.com/Stuk/jszip/pull/211)).
- handle prepended data (see [#266](https://github.com/Stuk/jszip/pull/266)).
- improve platform coverage in tests (see [#233](https://github.com/Stuk/jszip/pull/233) and [#269](https://github.com/Stuk/jszip/pull/269)).
### v2.5.0 2015-03-10
- add support for custom mime-types (see [#199](https://github.com/Stuk/jszip/issues/199)).
- add an option to set the DEFLATE level (see [#201](https://github.com/Stuk/jszip/issues/201)).
- improve the error message with corrupted zip (see [#202](https://github.com/Stuk/jszip/issues/202)).
- add support for UNIX / DOS permissions (see [#200](https://github.com/Stuk/jszip/issues/200) and [#205](https://github.com/Stuk/jszip/issues/205)).
### v2.4.0 2014-07-24
- update pako to 0.2.5 (see [#156](https://github.com/Stuk/jszip/issues/156)).
- make JSZip work in a Firefox addon context (see [#151](https://github.com/Stuk/jszip/issues/151)).
- add an option (`createFolders`) to control the subfolder generation (see [#154](https://github.com/Stuk/jszip/issues/154)).
- allow `Buffer` polyfill in the browser (see [#139](https://github.com/Stuk/jszip/issues/139)).
### v2.3.0 2014-06-18
- don't generate subfolders (see [#130](https://github.com/Stuk/jszip/issues/130)).
- add comment support (see [#134](https://github.com/Stuk/jszip/issues/134)).
- on `ZipObject#options`, the attributes `date` and `dir` have been deprecated and are now on `ZipObject` (see [the upgrade guide](http://stuk.github.io/jszip/documentation/upgrade_guide.html)).
- on `ZipObject#options`, the attributes `base64` and `binary` have been deprecated (see [the upgrade guide](http://stuk.github.io/jszip/documentation/upgrade_guide.html)).
- deprecate internal functions exposed in the public API (see [#123](https://github.com/Stuk/jszip/issues/123)).
- improve UTF-8 support (see [#142](https://github.com/Stuk/jszip/issues/142)).
### v2.2.2, 2014-05-01
- update pako to v0.2.1, fix an error when decompressing some files (see [#126](https://github.com/Stuk/jszip/issues/126)).
### v2.2.1, 2014-04-23
- fix unreadable generated file on Windows 8 (see [#112](https://github.com/Stuk/jszip/issues/112)).
- replace zlibjs with pako.
### v2.2.0, 2014-02-25
- make the `new` operator optional before the `JSZip` constructor (see [#93](https://github.com/Stuk/jszip/pull/93)).
- update zlibjs to v0.2.0.
### v2.1.1, 2014-02-13
- use the npm package for zlib.js instead of the github url.
### v2.1.0, 2014-02-06
- split the files and use Browserify to generate the final file (see [#74](https://github.com/Stuk/jszip/pull/74))
- packaging change: instead of 4 files (jszip.js, jszip-load.js, jszip-inflate.js, jszip-deflate.js) we now have 2 files: dist/jszip.js and dist/jszip.min.js
- add component/bower support
- rename variable: 'byte' is a reserved word (see [#76](https://github.com/Stuk/jszip/pull/76))
- add support for the unicode path extra field (see [#82](https://github.com/Stuk/jszip/pull/82))
- ensure that the generated files have a header with the licenses (see [#80](https://github.com/Stuk/jszip/pull/80))
### v2.0.0, 2013-10-20
- `JSZipBase64` has been renamed to `JSZip.base64`.
- The `data` attribute on the object returned by `zip.file(name)` has been removed. Use `asText()`, `asBinary()`, `asUint8Array()`, `asArrayBuffer()` or `asNodeBuffer()`.
- [Fix issue with Android browser](https://github.com/Stuk/jszip/pull/60)
- The compression/decompression methods now give their input type with the `compressInputType` and `uncompressInputType` attributes.
- Lazily decompress data when needed and [improve performance in general](https://github.com/Stuk/jszip/pull/56)
- [Add support for `Buffer` in Node.js](https://github.com/Stuk/jszip/pull/57).
- Package for CommonJS/npm.
### v1.0.1, 2013-03-04
- Fixed an issue when generating a compressed zip file with empty files or folders, see #33.
- With bad data (null or undefined), asText/asBinary/asUint8Array/asArrayBuffer methods now return an empty string, see #36.
### v1.0.0, 2013-02-14
- First release after a long period without version numbers.


@@ -0,0 +1,651 @@
JSZip is dual licensed. At your choice you may use it under the MIT license *or* the GPLv3
license.
The MIT License
===============
Copyright (c) 2009-2016 Stuart Knightley, David Duponchel, Franz Buchinger, António Afonso
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
GPL version 3
=============
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS


@@ -0,0 +1,33 @@
JSZip
=====
A library for creating, reading and editing .zip files with JavaScript, with a
lovely and simple API.
See https://stuk.github.io/jszip for all the documentation.
```javascript
const zip = new JSZip();
zip.file("Hello.txt", "Hello World\n");
const img = zip.folder("images");
img.file("smile.gif", imgData, {base64: true});
zip.generateAsync({type:"blob"}).then(function(content) {
    // see FileSaver.js
    saveAs(content, "example.zip");
});

/*
Results in a zip containing
Hello.txt
images/
    smile.gif
*/
```
License
-------
JSZip is dual-licensed. You may use it under the MIT license *or* the GPLv3
license. See [LICENSE.markdown](LICENSE.markdown).


@@ -0,0 +1,37 @@
"use strict";
const madge = require("madge");
const path = require("path");
const ts = require("typescript");
const DIR = "./lib";
async function main() {
const tsconfigPath = ts.findConfigFile("./", ts.sys.fileExists);
const tsconfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile).config;
const parsedConfig = ts.parseJsonConfigFileContent(tsconfig, ts.sys, "./");
const typedPaths = parsedConfig.fileNames.map(filename => path.resolve("./", filename));
const excludeTypedPaths = path => !typedPaths.includes(path);
const res = await madge(DIR, {
dependencyFilter: excludeTypedPaths
});
const untypedLeaves = res.leaves().map(filename => path.resolve(DIR, filename)).filter(excludeTypedPaths);
if (untypedLeaves.length) {
console.log("Convert next:");
console.log(untypedLeaves.join("\n"));
} else {
console.log("No untyped leaf dependencies found.");
console.log("Try looking at circular dependencies, or the image to decide what to convert next:");
const untypedCircular = res.circular().flat().map(filename => path.resolve(DIR, filename)).filter(excludeTypedPaths);
console.log(untypedCircular.join("\n"));
}
const imagePath = await res.image("graph.svg");
console.log();
console.log("Image written to " + imagePath);
}
main();


@@ -0,0 +1,601 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 5.0.0 (20220707.1540)
-->
<!-- Title: G Pages: 1 -->
<svg width="1779pt" height="983pt"
viewBox="0.00 0.00 1779.20 982.66" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(21.6 961.06)">
<title>G</title>
<polygon fill="#111111" stroke="transparent" points="-21.6,21.6 -21.6,-961.06 1757.6,-961.06 1757.6,21.6 -21.6,21.6"/>
<!-- base64.js -->
<g id="node1" class="node">
<title>base64.js</title>
<path fill="none" stroke="#cfffac" d="M119.83,-306C119.83,-306 61.17,-306 61.17,-306 57.33,-306 53.5,-302.17 53.5,-298.33 53.5,-298.33 53.5,-290.67 53.5,-290.67 53.5,-286.83 57.33,-283 61.17,-283 61.17,-283 119.83,-283 119.83,-283 123.67,-283 127.5,-286.83 127.5,-290.67 127.5,-290.67 127.5,-298.33 127.5,-298.33 127.5,-302.17 123.67,-306 119.83,-306"/>
<text text-anchor="middle" x="90.5" y="-290.8" font-family="Arial" font-size="14.00" fill="#cfffac">base64.js</text>
</g>
<!-- compressedObject.js -->
<g id="node2" class="node">
<title>compressedObject.js</title>
<path fill="none" stroke="#c6c5fe" d="M1141.33,-296C1141.33,-296 1012.67,-296 1012.67,-296 1008.83,-296 1005,-292.17 1005,-288.33 1005,-288.33 1005,-280.67 1005,-280.67 1005,-276.83 1008.83,-273 1012.67,-273 1012.67,-273 1141.33,-273 1141.33,-273 1145.17,-273 1149,-276.83 1149,-280.67 1149,-280.67 1149,-288.33 1149,-288.33 1149,-292.17 1145.17,-296 1141.33,-296"/>
<text text-anchor="middle" x="1077" y="-280.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">compressedObject.js</text>
</g>
<!-- stream/Crc32Probe.js -->
<g id="node3" class="node">
<title>stream/Crc32Probe.js</title>
<path fill="none" stroke="#c6c5fe" d="M1348.83,-415C1348.83,-415 1216.17,-415 1216.17,-415 1212.33,-415 1208.5,-411.17 1208.5,-407.33 1208.5,-407.33 1208.5,-399.67 1208.5,-399.67 1208.5,-395.83 1212.33,-392 1216.17,-392 1216.17,-392 1348.83,-392 1348.83,-392 1352.67,-392 1356.5,-395.83 1356.5,-399.67 1356.5,-399.67 1356.5,-407.33 1356.5,-407.33 1356.5,-411.17 1352.67,-415 1348.83,-415"/>
<text text-anchor="middle" x="1282.5" y="-399.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/Crc32Probe.js</text>
</g>
<!-- compressedObject.js&#45;&gt;stream/Crc32Probe.js -->
<g id="edge1" class="edge">
<title>compressedObject.js&#45;&gt;stream/Crc32Probe.js</title>
<path fill="none" stroke="#757575" d="M1089.24,-296.19C1109.84,-316.93 1155.86,-360.17 1203,-383.5 1206.76,-385.36 1210.7,-387.06 1214.75,-388.61"/>
<polygon fill="#757575" stroke="#757575" points="1213.71,-391.95 1224.3,-391.95 1216.02,-385.35 1213.71,-391.95"/>
</g>
<!-- stream/DataLengthProbe.js -->
<g id="node4" class="node">
<title>stream/DataLengthProbe.js</title>
<path fill="none" stroke="#c6c5fe" d="M1605.33,-337C1605.33,-337 1438.67,-337 1438.67,-337 1434.83,-337 1431,-333.17 1431,-329.33 1431,-329.33 1431,-321.67 1431,-321.67 1431,-317.83 1434.83,-314 1438.67,-314 1438.67,-314 1605.33,-314 1605.33,-314 1609.17,-314 1613,-317.83 1613,-321.67 1613,-321.67 1613,-329.33 1613,-329.33 1613,-333.17 1609.17,-337 1605.33,-337"/>
<text text-anchor="middle" x="1522" y="-321.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/DataLengthProbe.js</text>
</g>
<!-- compressedObject.js&#45;&gt;stream/DataLengthProbe.js -->
<g id="edge2" class="edge">
<title>compressedObject.js&#45;&gt;stream/DataLengthProbe.js</title>
<path fill="none" stroke="#757575" d="M1146.96,-296.01C1165.15,-298.77 1184.78,-301.49 1203,-303.5 1275.56,-311.51 1357.77,-317.11 1420.42,-320.65"/>
<polygon fill="#757575" stroke="#757575" points="1420.52,-324.16 1430.7,-321.22 1420.91,-317.17 1420.52,-324.16"/>
</g>
<!-- stream/DataWorker.js -->
<g id="node5" class="node">
<title>stream/DataWorker.js</title>
<path fill="none" stroke="#c6c5fe" d="M1348.33,-294C1348.33,-294 1216.67,-294 1216.67,-294 1212.83,-294 1209,-290.17 1209,-286.33 1209,-286.33 1209,-278.67 1209,-278.67 1209,-274.83 1212.83,-271 1216.67,-271 1216.67,-271 1348.33,-271 1348.33,-271 1352.17,-271 1356,-274.83 1356,-278.67 1356,-278.67 1356,-286.33 1356,-286.33 1356,-290.17 1352.17,-294 1348.33,-294"/>
<text text-anchor="middle" x="1282.5" y="-278.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/DataWorker.js</text>
</g>
<!-- compressedObject.js&#45;&gt;stream/DataWorker.js -->
<g id="edge3" class="edge">
<title>compressedObject.js&#45;&gt;stream/DataWorker.js</title>
<path fill="none" stroke="#757575" d="M1149.17,-283.8C1165.22,-283.64 1182.37,-283.47 1198.84,-283.31"/>
<polygon fill="#757575" stroke="#757575" points="1199.02,-286.81 1208.99,-283.21 1198.95,-279.81 1199.02,-286.81"/>
</g>
<!-- crc32.js -->
<g id="node8" class="node">
<title>crc32.js</title>
<path fill="none" stroke="#c6c5fe" d="M1545.83,-495C1545.83,-495 1498.17,-495 1498.17,-495 1494.33,-495 1490.5,-491.17 1490.5,-487.33 1490.5,-487.33 1490.5,-479.67 1490.5,-479.67 1490.5,-475.83 1494.33,-472 1498.17,-472 1498.17,-472 1545.83,-472 1545.83,-472 1549.67,-472 1553.5,-475.83 1553.5,-479.67 1553.5,-479.67 1553.5,-487.33 1553.5,-487.33 1553.5,-491.17 1549.67,-495 1545.83,-495"/>
<text text-anchor="middle" x="1522" y="-479.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">crc32.js</text>
</g>
<!-- stream/Crc32Probe.js&#45;&gt;crc32.js -->
<g id="edge42" class="edge">
<title>stream/Crc32Probe.js&#45;&gt;crc32.js</title>
<path fill="none" stroke="#757575" d="M1317.78,-415.05C1360.98,-429.6 1435.12,-454.57 1480.9,-469.99"/>
<polygon fill="#757575" stroke="#757575" points="1479.91,-473.35 1490.5,-473.23 1482.14,-466.72 1479.91,-473.35"/>
</g>
<!-- utils.js -->
<g id="node9" class="node">
<title>utils.js</title>
<path fill="none" stroke="#cfffac" d="M1728.33,-495C1728.33,-495 1689.67,-495 1689.67,-495 1685.83,-495 1682,-491.17 1682,-487.33 1682,-487.33 1682,-479.67 1682,-479.67 1682,-475.83 1685.83,-472 1689.67,-472 1689.67,-472 1728.33,-472 1728.33,-472 1732.17,-472 1736,-475.83 1736,-479.67 1736,-479.67 1736,-487.33 1736,-487.33 1736,-491.17 1732.17,-495 1728.33,-495"/>
<text text-anchor="middle" x="1709" y="-479.8" font-family="Arial" font-size="14.00" fill="#cfffac">utils.js</text>
</g>
<!-- stream/Crc32Probe.js&#45;&gt;utils.js -->
<g id="edge43" class="edge">
<title>stream/Crc32Probe.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1356.76,-395.52C1432.27,-390.27 1551.91,-390.38 1646,-429.5 1663.95,-436.96 1680.34,-451.77 1691.76,-463.99"/>
<polygon fill="#757575" stroke="#757575" points="1689.24,-466.42 1698.51,-471.54 1694.46,-461.75 1689.24,-466.42"/>
</g>
<!-- stream/DataLengthProbe.js&#45;&gt;utils.js -->
<g id="edge44" class="edge">
<title>stream/DataLengthProbe.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1613.06,-335.12C1624.95,-339.19 1636.34,-344.81 1646,-352.5 1680.91,-380.29 1697.21,-432.45 1703.92,-461.87"/>
<polygon fill="#757575" stroke="#757575" points="1700.56,-462.88 1706.05,-471.94 1707.41,-461.43 1700.56,-462.88"/>
</g>
<!-- stream/DataWorker.js&#45;&gt;utils.js -->
<g id="edge45" class="edge">
<title>stream/DataWorker.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1328.44,-270.94C1402.75,-254.67 1552.34,-234.37 1646,-305.5 1695.57,-343.15 1705.71,-423.13 1707.64,-461.74"/>
<polygon fill="#757575" stroke="#757575" points="1704.15,-462.02 1708.01,-471.89 1711.15,-461.77 1704.15,-462.02"/>
</g>
<!-- compressions.js -->
<g id="node6" class="node">
<title>compressions.js</title>
<path fill="none" stroke="#c6c5fe" d="M1126.33,-608C1126.33,-608 1027.67,-608 1027.67,-608 1023.83,-608 1020,-604.17 1020,-600.33 1020,-600.33 1020,-592.67 1020,-592.67 1020,-588.83 1023.83,-585 1027.67,-585 1027.67,-585 1126.33,-585 1126.33,-585 1130.17,-585 1134,-588.83 1134,-592.67 1134,-592.67 1134,-600.33 1134,-600.33 1134,-604.17 1130.17,-608 1126.33,-608"/>
<text text-anchor="middle" x="1077" y="-592.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">compressions.js</text>
</g>
<!-- flate.js -->
<g id="node7" class="node">
<title>flate.js</title>
<path fill="none" stroke="#c6c5fe" d="M1542.33,-608C1542.33,-608 1501.67,-608 1501.67,-608 1497.83,-608 1494,-604.17 1494,-600.33 1494,-600.33 1494,-592.67 1494,-592.67 1494,-588.83 1497.83,-585 1501.67,-585 1501.67,-585 1542.33,-585 1542.33,-585 1546.17,-585 1550,-588.83 1550,-592.67 1550,-592.67 1550,-600.33 1550,-600.33 1550,-604.17 1546.17,-608 1542.33,-608"/>
<text text-anchor="middle" x="1522" y="-592.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">flate.js</text>
</g>
<!-- compressions.js&#45;&gt;flate.js -->
<g id="edge4" class="edge">
<title>compressions.js&#45;&gt;flate.js</title>
<path fill="none" stroke="#757575" d="M1134.11,-596.5C1225.94,-596.5 1405.91,-596.5 1483.7,-596.5"/>
<polygon fill="#757575" stroke="#757575" points="1483.98,-600 1493.98,-596.5 1483.98,-593 1483.98,-600"/>
</g>
<!-- flate.js&#45;&gt;utils.js -->
<g id="edge6" class="edge">
<title>flate.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1550.09,-596.32C1576.55,-594.94 1616.97,-589.52 1646,-570.5 1670.69,-554.33 1688.64,-524.65 1698.69,-504.5"/>
<polygon fill="#757575" stroke="#757575" points="1701.97,-505.75 1703.1,-495.22 1695.65,-502.75 1701.97,-505.75"/>
</g>
<!-- crc32.js&#45;&gt;utils.js -->
<g id="edge5" class="edge">
<title>crc32.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1553.55,-483.5C1586.01,-483.5 1637.3,-483.5 1671.78,-483.5"/>
<polygon fill="#757575" stroke="#757575" points="1671.9,-487 1681.9,-483.5 1671.9,-480 1671.9,-487"/>
</g>
<!-- defaults.js -->
<g id="node10" class="node">
<title>defaults.js</title>
<path fill="none" stroke="#cfffac" d="M121.33,-388C121.33,-388 59.67,-388 59.67,-388 55.83,-388 52,-384.17 52,-380.33 52,-380.33 52,-372.67 52,-372.67 52,-368.83 55.83,-365 59.67,-365 59.67,-365 121.33,-365 121.33,-365 125.17,-365 129,-368.83 129,-372.67 129,-372.67 129,-380.33 129,-380.33 129,-384.17 125.17,-388 121.33,-388"/>
<text text-anchor="middle" x="90.5" y="-372.8" font-family="Arial" font-size="14.00" fill="#cfffac">defaults.js</text>
</g>
<!-- external.js -->
<g id="node11" class="node">
<title>external.js</title>
<path fill="none" stroke="#cfffac" d="M121.83,-429C121.83,-429 59.17,-429 59.17,-429 55.33,-429 51.5,-425.17 51.5,-421.33 51.5,-421.33 51.5,-413.67 51.5,-413.67 51.5,-409.83 55.33,-406 59.17,-406 59.17,-406 121.83,-406 121.83,-406 125.67,-406 129.5,-409.83 129.5,-413.67 129.5,-413.67 129.5,-421.33 129.5,-421.33 129.5,-425.17 125.67,-429 121.83,-429"/>
<text text-anchor="middle" x="90.5" y="-413.8" font-family="Arial" font-size="14.00" fill="#cfffac">external.js</text>
</g>
<!-- generate/ZipFileWorker.js -->
<g id="node12" class="node">
<title>generate/ZipFileWorker.js</title>
<path fill="none" stroke="#c6c5fe" d="M1154.33,-490C1154.33,-490 999.67,-490 999.67,-490 995.83,-490 992,-486.17 992,-482.33 992,-482.33 992,-474.67 992,-474.67 992,-470.83 995.83,-467 999.67,-467 999.67,-467 1154.33,-467 1154.33,-467 1158.17,-467 1162,-470.83 1162,-474.67 1162,-474.67 1162,-482.33 1162,-482.33 1162,-486.17 1158.17,-490 1154.33,-490"/>
<text text-anchor="middle" x="1077" y="-474.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">generate/ZipFileWorker.js</text>
</g>
<!-- generate/ZipFileWorker.js&#45;&gt;crc32.js -->
<g id="edge7" class="edge">
<title>generate/ZipFileWorker.js&#45;&gt;crc32.js</title>
<path fill="none" stroke="#757575" d="M1162.11,-479.45C1257.3,-480.52 1408.45,-482.23 1480.38,-483.04"/>
<polygon fill="#757575" stroke="#757575" points="1480.36,-486.54 1490.4,-483.15 1480.44,-479.54 1480.36,-486.54"/>
</g>
<!-- generate/ZipFileWorker.js&#45;&gt;utils.js -->
<g id="edge9" class="edge">
<title>generate/ZipFileWorker.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1160.91,-466.95C1272.08,-453.58 1475.22,-436.64 1646,-463.5 1654.67,-464.86 1663.79,-467.24 1672.28,-469.89"/>
<polygon fill="#757575" stroke="#757575" points="1671.3,-473.25 1681.89,-473.08 1673.5,-466.61 1671.3,-473.25"/>
</g>
<!-- utf8.js -->
<g id="node13" class="node">
<title>utf8.js</title>
<path fill="none" stroke="#c6c5fe" d="M1301.83,-374C1301.83,-374 1263.17,-374 1263.17,-374 1259.33,-374 1255.5,-370.17 1255.5,-366.33 1255.5,-366.33 1255.5,-358.67 1255.5,-358.67 1255.5,-354.83 1259.33,-351 1263.17,-351 1263.17,-351 1301.83,-351 1301.83,-351 1305.67,-351 1309.5,-354.83 1309.5,-358.67 1309.5,-358.67 1309.5,-366.33 1309.5,-366.33 1309.5,-370.17 1305.67,-374 1301.83,-374"/>
<text text-anchor="middle" x="1282.5" y="-358.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">utf8.js</text>
</g>
<!-- generate/ZipFileWorker.js&#45;&gt;utf8.js -->
<g id="edge8" class="edge">
<title>generate/ZipFileWorker.js&#45;&gt;utf8.js</title>
<path fill="none" stroke="#757575" d="M1095.05,-466.9C1113.21,-454.39 1142.63,-433.7 1167,-414.5 1183.59,-401.44 1184.33,-393.37 1203,-383.5 1216.02,-376.62 1231.48,-371.86 1245.21,-368.63"/>
<polygon fill="#757575" stroke="#757575" points="1246.28,-371.98 1255.32,-366.45 1244.8,-365.14 1246.28,-371.98"/>
</g>
<!-- utf8.js&#45;&gt;utils.js -->
<g id="edge49" class="edge">
<title>utf8.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1309.66,-361.43C1383.03,-358.9 1589.11,-355.01 1646,-388.5 1674.11,-405.05 1691.8,-439.95 1700.79,-462.53"/>
<polygon fill="#757575" stroke="#757575" points="1697.52,-463.79 1704.31,-471.93 1704.08,-461.34 1697.52,-463.79"/>
</g>
<!-- generate/index.js -->
<g id="node14" class="node">
<title>generate/index.js</title>
<path fill="none" stroke="#c6c5fe" d="M884.83,-490C884.83,-490 781.17,-490 781.17,-490 777.33,-490 773.5,-486.17 773.5,-482.33 773.5,-482.33 773.5,-474.67 773.5,-474.67 773.5,-470.83 777.33,-467 781.17,-467 781.17,-467 884.83,-467 884.83,-467 888.67,-467 892.5,-470.83 892.5,-474.67 892.5,-474.67 892.5,-482.33 892.5,-482.33 892.5,-486.17 888.67,-490 884.83,-490"/>
<text text-anchor="middle" x="833" y="-474.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">generate/index.js</text>
</g>
<!-- generate/index.js&#45;&gt;compressions.js -->
<g id="edge10" class="edge">
<title>generate/index.js&#45;&gt;compressions.js</title>
<path fill="none" stroke="#757575" d="M857.64,-490.03C901.11,-511.22 993.77,-556.4 1043.25,-580.53"/>
<polygon fill="#757575" stroke="#757575" points="1041.74,-583.69 1052.26,-584.92 1044.81,-577.4 1041.74,-583.69"/>
</g>
<!-- generate/index.js&#45;&gt;generate/ZipFileWorker.js -->
<g id="edge11" class="edge">
<title>generate/index.js&#45;&gt;generate/ZipFileWorker.js</title>
<path fill="none" stroke="#757575" d="M892.58,-478.5C919.35,-478.5 951.73,-478.5 981.79,-478.5"/>
<polygon fill="#757575" stroke="#757575" points="981.89,-482 991.89,-478.5 981.89,-475 981.89,-482"/>
</g>
<!-- index.js -->
<g id="node15" class="node">
<title>index.js</title>
<path fill="none" stroke="#c6c5fe" d="M113.83,-347C113.83,-347 67.17,-347 67.17,-347 63.33,-347 59.5,-343.17 59.5,-339.33 59.5,-339.33 59.5,-331.67 59.5,-331.67 59.5,-327.83 63.33,-324 67.17,-324 67.17,-324 113.83,-324 113.83,-324 117.67,-324 121.5,-327.83 121.5,-331.67 121.5,-331.67 121.5,-339.33 121.5,-339.33 121.5,-343.17 117.67,-347 113.83,-347"/>
<text text-anchor="middle" x="90.5" y="-331.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">index.js</text>
</g>
<!-- load.js -->
<g id="node16" class="node">
<title>load.js</title>
<path fill="none" stroke="#c6c5fe" d="M265.33,-416C265.33,-416 224.67,-416 224.67,-416 220.83,-416 217,-412.17 217,-408.33 217,-408.33 217,-400.67 217,-400.67 217,-396.83 220.83,-393 224.67,-393 224.67,-393 265.33,-393 265.33,-393 269.17,-393 273,-396.83 273,-400.67 273,-400.67 273,-408.33 273,-408.33 273,-412.17 269.17,-416 265.33,-416"/>
<text text-anchor="middle" x="245" y="-400.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">load.js</text>
</g>
<!-- index.js&#45;&gt;load.js -->
<g id="edge12" class="edge">
<title>index.js&#45;&gt;load.js</title>
<path fill="none" stroke="#757575" d="M121.56,-339.32C139.45,-342.37 162.23,-347.65 181,-356.5 196.81,-363.96 212.46,-375.85 224.2,-385.93"/>
<polygon fill="#757575" stroke="#757575" points="222.21,-388.84 232.01,-392.86 226.85,-383.6 222.21,-388.84"/>
</g>
<!-- object.js -->
<g id="node17" class="node">
<title>object.js</title>
<path fill="none" stroke="#c6c5fe" d="M638.83,-240C638.83,-240 588.17,-240 588.17,-240 584.33,-240 580.5,-236.17 580.5,-232.33 580.5,-232.33 580.5,-224.67 580.5,-224.67 580.5,-220.83 584.33,-217 588.17,-217 588.17,-217 638.83,-217 638.83,-217 642.67,-217 646.5,-220.83 646.5,-224.67 646.5,-224.67 646.5,-232.33 646.5,-232.33 646.5,-236.17 642.67,-240 638.83,-240"/>
<text text-anchor="middle" x="613.5" y="-224.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">object.js</text>
</g>
<!-- index.js&#45;&gt;object.js -->
<g id="edge13" class="edge">
<title>index.js&#45;&gt;object.js</title>
<path fill="none" stroke="#757575" d="M121.76,-329.28C210.16,-311.13 468.14,-258.15 570.22,-237.18"/>
<polygon fill="#757575" stroke="#757575" points="571.08,-240.58 580.17,-235.14 569.67,-233.72 571.08,-240.58"/>
</g>
<!-- load.js&#45;&gt;stream/Crc32Probe.js -->
<g id="edge14" class="edge">
<title>load.js&#45;&gt;stream/Crc32Probe.js</title>
<path fill="none" stroke="#757575" d="M273.16,-404.47C405.01,-404.35 980.76,-403.79 1198.06,-403.58"/>
<polygon fill="#757575" stroke="#757575" points="1198.14,-407.08 1208.14,-403.57 1198.14,-400.08 1198.14,-407.08"/>
</g>
<!-- load.js&#45;&gt;utils.js -->
<g id="edge16" class="edge">
<title>load.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M273.2,-411.94C344.82,-431.09 544.77,-481.85 715,-499.5 1016.95,-530.8 1094.44,-503.39 1398,-504.5 1508.22,-504.9 1537.24,-522.42 1646,-504.5 1654.7,-503.07 1663.83,-500.57 1672.32,-497.79"/>
<polygon fill="#757575" stroke="#757575" points="1673.64,-501.04 1681.94,-494.44 1671.34,-494.43 1673.64,-501.04"/>
</g>
<!-- load.js&#45;&gt;utf8.js -->
<g id="edge15" class="edge">
<title>load.js&#45;&gt;utf8.js</title>
<path fill="none" stroke="#757575" d="M273.18,-396.1C315.32,-383.84 399.17,-362.5 472,-362.5 472,-362.5 472,-362.5 834,-362.5 986.85,-362.5 1169.08,-362.5 1245.24,-362.5"/>
<polygon fill="#757575" stroke="#757575" points="1245.27,-366 1255.27,-362.5 1245.27,-359 1245.27,-366"/>
</g>
<!-- zipEntries.js -->
<g id="node19" class="node">
<title>zipEntries.js</title>
<path fill="none" stroke="#c6c5fe" d="M390.33,-648C390.33,-648 316.67,-648 316.67,-648 312.83,-648 309,-644.17 309,-640.33 309,-640.33 309,-632.67 309,-632.67 309,-628.83 312.83,-625 316.67,-625 316.67,-625 390.33,-625 390.33,-625 394.17,-625 398,-628.83 398,-632.67 398,-632.67 398,-640.33 398,-640.33 398,-644.17 394.17,-648 390.33,-648"/>
<text text-anchor="middle" x="353.5" y="-632.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">zipEntries.js</text>
</g>
<!-- load.js&#45;&gt;zipEntries.js -->
<g id="edge17" class="edge">
<title>load.js&#45;&gt;zipEntries.js</title>
<path fill="none" stroke="#757575" d="M251.28,-416C268.17,-452.8 321.05,-568 342.9,-615.6"/>
<polygon fill="#757575" stroke="#757575" points="339.83,-617.3 347.19,-624.93 346.2,-614.38 339.83,-617.3"/>
</g>
<!-- object.js&#45;&gt;compressedObject.js -->
<g id="edge20" class="edge">
<title>object.js&#45;&gt;compressedObject.js</title>
<path fill="none" stroke="#757575" d="M646.59,-232.39C717.9,-241.05 891.41,-262.1 994.73,-274.64"/>
<polygon fill="#757575" stroke="#757575" points="994.46,-278.13 1004.81,-275.86 995.31,-271.18 994.46,-278.13"/>
</g>
<!-- object.js&#45;&gt;utils.js -->
<g id="edge25" class="edge">
<title>object.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M637.38,-216.91C657.32,-207.43 687.31,-194.61 715,-188.5 871.92,-153.89 915.31,-168.5 1076,-168.5 1076,-168.5 1076,-168.5 1283.5,-168.5 1448.68,-168.5 1527.02,-135.92 1646,-250.5 1676.62,-279.99 1697.68,-409.34 1705.07,-461.54"/>
<polygon fill="#757575" stroke="#757575" points="1701.64,-462.29 1706.48,-471.71 1708.58,-461.33 1701.64,-462.29"/>
</g>
<!-- object.js&#45;&gt;utf8.js -->
<g id="edge24" class="edge">
<title>object.js&#45;&gt;utf8.js</title>
<path fill="none" stroke="#757575" d="M639.18,-240.05C659.22,-249 688.41,-261.09 715,-268.5 816.54,-296.78 1136.85,-342.48 1245.24,-357.52"/>
<polygon fill="#757575" stroke="#757575" points="1244.96,-361.02 1255.35,-358.92 1245.92,-354.08 1244.96,-361.02"/>
</g>
<!-- object.js&#45;&gt;generate/index.js -->
<g id="edge21" class="edge">
<title>object.js&#45;&gt;generate/index.js</title>
<path fill="none" stroke="#757575" d="M624.7,-240.22C659.23,-279.92 772.01,-409.55 815.08,-459.05"/>
<polygon fill="#757575" stroke="#757575" points="812.62,-461.56 821.82,-466.8 817.9,-456.96 812.62,-461.56"/>
</g>
<!-- nodejs/NodejsStreamInputAdapter.js -->
<g id="node20" class="node">
<title>nodejs/NodejsStreamInputAdapter.js</title>
<path fill="none" stroke="#c6c5fe" d="M943.33,-123C943.33,-123 722.67,-123 722.67,-123 718.83,-123 715,-119.17 715,-115.33 715,-115.33 715,-107.67 715,-107.67 715,-103.83 718.83,-100 722.67,-100 722.67,-100 943.33,-100 943.33,-100 947.17,-100 951,-103.83 951,-107.67 951,-107.67 951,-115.33 951,-115.33 951,-119.17 947.17,-123 943.33,-123"/>
<text text-anchor="middle" x="833" y="-107.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">nodejs/NodejsStreamInputAdapter.js</text>
</g>
<!-- object.js&#45;&gt;nodejs/NodejsStreamInputAdapter.js -->
<g id="edge22" class="edge">
<title>object.js&#45;&gt;nodejs/NodejsStreamInputAdapter.js</title>
<path fill="none" stroke="#757575" d="M627.66,-216.9C646.11,-201.09 681.22,-172.67 715,-154.5 736.24,-143.08 761.16,-133.45 782.53,-126.22"/>
<polygon fill="#757575" stroke="#757575" points="783.67,-129.53 792.07,-123.07 781.47,-122.88 783.67,-129.53"/>
</g>
<!-- stream/StreamHelper.js -->
<g id="node23" class="node">
<title>stream/StreamHelper.js</title>
<path fill="none" stroke="#c6c5fe" d="M1354.33,-66C1354.33,-66 1210.67,-66 1210.67,-66 1206.83,-66 1203,-62.17 1203,-58.33 1203,-58.33 1203,-50.67 1203,-50.67 1203,-46.83 1206.83,-43 1210.67,-43 1210.67,-43 1354.33,-43 1354.33,-43 1358.17,-43 1362,-46.83 1362,-50.67 1362,-50.67 1362,-58.33 1362,-58.33 1362,-62.17 1358.17,-66 1354.33,-66"/>
<text text-anchor="middle" x="1282.5" y="-50.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/StreamHelper.js</text>
</g>
<!-- object.js&#45;&gt;stream/StreamHelper.js -->
<g id="edge23" class="edge">
<title>object.js&#45;&gt;stream/StreamHelper.js</title>
<path fill="none" stroke="#757575" d="M618.86,-216.93C629.97,-189.27 662.27,-119.47 715,-90.5 875.6,-2.26 1101.77,-23.21 1213.53,-41.29"/>
<polygon fill="#757575" stroke="#757575" points="1213.22,-44.78 1223.66,-42.97 1214.37,-37.88 1213.22,-44.78"/>
</g>
<!-- zipObject.js -->
<g id="node24" class="node">
<title>zipObject.js</title>
<path fill="none" stroke="#c6c5fe" d="M868.83,-220C868.83,-220 797.17,-220 797.17,-220 793.33,-220 789.5,-216.17 789.5,-212.33 789.5,-212.33 789.5,-204.67 789.5,-204.67 789.5,-200.83 793.33,-197 797.17,-197 797.17,-197 868.83,-197 868.83,-197 872.67,-197 876.5,-200.83 876.5,-204.67 876.5,-204.67 876.5,-212.33 876.5,-212.33 876.5,-216.17 872.67,-220 868.83,-220"/>
<text text-anchor="middle" x="833" y="-204.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">zipObject.js</text>
</g>
<!-- object.js&#45;&gt;zipObject.js -->
<g id="edge26" class="edge">
<title>object.js&#45;&gt;zipObject.js</title>
<path fill="none" stroke="#757575" d="M646.59,-225.55C681.45,-222.34 737.69,-217.17 779.1,-213.36"/>
<polygon fill="#757575" stroke="#757575" points="779.65,-216.83 789.29,-212.43 779.01,-209.86 779.65,-216.83"/>
</g>
<!-- license_header.js -->
<g id="node18" class="node">
<title>license_header.js</title>
<path fill="none" stroke="#cfffac" d="M143.33,-470C143.33,-470 37.67,-470 37.67,-470 33.83,-470 30,-466.17 30,-462.33 30,-462.33 30,-454.67 30,-454.67 30,-450.83 33.83,-447 37.67,-447 37.67,-447 143.33,-447 143.33,-447 147.17,-447 151,-450.83 151,-454.67 151,-454.67 151,-462.33 151,-462.33 151,-466.17 147.17,-470 143.33,-470"/>
<text text-anchor="middle" x="90.5" y="-454.8" font-family="Arial" font-size="14.00" fill="#cfffac">license_header.js</text>
</g>
<!-- zipEntries.js&#45;&gt;utils.js -->
<g id="edge51" class="edge">
<title>zipEntries.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M358.78,-648.1C378.66,-699.78 467.27,-906.5 612.5,-906.5 612.5,-906.5 612.5,-906.5 1283.5,-906.5 1444.91,-906.5 1526.7,-993.22 1646,-884.5 1702.34,-833.16 1707.73,-582.1 1708.05,-505.64"/>
<polygon fill="#757575" stroke="#757575" points="1711.55,-505.3 1708.07,-495.3 1704.55,-505.29 1711.55,-505.3"/>
</g>
<!-- reader/readerFor.js -->
<g id="node31" class="node">
<title>reader/readerFor.js</title>
<path fill="none" stroke="#c6c5fe" d="M671.33,-728C671.33,-728 555.67,-728 555.67,-728 551.83,-728 548,-724.17 548,-720.33 548,-720.33 548,-712.67 548,-712.67 548,-708.83 551.83,-705 555.67,-705 555.67,-705 671.33,-705 671.33,-705 675.17,-705 679,-708.83 679,-712.67 679,-712.67 679,-720.33 679,-720.33 679,-724.17 675.17,-728 671.33,-728"/>
<text text-anchor="middle" x="613.5" y="-712.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/readerFor.js</text>
</g>
<!-- zipEntries.js&#45;&gt;reader/readerFor.js -->
<g id="edge50" class="edge">
<title>zipEntries.js&#45;&gt;reader/readerFor.js</title>
<path fill="none" stroke="#757575" d="M391.74,-648.05C437.66,-662.29 515.74,-686.5 565.68,-701.98"/>
<polygon fill="#757575" stroke="#757575" points="564.68,-705.34 575.27,-704.96 566.76,-698.65 564.68,-705.34"/>
</g>
<!-- zipEntry.js -->
<g id="node36" class="node">
<title>zipEntry.js</title>
<path fill="none" stroke="#c6c5fe" d="M504.33,-608C504.33,-608 441.67,-608 441.67,-608 437.83,-608 434,-604.17 434,-600.33 434,-600.33 434,-592.67 434,-592.67 434,-588.83 437.83,-585 441.67,-585 441.67,-585 504.33,-585 504.33,-585 508.17,-585 512,-588.83 512,-592.67 512,-592.67 512,-600.33 512,-600.33 512,-604.17 508.17,-608 504.33,-608"/>
<text text-anchor="middle" x="473" y="-592.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">zipEntry.js</text>
</g>
<!-- zipEntries.js&#45;&gt;zipEntry.js -->
<g id="edge52" class="edge">
<title>zipEntries.js&#45;&gt;zipEntry.js</title>
<path fill="none" stroke="#757575" d="M388.49,-624.93C400.91,-620.7 415.14,-615.86 428.29,-611.38"/>
<polygon fill="#757575" stroke="#757575" points="429.75,-614.58 438.09,-608.04 427.5,-607.95 429.75,-614.58"/>
</g>
<!-- nodejs/NodejsStreamInputAdapter.js&#45;&gt;utils.js -->
<g id="edge18" class="edge">
<title>nodejs/NodejsStreamInputAdapter.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M951.31,-109.85C1163.75,-108 1595,-109.88 1646,-154.5 1692.42,-195.11 1704.48,-394.49 1707.25,-461.49"/>
<polygon fill="#757575" stroke="#757575" points="1703.77,-461.87 1707.65,-471.73 1710.76,-461.61 1703.77,-461.87"/>
</g>
<!-- nodejs/NodejsStreamOutputAdapter.js -->
<g id="node21" class="node">
<title>nodejs/NodejsStreamOutputAdapter.js</title>
<path fill="none" stroke="#c6c5fe" d="M1638.33,-64C1638.33,-64 1405.67,-64 1405.67,-64 1401.83,-64 1398,-60.17 1398,-56.33 1398,-56.33 1398,-48.67 1398,-48.67 1398,-44.83 1401.83,-41 1405.67,-41 1405.67,-41 1638.33,-41 1638.33,-41 1642.17,-41 1646,-44.83 1646,-48.67 1646,-48.67 1646,-56.33 1646,-56.33 1646,-60.17 1642.17,-64 1638.33,-64"/>
<text text-anchor="middle" x="1522" y="-48.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">nodejs/NodejsStreamOutputAdapter.js</text>
</g>
<!-- nodejs/NodejsStreamOutputAdapter.js&#45;&gt;utils.js -->
<g id="edge19" class="edge">
<title>nodejs/NodejsStreamOutputAdapter.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1626.86,-64.14C1633.94,-67.92 1640.45,-72.63 1646,-78.5 1698.86,-134.43 1706.66,-385.18 1707.8,-461.43"/>
<polygon fill="#757575" stroke="#757575" points="1704.31,-461.79 1707.93,-471.75 1711.31,-461.7 1704.31,-461.79"/>
</g>
<!-- nodejsUtils.js -->
<g id="node22" class="node">
<title>nodejsUtils.js</title>
<path fill="none" stroke="#cfffac" d="M130.83,-511C130.83,-511 50.17,-511 50.17,-511 46.33,-511 42.5,-507.17 42.5,-503.33 42.5,-503.33 42.5,-495.67 42.5,-495.67 42.5,-491.83 46.33,-488 50.17,-488 50.17,-488 130.83,-488 130.83,-488 134.67,-488 138.5,-491.83 138.5,-495.67 138.5,-495.67 138.5,-503.33 138.5,-503.33 138.5,-507.17 134.67,-511 130.83,-511"/>
<text text-anchor="middle" x="90.5" y="-495.8" font-family="Arial" font-size="14.00" fill="#cfffac">nodejsUtils.js</text>
</g>
<!-- stream/StreamHelper.js&#45;&gt;utils.js -->
<g id="edge48" class="edge">
<title>stream/StreamHelper.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1354.06,-66.05C1458.79,-83.26 1643.42,-113.91 1646,-116.5 1694.96,-165.56 1705.4,-390.11 1707.5,-461.58"/>
<polygon fill="#757575" stroke="#757575" points="1704,-461.79 1707.77,-471.69 1711,-461.6 1704,-461.79"/>
</g>
<!-- stream/StreamHelper.js&#45;&gt;nodejs/NodejsStreamOutputAdapter.js -->
<g id="edge46" class="edge">
<title>stream/StreamHelper.js&#45;&gt;nodejs/NodejsStreamOutputAdapter.js</title>
<path fill="none" stroke="#757575" d="M1362.25,-53.84C1370.5,-53.77 1379.04,-53.7 1387.69,-53.62"/>
<polygon fill="#757575" stroke="#757575" points="1387.91,-57.12 1397.88,-53.54 1387.85,-50.12 1387.91,-57.12"/>
</g>
<!-- stream/ConvertWorker.js -->
<g id="node33" class="node">
<title>stream/ConvertWorker.js</title>
<path fill="none" stroke="#c6c5fe" d="M1597.33,-23C1597.33,-23 1446.67,-23 1446.67,-23 1442.83,-23 1439,-19.17 1439,-15.33 1439,-15.33 1439,-7.67 1439,-7.67 1439,-3.83 1442.83,0 1446.67,0 1446.67,0 1597.33,0 1597.33,0 1601.17,0 1605,-3.83 1605,-7.67 1605,-7.67 1605,-15.33 1605,-15.33 1605,-19.17 1601.17,-23 1597.33,-23"/>
<text text-anchor="middle" x="1522" y="-7.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/ConvertWorker.js</text>
</g>
<!-- stream/StreamHelper.js&#45;&gt;stream/ConvertWorker.js -->
<g id="edge47" class="edge">
<title>stream/StreamHelper.js&#45;&gt;stream/ConvertWorker.js</title>
<path fill="none" stroke="#757575" d="M1338.75,-42.97C1357.52,-39.17 1378.65,-35.02 1398,-31.5 1410.72,-29.19 1424.21,-26.87 1437.41,-24.66"/>
<polygon fill="#757575" stroke="#757575" points="1438.13,-28.09 1447.43,-23.01 1436.99,-21.19 1438.13,-28.09"/>
</g>
<!-- zipObject.js&#45;&gt;compressedObject.js -->
<g id="edge59" class="edge">
<title>zipObject.js&#45;&gt;compressedObject.js</title>
<path fill="none" stroke="#757575" d="M876.59,-214.08C899.3,-218.01 927.36,-224.41 951,-234.5 968.69,-242.05 969.57,-250.38 987,-258.5 996.4,-262.88 1006.7,-266.7 1016.86,-269.97"/>
<polygon fill="#757575" stroke="#757575" points="1015.93,-273.35 1026.52,-272.93 1017.99,-266.65 1015.93,-273.35"/>
</g>
<!-- zipObject.js&#45;&gt;stream/DataWorker.js -->
<g id="edge60" class="edge">
<title>zipObject.js&#45;&gt;stream/DataWorker.js</title>
<path fill="none" stroke="#757575" d="M876.54,-206.49C940.6,-204.53 1065.15,-204.91 1167,-230.5 1197.57,-238.18 1230.08,-253.74 1252.61,-265.84"/>
<polygon fill="#757575" stroke="#757575" points="1251.18,-269.05 1261.63,-270.79 1254.54,-262.91 1251.18,-269.05"/>
</g>
<!-- zipObject.js&#45;&gt;utf8.js -->
<g id="edge62" class="edge">
<title>zipObject.js&#45;&gt;utf8.js</title>
<path fill="none" stroke="#757575" d="M876.56,-214.91C963.47,-228.1 1154.29,-257.62 1167,-264.5 1187.75,-275.73 1185.5,-287.68 1203,-303.5 1219.75,-318.64 1240.19,-333.91 1256.01,-345.12"/>
<polygon fill="#757575" stroke="#757575" points="1254.07,-348.04 1264.27,-350.91 1258.08,-342.3 1254.07,-348.04"/>
</g>
<!-- zipObject.js&#45;&gt;stream/StreamHelper.js -->
<g id="edge61" class="edge">
<title>zipObject.js&#45;&gt;stream/StreamHelper.js</title>
<path fill="none" stroke="#757575" d="M867.64,-196.92C947.67,-169.38 1149.36,-99.97 1238.27,-69.38"/>
<polygon fill="#757575" stroke="#757575" points="1239.46,-72.67 1247.78,-66.1 1237.19,-66.05 1239.46,-72.67"/>
</g>
<!-- readable&#45;stream&#45;browser.js -->
<g id="node25" class="node">
<title>readable&#45;stream&#45;browser.js</title>
<path fill="none" stroke="#cfffac" d="M173.33,-552C173.33,-552 7.67,-552 7.67,-552 3.83,-552 0,-548.17 0,-544.33 0,-544.33 0,-536.67 0,-536.67 0,-532.83 3.83,-529 7.67,-529 7.67,-529 173.33,-529 173.33,-529 177.17,-529 181,-532.83 181,-536.67 181,-536.67 181,-544.33 181,-544.33 181,-548.17 177.17,-552 173.33,-552"/>
<text text-anchor="middle" x="90.5" y="-536.8" font-family="Arial" font-size="14.00" fill="#cfffac">readable&#45;stream&#45;browser.js</text>
</g>
<!-- reader/ArrayReader.js -->
<g id="node26" class="node">
<title>reader/ArrayReader.js</title>
<path fill="none" stroke="#c6c5fe" d="M1349.33,-802C1349.33,-802 1215.67,-802 1215.67,-802 1211.83,-802 1208,-798.17 1208,-794.33 1208,-794.33 1208,-786.67 1208,-786.67 1208,-782.83 1211.83,-779 1215.67,-779 1215.67,-779 1349.33,-779 1349.33,-779 1353.17,-779 1357,-782.83 1357,-786.67 1357,-786.67 1357,-794.33 1357,-794.33 1357,-798.17 1353.17,-802 1349.33,-802"/>
<text text-anchor="middle" x="1282.5" y="-786.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/ArrayReader.js</text>
</g>
<!-- reader/ArrayReader.js&#45;&gt;utils.js -->
<g id="edge28" class="edge">
<title>reader/ArrayReader.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1357.26,-795.2C1450.61,-799.5 1604.29,-800.54 1646,-764.5 1685.42,-730.44 1701.76,-565.37 1706.46,-505.36"/>
<polygon fill="#757575" stroke="#757575" points="1709.97,-505.37 1707.23,-495.13 1702.99,-504.84 1709.97,-505.37"/>
</g>
<!-- reader/DataReader.js -->
<g id="node27" class="node">
<title>reader/DataReader.js</title>
<path fill="none" stroke="#c6c5fe" d="M1586.83,-840C1586.83,-840 1457.17,-840 1457.17,-840 1453.33,-840 1449.5,-836.17 1449.5,-832.33 1449.5,-832.33 1449.5,-824.67 1449.5,-824.67 1449.5,-820.83 1453.33,-817 1457.17,-817 1457.17,-817 1586.83,-817 1586.83,-817 1590.67,-817 1594.5,-820.83 1594.5,-824.67 1594.5,-824.67 1594.5,-832.33 1594.5,-832.33 1594.5,-836.17 1590.67,-840 1586.83,-840"/>
<text text-anchor="middle" x="1522" y="-824.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/DataReader.js</text>
</g>
<!-- reader/ArrayReader.js&#45;&gt;reader/DataReader.js -->
<g id="edge27" class="edge">
<title>reader/ArrayReader.js&#45;&gt;reader/DataReader.js</title>
<path fill="none" stroke="#757575" d="M1355.4,-802C1381.91,-806.25 1412.13,-811.08 1439.43,-815.45"/>
<polygon fill="#757575" stroke="#757575" points="1438.91,-818.91 1449.34,-817.03 1440.02,-812 1438.91,-818.91"/>
</g>
<!-- reader/DataReader.js&#45;&gt;utils.js -->
<g id="edge29" class="edge">
<title>reader/DataReader.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1594.67,-827.36C1613.57,-823.63 1632.45,-816.27 1646,-802.5 1687.8,-760.01 1702.84,-570.31 1706.81,-505.38"/>
<polygon fill="#757575" stroke="#757575" points="1710.33,-505.28 1707.41,-495.1 1703.34,-504.87 1710.33,-505.28"/>
</g>
<!-- reader/NodeBufferReader.js -->
<g id="node28" class="node">
<title>reader/NodeBufferReader.js</title>
<path fill="none" stroke="#c6c5fe" d="M917.83,-688C917.83,-688 748.17,-688 748.17,-688 744.33,-688 740.5,-684.17 740.5,-680.33 740.5,-680.33 740.5,-672.67 740.5,-672.67 740.5,-668.83 744.33,-665 748.17,-665 748.17,-665 917.83,-665 917.83,-665 921.67,-665 925.5,-668.83 925.5,-672.67 925.5,-672.67 925.5,-680.33 925.5,-680.33 925.5,-684.17 921.67,-688 917.83,-688"/>
<text text-anchor="middle" x="833" y="-672.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/NodeBufferReader.js</text>
</g>
<!-- reader/NodeBufferReader.js&#45;&gt;utils.js -->
<g id="edge31" class="edge">
<title>reader/NodeBufferReader.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M925.72,-679.23C1125.93,-684.4 1588.79,-691.66 1646,-650.5 1693.09,-616.62 1704.46,-542.72 1707.17,-505.64"/>
<polygon fill="#757575" stroke="#757575" points="1710.68,-505.59 1707.77,-495.4 1703.69,-505.18 1710.68,-505.59"/>
</g>
<!-- reader/Uint8ArrayReader.js -->
<g id="node29" class="node">
<title>reader/Uint8ArrayReader.js</title>
<path fill="none" stroke="#c6c5fe" d="M1159.33,-726C1159.33,-726 994.67,-726 994.67,-726 990.83,-726 987,-722.17 987,-718.33 987,-718.33 987,-710.67 987,-710.67 987,-706.83 990.83,-703 994.67,-703 994.67,-703 1159.33,-703 1159.33,-703 1163.17,-703 1167,-706.83 1167,-710.67 1167,-710.67 1167,-718.33 1167,-718.33 1167,-722.17 1163.17,-726 1159.33,-726"/>
<text text-anchor="middle" x="1077" y="-710.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/Uint8ArrayReader.js</text>
</g>
<!-- reader/NodeBufferReader.js&#45;&gt;reader/Uint8ArrayReader.js -->
<g id="edge30" class="edge">
<title>reader/NodeBufferReader.js&#45;&gt;reader/Uint8ArrayReader.js</title>
<path fill="none" stroke="#757575" d="M907.26,-688C934.15,-692.23 964.78,-697.04 992.5,-701.39"/>
<polygon fill="#757575" stroke="#757575" points="992.15,-704.88 1002.57,-702.97 993.23,-697.96 992.15,-704.88"/>
</g>
<!-- reader/Uint8ArrayReader.js&#45;&gt;utils.js -->
<g id="edge35" class="edge">
<title>reader/Uint8ArrayReader.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1167.07,-722.47C1307.97,-733.13 1573.75,-744.74 1646,-688.5 1703.1,-644.05 1709.22,-548.71 1708.86,-505.54"/>
<polygon fill="#757575" stroke="#757575" points="1712.36,-505.22 1708.65,-495.3 1705.36,-505.37 1712.36,-505.22"/>
</g>
<!-- reader/Uint8ArrayReader.js&#45;&gt;reader/ArrayReader.js -->
<g id="edge34" class="edge">
<title>reader/Uint8ArrayReader.js&#45;&gt;reader/ArrayReader.js</title>
<path fill="none" stroke="#757575" d="M1108.91,-726.04C1144.27,-739.25 1202.3,-760.92 1241.19,-775.44"/>
<polygon fill="#757575" stroke="#757575" points="1240.04,-778.75 1250.64,-778.97 1242.49,-772.2 1240.04,-778.75"/>
</g>
<!-- reader/StringReader.js -->
<g id="node30" class="node">
<title>reader/StringReader.js</title>
<path fill="none" stroke="#c6c5fe" d="M1350.83,-843C1350.83,-843 1214.17,-843 1214.17,-843 1210.33,-843 1206.5,-839.17 1206.5,-835.33 1206.5,-835.33 1206.5,-827.67 1206.5,-827.67 1206.5,-823.83 1210.33,-820 1214.17,-820 1214.17,-820 1350.83,-820 1350.83,-820 1354.67,-820 1358.5,-823.83 1358.5,-827.67 1358.5,-827.67 1358.5,-835.33 1358.5,-835.33 1358.5,-839.17 1354.67,-843 1350.83,-843"/>
<text text-anchor="middle" x="1282.5" y="-827.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/StringReader.js</text>
</g>
<!-- reader/StringReader.js&#45;&gt;utils.js -->
<g id="edge33" class="edge">
<title>reader/StringReader.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1308.71,-843.08C1373.39,-870.87 1547.09,-933.07 1646,-849.5 1698.92,-804.79 1706.68,-577.03 1707.81,-505.22"/>
<polygon fill="#757575" stroke="#757575" points="1711.31,-505.13 1707.94,-495.08 1704.31,-505.04 1711.31,-505.13"/>
</g>
<!-- reader/StringReader.js&#45;&gt;reader/DataReader.js -->
<g id="edge32" class="edge">
<title>reader/StringReader.js&#45;&gt;reader/DataReader.js</title>
<path fill="none" stroke="#757575" d="M1358.65,-830.55C1384.2,-830.23 1412.88,-829.87 1438.95,-829.54"/>
<polygon fill="#757575" stroke="#757575" points="1439.22,-833.03 1449.18,-829.41 1439.13,-826.03 1439.22,-833.03"/>
</g>
<!-- reader/readerFor.js&#45;&gt;utils.js -->
<g id="edge40" class="edge">
<title>reader/readerFor.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M679,-725.88C690.95,-727.32 703.33,-728.62 715,-729.5 835.58,-738.6 866.09,-733.89 987,-735.5 1060.23,-736.48 1589.14,-769.66 1646,-723.5 1680.16,-695.77 1699.3,-559.25 1705.63,-505.4"/>
<polygon fill="#757575" stroke="#757575" points="1709.13,-505.56 1706.78,-495.23 1702.17,-504.77 1709.13,-505.56"/>
</g>
<!-- reader/readerFor.js&#45;&gt;reader/ArrayReader.js -->
<g id="edge36" class="edge">
<title>reader/readerFor.js&#45;&gt;reader/ArrayReader.js</title>
<path fill="none" stroke="#757575" d="M631.34,-728.18C650.57,-740.78 683.47,-760.17 715,-768.5 881.57,-812.48 1085.19,-806.6 1197.33,-798.49"/>
<polygon fill="#757575" stroke="#757575" points="1197.85,-801.96 1207.56,-797.72 1197.33,-794.98 1197.85,-801.96"/>
</g>
<!-- reader/readerFor.js&#45;&gt;reader/NodeBufferReader.js -->
<g id="edge37" class="edge">
<title>reader/readerFor.js&#45;&gt;reader/NodeBufferReader.js</title>
<path fill="none" stroke="#757575" d="M677.12,-704.98C702.82,-700.26 732.75,-694.75 759.41,-689.85"/>
<polygon fill="#757575" stroke="#757575" points="760.25,-693.26 769.45,-688 758.98,-686.37 760.25,-693.26"/>
</g>
<!-- reader/readerFor.js&#45;&gt;reader/Uint8ArrayReader.js -->
<g id="edge39" class="edge">
<title>reader/readerFor.js&#45;&gt;reader/Uint8ArrayReader.js</title>
<path fill="none" stroke="#757575" d="M679.16,-716.22C756.43,-715.88 886.56,-715.32 976.64,-714.93"/>
<polygon fill="#757575" stroke="#757575" points="976.88,-718.43 986.87,-714.89 976.85,-711.43 976.88,-718.43"/>
</g>
<!-- reader/readerFor.js&#45;&gt;reader/StringReader.js -->
<g id="edge38" class="edge">
<title>reader/readerFor.js&#45;&gt;reader/StringReader.js</title>
<path fill="none" stroke="#757575" d="M624.19,-728.1C640.63,-746.82 676.03,-783.15 715,-798.5 875.83,-861.85 1082.3,-853.86 1196.31,-842.57"/>
<polygon fill="#757575" stroke="#757575" points="1196.79,-846.04 1206.38,-841.54 1196.07,-839.07 1196.79,-846.04"/>
</g>
<!-- signature.js -->
<g id="node32" class="node">
<title>signature.js</title>
<path fill="none" stroke="#cfffac" d="M125.83,-593C125.83,-593 55.17,-593 55.17,-593 51.33,-593 47.5,-589.17 47.5,-585.33 47.5,-585.33 47.5,-577.67 47.5,-577.67 47.5,-573.83 51.33,-570 55.17,-570 55.17,-570 125.83,-570 125.83,-570 129.67,-570 133.5,-573.83 133.5,-577.67 133.5,-577.67 133.5,-585.33 133.5,-585.33 133.5,-589.17 129.67,-593 125.83,-593"/>
<text text-anchor="middle" x="90.5" y="-577.8" font-family="Arial" font-size="14.00" fill="#cfffac">signature.js</text>
</g>
<!-- stream/ConvertWorker.js&#45;&gt;utils.js -->
<g id="edge41" class="edge">
<title>stream/ConvertWorker.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M1605.11,-10.29C1620.43,-13.8 1635.05,-20.27 1646,-31.5 1706.48,-93.51 1708.87,-379.48 1708.29,-461.47"/>
<polygon fill="#757575" stroke="#757575" points="1704.79,-461.61 1708.19,-471.64 1711.78,-461.68 1704.79,-461.61"/>
</g>
<!-- stream/GenericWorker.js -->
<g id="node34" class="node">
<title>stream/GenericWorker.js</title>
<path fill="none" stroke="#cfffac" d="M166.33,-634C166.33,-634 14.67,-634 14.67,-634 10.83,-634 7,-630.17 7,-626.33 7,-626.33 7,-618.67 7,-618.67 7,-614.83 10.83,-611 14.67,-611 14.67,-611 166.33,-611 166.33,-611 170.17,-611 174,-614.83 174,-618.67 174,-618.67 174,-626.33 174,-626.33 174,-630.17 170.17,-634 166.33,-634"/>
<text text-anchor="middle" x="90.5" y="-618.8" font-family="Arial" font-size="14.00" fill="#cfffac">stream/GenericWorker.js</text>
</g>
<!-- support.js -->
<g id="node35" class="node">
<title>support.js</title>
<path fill="none" stroke="#cfffac" d="M120.33,-675C120.33,-675 60.67,-675 60.67,-675 56.83,-675 53,-671.17 53,-667.33 53,-667.33 53,-659.67 53,-659.67 53,-655.83 56.83,-652 60.67,-652 60.67,-652 120.33,-652 120.33,-652 124.17,-652 128,-655.83 128,-659.67 128,-659.67 128,-667.33 128,-667.33 128,-671.17 124.17,-675 120.33,-675"/>
<text text-anchor="middle" x="90.5" y="-659.8" font-family="Arial" font-size="14.00" fill="#cfffac">support.js</text>
</g>
<!-- zipEntry.js&#45;&gt;compressedObject.js -->
<g id="edge53" class="edge">
<title>zipEntry.js&#45;&gt;compressedObject.js</title>
<path fill="none" stroke="#757575" d="M481.75,-584.89C494.31,-566.55 520.51,-530.33 548,-504.5 614.23,-442.27 632.98,-425.64 715,-386.5 808.66,-341.8 925.59,-313.23 1000.53,-298.03"/>
<polygon fill="#757575" stroke="#757575" points="1001.25,-301.45 1010.37,-296.06 999.87,-294.59 1001.25,-301.45"/>
</g>
<!-- zipEntry.js&#45;&gt;compressions.js -->
<g id="edge54" class="edge">
<title>zipEntry.js&#45;&gt;compressions.js</title>
<path fill="none" stroke="#757575" d="M512.11,-596.5C612.17,-596.5 883.28,-596.5 1009.58,-596.5"/>
<polygon fill="#757575" stroke="#757575" points="1009.88,-600 1019.88,-596.5 1009.88,-593 1009.88,-600"/>
</g>
<!-- zipEntry.js&#45;&gt;crc32.js -->
<g id="edge55" class="edge">
<title>zipEntry.js&#45;&gt;crc32.js</title>
<path fill="none" stroke="#757575" d="M512.19,-592.38C675.12,-574.79 1311.33,-506.13 1479.85,-487.94"/>
<polygon fill="#757575" stroke="#757575" points="1480.47,-491.39 1490.04,-486.84 1479.72,-484.44 1480.47,-491.39"/>
</g>
<!-- zipEntry.js&#45;&gt;utils.js -->
<g id="edge58" class="edge">
<title>zipEntry.js&#45;&gt;utils.js</title>
<path fill="none" stroke="#757575" d="M512.29,-604.47C523.74,-606.65 536.35,-608.87 548,-610.5 673.53,-628.08 705.25,-636.5 832,-636.5 832,-636.5 832,-636.5 1283.5,-636.5 1364.17,-636.5 1579.19,-662.71 1646,-617.5 1684,-591.78 1699.17,-536.51 1704.86,-505.59"/>
<polygon fill="#757575" stroke="#757575" points="1708.37,-505.84 1706.55,-495.4 1701.46,-504.69 1708.37,-505.84"/>
</g>
<!-- zipEntry.js&#45;&gt;utf8.js -->
<g id="edge57" class="edge">
<title>zipEntry.js&#45;&gt;utf8.js</title>
<path fill="none" stroke="#757575" d="M488.53,-584.79C503.1,-573.32 526.42,-555.68 548,-542.5 618.91,-499.2 636.06,-484.39 715,-458.5 815.21,-425.63 1136.39,-381.56 1245.12,-367.22"/>
<polygon fill="#757575" stroke="#757575" points="1245.8,-370.66 1255.26,-365.89 1244.89,-363.72 1245.8,-370.66"/>
</g>
<!-- zipEntry.js&#45;&gt;reader/readerFor.js -->
<g id="edge56" class="edge">
<title>zipEntry.js&#45;&gt;reader/readerFor.js</title>
<path fill="none" stroke="#757575" d="M487.28,-608.01C511.31,-628.83 562.21,-672.93 591.17,-698.02"/>
<polygon fill="#757575" stroke="#757575" points="589.21,-700.95 599.06,-704.85 593.79,-695.66 589.21,-700.95"/>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 44 KiB

View File

@@ -0,0 +1,330 @@
// Type definitions for JSZip 3.1
// Project: http://stuk.github.com/jszip/, https://github.com/stuk/jszip
// Definitions by: mzeiher <https://github.com/mzeiher>, forabi <https://github.com/forabi>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
/// <reference types="node" />
interface JSZipSupport {
arraybuffer: boolean;
uint8array: boolean;
blob: boolean;
nodebuffer: boolean;
}
type Compression = 'STORE' | 'DEFLATE';
/**
* Depends on the compression type. With `STORE` (no compression), these options are ignored. With
* `DEFLATE`, you can give the compression level between 1 (best speed) and 9 (best compression).
*/
interface CompressionOptions {
level: number;
}
interface InputByType {
base64: string;
string: string;
text: string;
binarystring: string;
array: number[];
uint8array: Uint8Array;
arraybuffer: ArrayBuffer;
blob: Blob;
stream: NodeJS.ReadableStream;
}
interface OutputByType {
base64: string;
string: string;
text: string;
binarystring: string;
array: number[];
uint8array: Uint8Array;
arraybuffer: ArrayBuffer;
blob: Blob;
nodebuffer: Buffer;
}
// This private `_data` property on a JSZipObject uses this interface.
// If/when it is made public this should be uncommented.
// interface CompressedObject {
// compressedSize: number;
// uncompressedSize: number;
// crc32: number;
// compression: object;
// compressedContent: string|ArrayBuffer|Uint8Array|Buffer;
// }
type InputFileFormat = InputByType[keyof InputByType] | Promise<InputByType[keyof InputByType]>;
declare namespace JSZip {
type InputType = keyof InputByType;
type OutputType = keyof OutputByType;
interface JSZipMetadata {
percent: number;
currentFile: string | null;
}
type OnUpdateCallback = (metadata: JSZipMetadata) => void;
interface JSZipObject {
name: string;
/**
* Present for files loaded with `loadAsync`. May contain ".." path components that could
* result in a zip-slip attack. See https://snyk.io/research/zip-slip-vulnerability
*/
unsafeOriginalName?: string;
dir: boolean;
date: Date;
comment: string;
/** The UNIX permissions of the file, if any. */
unixPermissions: number | string | null;
/** The DOS permissions of the file, if any. */
dosPermissions: number | null;
options: JSZipObjectOptions;
/**
* Prepare the content in the asked type.
* @param type the type of the result.
* @param onUpdate a function to call on each internal update.
* @return Promise the promise of the result.
*/
async<T extends OutputType>(type: T, onUpdate?: OnUpdateCallback): Promise<OutputByType[T]>;
nodeStream(type?: 'nodebuffer', onUpdate?: OnUpdateCallback): NodeJS.ReadableStream;
}
interface JSZipFileOptions {
/** Set to `true` if the data is `base64` encoded. For example image data from a `<canvas>` element. Plain text and HTML do not need this option. */
base64?: boolean;
/**
* Set to `true` if the data should be treated as raw content, `false` if this is text. If `base64` is used,
* this defaults to `true`; if the data is not a `string`, this will be set to `true`.
*/
binary?: boolean;
/**
* The last modification date, defaults to the current date.
*/
date?: Date;
/**
* Sets per file compression. The `compressionOptions` parameter depends on the compression type.
*/
compression?: Compression;
/**
* Sets per file compression level for `DEFLATE` compression.
*/
compressionOptions?: null | CompressionOptions;
comment?: string;
/** Set to `true` if (and only if) the input is a "binary string" and has already been prepared with a `0xFF` mask. */
optimizedBinaryString?: boolean;
/** Set to `true` if folders in the file path should be automatically created, otherwise there will only be virtual folders that represent the path to the file. */
createFolders?: boolean;
/** Set to `true` if this is a directory and content should be ignored. */
dir?: boolean;
/** 6-bit number. The DOS permissions of the file, if any. */
dosPermissions?: number | null;
/**
* 16-bit number. The UNIX permissions of the file, if any.
* Also accepts a `string` representing the octal value: `"644"`, `"755"`, etc.
*/
unixPermissions?: number | string | null;
}
interface JSZipObjectOptions {
compression: Compression;
}
interface JSZipGeneratorOptions<T extends OutputType = OutputType> {
/**
* Sets compression option for all entries that have not specified their own `compression` option
*/
compression?: Compression;
/**
* Sets compression level for `DEFLATE` compression.
*/
compressionOptions?: null | CompressionOptions;
type?: T;
comment?: string;
/**
* mime-type for the generated file.
* Useful when you need to generate a file with a different extension, e.g. ".ods".
* @default 'application/zip'
*/
mimeType?: string;
encodeFileName?(filename: string): string;
/** Stream the files and create file descriptors */
streamFiles?: boolean;
/** DOS (default) or UNIX */
platform?: 'DOS' | 'UNIX';
}
interface JSZipLoadOptions {
base64?: boolean;
checkCRC32?: boolean;
optimizedBinaryString?: boolean;
createFolders?: boolean;
decodeFileName?: (bytes: string[] | Uint8Array | Buffer) => string;
}
type DataEventCallback<T> = (dataChunk: T, metadata: JSZipMetadata) => void;
type EndEventCallback = () => void;
type ErrorEventCallback = (error: Error) => void;
interface JSZipStreamHelper<T> {
/**
* Register a listener on an event
*/
on(event: 'data', callback: DataEventCallback<T>): this;
on(event: 'end', callback: EndEventCallback): this;
on(event: 'error', callback: ErrorEventCallback): this;
/**
* Read the whole stream and call a callback with the complete content
*
* @param updateCallback The function called every time the stream updates
* @return A Promise of the full content
*/
accumulate(updateCallback?: (metadata: JSZipMetadata) => void): Promise<T>;
/**
* Resume the stream if the stream is paused. Once resumed, the stream starts sending data events again
*
* @return The current StreamHelper object, for chaining
*/
resume(): this;
/**
* Pause the stream if the stream is running. Once paused, the stream stops sending data events
*
* @return The current StreamHelper object, for chaining
*/
pause(): this;
}
}
interface JSZip {
files: {[key: string]: JSZip.JSZipObject};
/**
* Get a file from the archive
*
* @param path Relative path to file
* @return File matching path, null if no file found
*/
file(path: string): JSZip.JSZipObject | null;
/**
* Get files matching a RegExp from archive
*
* @param path RegExp to match
* @return All matching files, or an empty array
*/
file(path: RegExp): JSZip.JSZipObject[];
/**
* Add a file to the archive
*
* @param path Relative path to file
* @param data Content of the file
* @param options Optional information about the file
* @return JSZip object
*/
file<T extends JSZip.InputType>(path: string, data: InputByType[T] | Promise<InputByType[T]>, options?: JSZip.JSZipFileOptions): this;
file<T extends JSZip.InputType>(path: string, data: null, options?: JSZip.JSZipFileOptions & { dir: true }): this;
/**
* Returns a new JSZip instance with the given folder as root
*
* @param name Name of the folder
* @return New JSZip object with the given folder as root or null
*/
folder(name: string): JSZip | null;
/**
* Returns new JSZip instances with the matching folders as root
*
* @param name RegExp to match
* @return New array of JSZipObject instances which match the RegExp
*/
folder(name: RegExp): JSZip.JSZipObject[];
/**
* Call a callback function for each entry at this folder level.
*
* @param callback Function invoked for each entry with its relative path and file
*/
forEach(callback: (relativePath: string, file: JSZip.JSZipObject) => void): void;
/**
* Get all files which match the given filter function
*
* @param predicate Filter function
* @return Array of matched elements
*/
filter(predicate: (relativePath: string, file: JSZip.JSZipObject) => boolean): JSZip.JSZipObject[];
/**
* Removes the file or folder from the archive
*
* @param path Relative path of file or folder
* @return Returns the JSZip instance
*/
remove(path: string): JSZip;
/**
* Generates a new archive asynchronously
*
* @param options Optional options for the generator
* @param onUpdate The optional function called on each internal update with the metadata.
* @return The serialized archive
*/
generateAsync<T extends JSZip.OutputType>(options?: JSZip.JSZipGeneratorOptions<T>, onUpdate?: JSZip.OnUpdateCallback): Promise<OutputByType[T]>;
/**
* Generates a new archive asynchronously
*
* @param options Optional options for the generator
* @param onUpdate The optional function called on each internal update with the metadata.
* @return A Node.js `ReadableStream`
*/
generateNodeStream(options?: JSZip.JSZipGeneratorOptions<'nodebuffer'>, onUpdate?: JSZip.OnUpdateCallback): NodeJS.ReadableStream;
/**
* Generates the complete zip file with the internal stream implementation
*
* @param options Optional options for the generator
* @return a StreamHelper
*/
generateInternalStream<T extends JSZip.OutputType>(options?: JSZip.JSZipGeneratorOptions<T>): JSZip.JSZipStreamHelper<OutputByType[T]>;
/**
* Deserialize zip file asynchronously
*
* @param data Serialized zip file
* @param options Options for deserializing
* @return Promise of the loaded JSZip instance
*/
loadAsync(data: InputFileFormat, options?: JSZip.JSZipLoadOptions): Promise<JSZip>;
/**
* Create JSZip instance
*/
new(): this;
(): JSZip;
prototype: JSZip;
support: JSZipSupport;
external: {
Promise: PromiseConstructorLike;
};
version: string;
}
declare var JSZip: JSZip;
export = JSZip;
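
A minimal usage sketch of the runtime API these definitions describe (illustrative only; the file name and contents below are not part of the diff):

"use strict";
var JSZip = require("jszip");

var zip = new JSZip();
// folders along the path are created because createFolders defaults to true
zip.file("docs/hello.txt", "Hello, world\n");

zip.generateAsync({ type: "nodebuffer", compression: "DEFLATE" })
    .then(function (buffer) {
        // round-trip: parse the generated archive back in
        return JSZip.loadAsync(buffer);
    })
    .then(function (loaded) {
        return loaded.file("docs/hello.txt").async("string");
    })
    .then(function (text) {
        // text === "Hello, world\n"
    });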

View File

@@ -0,0 +1,106 @@
"use strict";
var utils = require("./utils");
var support = require("./support");
// private property
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
// public method for encoding
exports.encode = function(input) {
var output = [];
var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
var i = 0, len = input.length, remainingBytes = len;
var isArray = utils.getTypeOf(input) !== "string";
while (i < input.length) {
remainingBytes = len - i;
if (!isArray) {
chr1 = input.charCodeAt(i++);
chr2 = i < len ? input.charCodeAt(i++) : 0;
chr3 = i < len ? input.charCodeAt(i++) : 0;
} else {
chr1 = input[i++];
chr2 = i < len ? input[i++] : 0;
chr3 = i < len ? input[i++] : 0;
}
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = remainingBytes > 1 ? (((chr2 & 15) << 2) | (chr3 >> 6)) : 64;
enc4 = remainingBytes > 2 ? (chr3 & 63) : 64;
output.push(_keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4));
}
return output.join("");
};
// public method for decoding
exports.decode = function(input) {
var chr1, chr2, chr3;
var enc1, enc2, enc3, enc4;
var i = 0, resultIndex = 0;
var dataUrlPrefix = "data:";
if (input.substr(0, dataUrlPrefix.length) === dataUrlPrefix) {
// This is a common error: people give a data url
// (data:image/png;base64,iVBOR...) with {base64: true} and
// wonder why things don't work.
// We can detect that the string input looks like a data url but we
// *can't* be sure it is one: removing everything up to the comma would
// be too dangerous.
throw new Error("Invalid base64 input, it looks like a data url.");
}
input = input.replace(/[^A-Za-z0-9+/=]/g, "");
var totalLength = input.length * 3 / 4;
if(input.charAt(input.length - 1) === _keyStr.charAt(64)) {
totalLength--;
}
if(input.charAt(input.length - 2) === _keyStr.charAt(64)) {
totalLength--;
}
if (totalLength % 1 !== 0) {
// totalLength is not an integer, the length does not match a valid
// base64 content. That can happen if:
// - the input is not base64 content
// - the input is *almost* base64 content, with extra chars at the
//   beginning or at the end
// - the input uses a base64 variant (base64url for example)
throw new Error("Invalid base64 input, bad content length.");
}
var output;
if (support.uint8array) {
output = new Uint8Array(totalLength|0);
} else {
output = new Array(totalLength|0);
}
while (i < input.length) {
enc1 = _keyStr.indexOf(input.charAt(i++));
enc2 = _keyStr.indexOf(input.charAt(i++));
enc3 = _keyStr.indexOf(input.charAt(i++));
enc4 = _keyStr.indexOf(input.charAt(i++));
chr1 = (enc1 << 2) | (enc2 >> 4);
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
chr3 = ((enc3 & 3) << 6) | enc4;
output[resultIndex++] = chr1;
if (enc3 !== 64) {
output[resultIndex++] = chr2;
}
if (enc4 !== 64) {
output[resultIndex++] = chr3;
}
}
return output;
};
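
A quick round-trip of these two helpers (a sketch, assuming it is run from inside lib/ so the relative require resolves):

"use strict";
var base64 = require("./base64");

var encoded = base64.encode("Hello"); // "SGVsbG8="
var decoded = base64.decode(encoded); // Uint8Array [72, 101, 108, 108, 111] where typed arrays exist
// decode() throws on data urls and on lengths that cannot be valid base64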

View File

@@ -0,0 +1,74 @@
"use strict";
var external = require("./external");
var DataWorker = require("./stream/DataWorker");
var Crc32Probe = require("./stream/Crc32Probe");
var DataLengthProbe = require("./stream/DataLengthProbe");
/**
* Represent a compressed object, with everything needed to decompress it.
* @constructor
* @param {number} compressedSize the size of the data compressed.
* @param {number} uncompressedSize the size of the data after decompression.
* @param {number} crc32 the crc32 of the decompressed file.
* @param {object} compression the type of compression, see lib/compressions.js.
* @param {String|ArrayBuffer|Uint8Array|Buffer} data the compressed data.
*/
function CompressedObject(compressedSize, uncompressedSize, crc32, compression, data) {
this.compressedSize = compressedSize;
this.uncompressedSize = uncompressedSize;
this.crc32 = crc32;
this.compression = compression;
this.compressedContent = data;
}
CompressedObject.prototype = {
/**
* Create a worker to get the uncompressed content.
* @return {GenericWorker} the worker.
*/
getContentWorker: function () {
var worker = new DataWorker(external.Promise.resolve(this.compressedContent))
.pipe(this.compression.uncompressWorker())
.pipe(new DataLengthProbe("data_length"));
var that = this;
worker.on("end", function () {
if (this.streamInfo["data_length"] !== that.uncompressedSize) {
throw new Error("Bug : uncompressed data size mismatch");
}
});
return worker;
},
/**
* Create a worker to get the compressed content.
* @return {GenericWorker} the worker.
*/
getCompressedWorker: function () {
return new DataWorker(external.Promise.resolve(this.compressedContent))
.withStreamInfo("compressedSize", this.compressedSize)
.withStreamInfo("uncompressedSize", this.uncompressedSize)
.withStreamInfo("crc32", this.crc32)
.withStreamInfo("compression", this.compression)
;
}
};
/**
* Chain the given worker with other workers to compress the content with the
* given compression.
* @param {GenericWorker} uncompressedWorker the worker to pipe.
* @param {Object} compression the compression object.
* @param {Object} compressionOptions the options to use when compressing.
* @return {GenericWorker} the new worker compressing the content.
*/
CompressedObject.createWorkerFrom = function (uncompressedWorker, compression, compressionOptions) {
return uncompressedWorker
.pipe(new Crc32Probe())
.pipe(new DataLengthProbe("uncompressedSize"))
.pipe(compression.compressWorker(compressionOptions))
.pipe(new DataLengthProbe("compressedSize"))
.withStreamInfo("compression", compression);
};
module.exports = CompressedObject;
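
A sketch of how createWorkerFrom is wired up (assumes it is run from inside lib/; STORE is defined in compressions.js below):

"use strict";
var external = require("./external");
var DataWorker = require("./stream/DataWorker");
var CompressedObject = require("./compressedObject");
var compressions = require("./compressions");

// chain: source -> Crc32Probe -> DataLengthProbe -> compressWorker -> DataLengthProbe
var source = new DataWorker(external.Promise.resolve("raw content"));
var worker = CompressedObject.createWorkerFrom(source, compressions.STORE, {});
// once the chain flushes, worker's streamInfo carries crc32,
// uncompressedSize, compressedSize and compression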

View File

@@ -0,0 +1,14 @@
"use strict";
var GenericWorker = require("./stream/GenericWorker");
exports.STORE = {
magic: "\x00\x00",
compressWorker : function () {
return new GenericWorker("STORE compression");
},
uncompressWorker : function () {
return new GenericWorker("STORE decompression");
}
};
exports.DEFLATE = require("./flate");

View File

@@ -0,0 +1,77 @@
"use strict";
var utils = require("./utils");
/**
* The following functions come from pako, from pako/lib/zlib/crc32.js
* released under the MIT license, see pako https://github.com/nodeca/pako/
*/
// Use an ordinary array; a typed array gives no speed boost here.
function makeTable() {
var c, table = [];
for (var n = 0; n < 256; n++) {
c = n;
for (var k = 0; k < 8; k++) {
c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
}
table[n] = c;
}
return table;
}
// Create the table on load. Just 256 signed longs. Not a problem.
var crcTable = makeTable();
function crc32(crc, buf, len, pos) {
var t = crcTable, end = pos + len;
crc = crc ^ (-1);
for (var i = pos; i < end; i++ ) {
crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];
}
return (crc ^ (-1)); // >>> 0;
}
// That's all for the pako functions.
/**
* Compute the crc32 of a string.
* This is almost the same as the function crc32, but for strings. Using the
* same function for the two use cases leads to horrible performance.
* @param {Number} crc the starting value of the crc.
* @param {String} str the string to use.
* @param {Number} len the length of the string.
* @param {Number} pos the starting position for the crc32 computation.
* @return {Number} the computed crc32.
*/
function crc32str(crc, str, len, pos) {
var t = crcTable, end = pos + len;
crc = crc ^ (-1);
for (var i = pos; i < end; i++ ) {
crc = (crc >>> 8) ^ t[(crc ^ str.charCodeAt(i)) & 0xFF];
}
return (crc ^ (-1)); // >>> 0;
}
module.exports = function crc32wrapper(input, crc) {
if (typeof input === "undefined" || !input.length) {
return 0;
}
var isArray = utils.getTypeOf(input) !== "string";
if(isArray) {
return crc32(crc|0, input, input.length, 0);
} else {
return crc32str(crc|0, input, input.length, 0);
}
};
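
The second argument lets the checksum continue across chunks, which is how the streaming Crc32Probe uses it. A tiny sketch (assumes it is run from inside lib/):

"use strict";
var crc32 = require("./crc32");

var crc = crc32("Hello, ");  // start a running checksum
crc = crc32("world", crc);   // feed the next chunk
// crc now equals crc32("Hello, world") computed in one call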

View File

@@ -0,0 +1,11 @@
"use strict";
exports.base64 = false;
exports.binary = false;
exports.dir = false;
exports.createFolders = true;
exports.date = null;
exports.compression = null;
exports.compressionOptions = null;
exports.comment = null;
exports.unixPermissions = null;
exports.dosPermissions = null;

View File

@@ -0,0 +1,18 @@
"use strict";
// load the global object first:
// - it should be better integrated in the system (unhandledRejection in node)
// - the environment may have a custom Promise implementation (see zone.js)
var ES6Promise = null;
if (typeof Promise !== "undefined") {
ES6Promise = Promise;
} else {
ES6Promise = require("lie");
}
/**
* Let the user use/change some implementations.
*/
module.exports = {
Promise: ES6Promise
};
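
Because the export is a plain mutable object, the Promise implementation can be swapped at runtime, for example by forcing the `lie` fallback this file already depends on:

"use strict";
var JSZip = require("jszip");
var lie = require("lie");

JSZip.external.Promise = lie; // later async operations resolve with lie promises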

View File

@@ -0,0 +1,85 @@
"use strict";
var USE_TYPEDARRAY = (typeof Uint8Array !== "undefined") && (typeof Uint16Array !== "undefined") && (typeof Uint32Array !== "undefined");
var pako = require("pako");
var utils = require("./utils");
var GenericWorker = require("./stream/GenericWorker");
var ARRAY_TYPE = USE_TYPEDARRAY ? "uint8array" : "array";
exports.magic = "\x08\x00";
/**
* Create a worker that uses pako to inflate/deflate.
* @constructor
* @param {String} action the name of the pako function to call : either "Deflate" or "Inflate".
* @param {Object} options the options to use when (de)compressing.
*/
function FlateWorker(action, options) {
GenericWorker.call(this, "FlateWorker/" + action);
this._pako = null;
this._pakoAction = action;
this._pakoOptions = options;
// the `meta` object from the last chunk received
// this allow this worker to pass around metadata
this.meta = {};
}
utils.inherits(FlateWorker, GenericWorker);
/**
* @see GenericWorker.processChunk
*/
FlateWorker.prototype.processChunk = function (chunk) {
this.meta = chunk.meta;
if (this._pako === null) {
this._createPako();
}
this._pako.push(utils.transformTo(ARRAY_TYPE, chunk.data), false);
};
/**
* @see GenericWorker.flush
*/
FlateWorker.prototype.flush = function () {
GenericWorker.prototype.flush.call(this);
if (this._pako === null) {
this._createPako();
}
this._pako.push([], true);
};
/**
* @see GenericWorker.cleanUp
*/
FlateWorker.prototype.cleanUp = function () {
GenericWorker.prototype.cleanUp.call(this);
this._pako = null;
};
/**
* Create the _pako object.
* TODO: lazy-loading this object isn't the best solution but it's the
* quickest. The best solution is to lazy-load the worker list. See also the
* issue #446.
*/
FlateWorker.prototype._createPako = function () {
this._pako = new pako[this._pakoAction]({
raw: true,
level: this._pakoOptions.level || -1 // default compression
});
var self = this;
this._pako.onData = function(data) {
self.push({
data : data,
meta : self.meta
});
};
};
exports.compressWorker = function (compressionOptions) {
return new FlateWorker("Deflate", compressionOptions);
};
exports.uncompressWorker = function () {
return new FlateWorker("Inflate", {});
};

View File

@@ -0,0 +1,539 @@
"use strict";
var utils = require("../utils");
var GenericWorker = require("../stream/GenericWorker");
var utf8 = require("../utf8");
var crc32 = require("../crc32");
var signature = require("../signature");
/**
* Transform an integer into a string in hexadecimal.
* @private
* @param {number} dec the number to convert.
* @param {number} bytes the number of bytes to generate.
* @returns {string} the result.
*/
var decToHex = function(dec, bytes) {
var hex = "", i;
for (i = 0; i < bytes; i++) {
hex += String.fromCharCode(dec & 0xff);
dec = dec >>> 8;
}
return hex;
};
/**
* Generate the UNIX part of the external file attributes.
* @param {Object} unixPermissions the unix permissions or null.
* @param {Boolean} isDir true if the entry is a directory, false otherwise.
* @return {Number} a 32 bit integer.
*
* adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :
*
* TTTTsstrwxrwxrwx0000000000ADVSHR
* ^^^^____________________________ file type, see zipinfo.c (UNX_*)
* ^^^_________________________ setuid, setgid, sticky
* ^^^^^^^^^________________ permissions
* ^^^^^^^^^^______ not used ?
* ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only
*/
var generateUnixExternalFileAttr = function (unixPermissions, isDir) {
var result = unixPermissions;
if (!unixPermissions) {
// I can't use octal values in strict mode, hence the hexa.
// 040775 => 0x41fd
// 0100664 => 0x81b4
result = isDir ? 0x41fd : 0x81b4;
}
return (result & 0xFFFF) << 16;
};
/**
* Generate the DOS part of the external file attributes.
* @param {Object} dosPermissions the dos permissions or null.
* @param {Boolean} isDir true if the entry is a directory, false otherwise.
* @return {Number} a 32 bit integer.
*
* Bit 0 Read-Only
* Bit 1 Hidden
* Bit 2 System
* Bit 3 Volume Label
* Bit 4 Directory
* Bit 5 Archive
*/
var generateDosExternalFileAttr = function (dosPermissions) {
// the dir flag is already set for compatibility
return (dosPermissions || 0) & 0x3F;
};
/**
* Generate the various parts used in the construction of the final zip file.
* @param {Object} streamInfo the hash with information about the compressed file.
* @param {Boolean} streamedContent is the content streamed ?
* @param {Boolean} streamingEnded is the stream finished ?
* @param {number} offset the current offset from the start of the zip file.
* @param {String} platform let's pretend we are this platform (change platform dependents fields)
* @param {Function} encodeFileName the function to encode the file name / comment.
* @return {Object} the zip parts.
*/
var generateZipParts = function(streamInfo, streamedContent, streamingEnded, offset, platform, encodeFileName) {
var file = streamInfo["file"],
compression = streamInfo["compression"],
useCustomEncoding = encodeFileName !== utf8.utf8encode,
encodedFileName = utils.transformTo("string", encodeFileName(file.name)),
utfEncodedFileName = utils.transformTo("string", utf8.utf8encode(file.name)),
comment = file.comment,
encodedComment = utils.transformTo("string", encodeFileName(comment)),
utfEncodedComment = utils.transformTo("string", utf8.utf8encode(comment)),
useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,
useUTF8ForComment = utfEncodedComment.length !== comment.length,
dosTime,
dosDate,
extraFields = "",
unicodePathExtraField = "",
unicodeCommentExtraField = "",
dir = file.dir,
date = file.date;
var dataInfo = {
crc32 : 0,
compressedSize : 0,
uncompressedSize : 0
};
// if the content is streamed, the sizes/crc32 are only available AFTER
// the end of the stream.
if (!streamedContent || streamingEnded) {
dataInfo.crc32 = streamInfo["crc32"];
dataInfo.compressedSize = streamInfo["compressedSize"];
dataInfo.uncompressedSize = streamInfo["uncompressedSize"];
}
var bitflag = 0;
if (streamedContent) {
// Bit 3: the sizes/crc32 are set to zero in the local header.
// The correct values are put in the data descriptor immediately
// following the compressed data.
bitflag |= 0x0008;
}
if (!useCustomEncoding && (useUTF8ForFileName || useUTF8ForComment)) {
// Bit 11: Language encoding flag (EFS).
bitflag |= 0x0800;
}
var extFileAttr = 0;
var versionMadeBy = 0;
if (dir) {
// dos or unix, we set the dos dir flag
extFileAttr |= 0x00010;
}
if(platform === "UNIX") {
versionMadeBy = 0x031E; // UNIX, version 3.0
extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);
} else { // DOS or other, fallback to DOS
versionMadeBy = 0x0014; // DOS, version 2.0
extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);
}
// date
// @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html
// @see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html
// @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html
dosTime = date.getUTCHours();
dosTime = dosTime << 6;
dosTime = dosTime | date.getUTCMinutes();
dosTime = dosTime << 5;
dosTime = dosTime | date.getUTCSeconds() / 2;
dosDate = date.getUTCFullYear() - 1980;
dosDate = dosDate << 4;
dosDate = dosDate | (date.getUTCMonth() + 1);
dosDate = dosDate << 5;
dosDate = dosDate | date.getUTCDate();
if (useUTF8ForFileName) {
// set the unicode path extra field. unzip needs at least one extra
// field to correctly handle unicode path, so using the path is as good
// as any other information. This could improve the situation with
// other archive managers too.
// This field is usually used without the utf8 flag, with a non
// unicode path in the header (winrar, winzip). This helps (a bit)
// with the messy Windows' default compressed folders feature but
// breaks on p7zip which doesn't seek the unicode path extra field.
// So for now, UTF-8 everywhere !
unicodePathExtraField =
// Version
decToHex(1, 1) +
// NameCRC32
decToHex(crc32(encodedFileName), 4) +
// UnicodeName
utfEncodedFileName;
extraFields +=
// Info-ZIP Unicode Path Extra Field
"\x75\x70" +
// size
decToHex(unicodePathExtraField.length, 2) +
// content
unicodePathExtraField;
}
if(useUTF8ForComment) {
unicodeCommentExtraField =
// Version
decToHex(1, 1) +
// CommentCRC32
decToHex(crc32(encodedComment), 4) +
// UnicodeName
utfEncodedComment;
extraFields +=
// Info-ZIP Unicode Path Extra Field
"\x75\x63" +
// size
decToHex(unicodeCommentExtraField.length, 2) +
// content
unicodeCommentExtraField;
}
var header = "";
// version needed to extract
header += "\x0A\x00";
// general purpose bit flag
header += decToHex(bitflag, 2);
// compression method
header += compression.magic;
// last mod file time
header += decToHex(dosTime, 2);
// last mod file date
header += decToHex(dosDate, 2);
// crc-32
header += decToHex(dataInfo.crc32, 4);
// compressed size
header += decToHex(dataInfo.compressedSize, 4);
// uncompressed size
header += decToHex(dataInfo.uncompressedSize, 4);
// file name length
header += decToHex(encodedFileName.length, 2);
// extra field length
header += decToHex(extraFields.length, 2);
var fileRecord = signature.LOCAL_FILE_HEADER + header + encodedFileName + extraFields;
var dirRecord = signature.CENTRAL_FILE_HEADER +
// version made by (00: DOS)
decToHex(versionMadeBy, 2) +
// file header (common to file and central directory)
header +
// file comment length
decToHex(encodedComment.length, 2) +
// disk number start
"\x00\x00" +
// internal file attributes TODO
"\x00\x00" +
// external file attributes
decToHex(extFileAttr, 4) +
// relative offset of local header
decToHex(offset, 4) +
// file name
encodedFileName +
// extra field
extraFields +
// file comment
encodedComment;
return {
fileRecord: fileRecord,
dirRecord: dirRecord
};
};
/**
* Generate the EOCD record.
* @param {Number} entriesCount the number of entries in the zip file.
* @param {Number} centralDirLength the length (in bytes) of the central dir.
* @param {Number} localDirLength the length (in bytes) of the local dir.
* @param {String} comment the zip file comment as a binary string.
* @param {Function} encodeFileName the function to encode the comment.
* @return {String} the EOCD record.
*/
var generateCentralDirectoryEnd = function (entriesCount, centralDirLength, localDirLength, comment, encodeFileName) {
var dirEnd = "";
var encodedComment = utils.transformTo("string", encodeFileName(comment));
// end of central dir signature
dirEnd = signature.CENTRAL_DIRECTORY_END +
// number of this disk
"\x00\x00" +
// number of the disk with the start of the central directory
"\x00\x00" +
// total number of entries in the central directory on this disk
decToHex(entriesCount, 2) +
// total number of entries in the central directory
decToHex(entriesCount, 2) +
// size of the central directory 4 bytes
decToHex(centralDirLength, 4) +
// offset of start of central directory with respect to the starting disk number
decToHex(localDirLength, 4) +
// .ZIP file comment length
decToHex(encodedComment.length, 2) +
// .ZIP file comment
encodedComment;
return dirEnd;
};
/**
* Generate data descriptors for a file entry.
* @param {Object} streamInfo the hash generated by a worker, containing information
* on the file entry.
* @return {String} the data descriptors.
*/
var generateDataDescriptors = function (streamInfo) {
var descriptor = "";
descriptor = signature.DATA_DESCRIPTOR +
// crc-32 4 bytes
decToHex(streamInfo["crc32"], 4) +
// compressed size 4 bytes
decToHex(streamInfo["compressedSize"], 4) +
// uncompressed size 4 bytes
decToHex(streamInfo["uncompressedSize"], 4);
return descriptor;
};
/**
* A worker to concatenate other workers to create a zip file.
* @param {Boolean} streamFiles `true` to stream the content of the files,
* `false` to accumulate it.
* @param {String} comment the comment to use.
* @param {String} platform the platform to use, "UNIX" or "DOS".
* @param {Function} encodeFileName the function to encode file names and comments.
*/
function ZipFileWorker(streamFiles, comment, platform, encodeFileName) {
GenericWorker.call(this, "ZipFileWorker");
// The number of bytes written so far. This doesn't count accumulated chunks.
this.bytesWritten = 0;
// The comment of the zip file
this.zipComment = comment;
// The platform "generating" the zip file.
this.zipPlatform = platform;
// the function to encode file names and comments.
this.encodeFileName = encodeFileName;
// Should we stream the content of the files ?
this.streamFiles = streamFiles;
// If `streamFiles` is false, we will need to accumulate the content of the
// files to calculate sizes / crc32 (and write them *before* the content).
// This boolean indicates if we are accumulating chunks (it will change a lot
// during the lifetime of this worker).
this.accumulate = false;
// The buffer receiving chunks when accumulating content.
this.contentBuffer = [];
// The list of generated directory records.
this.dirRecords = [];
// The offset (in bytes) from the beginning of the zip file for the current source.
this.currentSourceOffset = 0;
// The total number of entries in this zip file.
this.entriesCount = 0;
// the name of the file currently being added, null when handling the end of the zip file.
// Used for the emitted metadata.
this.currentFile = null;
this._sources = [];
}
utils.inherits(ZipFileWorker, GenericWorker);
/**
* @see GenericWorker.push
*/
ZipFileWorker.prototype.push = function (chunk) {
var currentFilePercent = chunk.meta.percent || 0;
var entriesCount = this.entriesCount;
var remainingFiles = this._sources.length;
if(this.accumulate) {
this.contentBuffer.push(chunk);
} else {
this.bytesWritten += chunk.data.length;
GenericWorker.prototype.push.call(this, {
data : chunk.data,
meta : {
currentFile : this.currentFile,
percent : entriesCount ? (currentFilePercent + 100 * (entriesCount - remainingFiles - 1)) / entriesCount : 100
}
});
}
};
/**
* The worker started a new source (an other worker).
* @param {Object} streamInfo the streamInfo object from the new source.
*/
ZipFileWorker.prototype.openedSource = function (streamInfo) {
this.currentSourceOffset = this.bytesWritten;
this.currentFile = streamInfo["file"].name;
var streamedContent = this.streamFiles && !streamInfo["file"].dir;
// don't stream folders (because they don't have any content)
if(streamedContent) {
var record = generateZipParts(streamInfo, streamedContent, false, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
this.push({
data : record.fileRecord,
meta : {percent:0}
});
} else {
// we need to wait for the whole file before pushing anything
this.accumulate = true;
}
};
/**
* The worker finished a source (an other worker).
* @param {Object} streamInfo the streamInfo object from the finished source.
*/
ZipFileWorker.prototype.closedSource = function (streamInfo) {
this.accumulate = false;
var streamedContent = this.streamFiles && !streamInfo["file"].dir;
var record = generateZipParts(streamInfo, streamedContent, true, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
this.dirRecords.push(record.dirRecord);
if(streamedContent) {
// after the streamed file, we put data descriptors
this.push({
data : generateDataDescriptors(streamInfo),
meta : {percent:100}
});
} else {
// the content wasn't streamed, we need to push everything now
// first the file record, then the content
this.push({
data : record.fileRecord,
meta : {percent:0}
});
while(this.contentBuffer.length) {
this.push(this.contentBuffer.shift());
}
}
this.currentFile = null;
};
/**
* @see GenericWorker.flush
*/
ZipFileWorker.prototype.flush = function () {
var localDirLength = this.bytesWritten;
for(var i = 0; i < this.dirRecords.length; i++) {
this.push({
data : this.dirRecords[i],
meta : {percent:100}
});
}
var centralDirLength = this.bytesWritten - localDirLength;
var dirEnd = generateCentralDirectoryEnd(this.dirRecords.length, centralDirLength, localDirLength, this.zipComment, this.encodeFileName);
this.push({
data : dirEnd,
meta : {percent:100}
});
};
/**
* Prepare the next source to be read.
*/
ZipFileWorker.prototype.prepareNextSource = function () {
this.previous = this._sources.shift();
this.openedSource(this.previous.streamInfo);
if (this.isPaused) {
this.previous.pause();
} else {
this.previous.resume();
}
};
/**
* @see GenericWorker.registerPrevious
*/
ZipFileWorker.prototype.registerPrevious = function (previous) {
this._sources.push(previous);
var self = this;
previous.on("data", function (chunk) {
self.processChunk(chunk);
});
previous.on("end", function () {
self.closedSource(self.previous.streamInfo);
if(self._sources.length) {
self.prepareNextSource();
} else {
self.end();
}
});
previous.on("error", function (e) {
self.error(e);
});
return this;
};
/**
* @see GenericWorker.resume
*/
ZipFileWorker.prototype.resume = function () {
if(!GenericWorker.prototype.resume.call(this)) {
return false;
}
if (!this.previous && this._sources.length) {
this.prepareNextSource();
return true;
}
if (!this.previous && !this._sources.length && !this.generatedError) {
this.end();
return true;
}
};
/**
* @see GenericWorker.error
*/
ZipFileWorker.prototype.error = function (e) {
var sources = this._sources;
if(!GenericWorker.prototype.error.call(this, e)) {
return false;
}
for(var i = 0; i < sources.length; i++) {
try {
sources[i].error(e);
} catch(e) {
// the `error` exploded, nothing to do
}
}
return true;
};
/**
* @see GenericWorker.lock
*/
ZipFileWorker.prototype.lock = function () {
GenericWorker.prototype.lock.call(this);
var sources = this._sources;
for(var i = 0; i < sources.length; i++) {
sources[i].lock();
}
};
module.exports = ZipFileWorker;

View File

@@ -0,0 +1,57 @@
"use strict";
var compressions = require("../compressions");
var ZipFileWorker = require("./ZipFileWorker");
/**
* Find the compression to use.
* @param {String} fileCompression the compression defined at the file level, if any.
* @param {String} zipCompression the compression defined at the load() level.
* @return {Object} the compression object to use.
*/
var getCompression = function (fileCompression, zipCompression) {
var compressionName = fileCompression || zipCompression;
var compression = compressions[compressionName];
if (!compression) {
throw new Error(compressionName + " is not a valid compression method !");
}
return compression;
};
/**
* Create a worker to generate a zip file.
* @param {JSZip} zip the JSZip instance at the right root level.
* @param {Object} options to generate the zip file.
* @param {String} comment the comment to use.
*/
exports.generateWorker = function (zip, options, comment) {
var zipFileWorker = new ZipFileWorker(options.streamFiles, comment, options.platform, options.encodeFileName);
var entriesCount = 0;
try {
zip.forEach(function (relativePath, file) {
entriesCount++;
var compression = getCompression(file.options.compression, options.compression);
var compressionOptions = file.options.compressionOptions || options.compressionOptions || {};
var dir = file.dir, date = file.date;
file._compressWorker(compression, compressionOptions)
.withStreamInfo("file", {
name : relativePath,
dir : dir,
date : date,
comment : file.comment || "",
unixPermissions : file.unixPermissions,
dosPermissions : file.dosPermissions
})
.pipe(zipFileWorker);
});
zipFileWorker.entriesCount = entriesCount;
} catch (e) {
zipFileWorker.error(e);
}
return zipFileWorker;
};

View File

@@ -0,0 +1,55 @@
"use strict";
/**
* Representation a of zip file in js
* @constructor
*/
function JSZip() {
// if this constructor is used without `new`, it adds `new` before itself:
if(!(this instanceof JSZip)) {
return new JSZip();
}
if(arguments.length) {
throw new Error("The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");
}
// object containing the files :
// {
// "folder/" : {...},
// "folder/data.txt" : {...}
// }
// NOTE: we use a null prototype because we do not
// want filenames like "toString" coming from a zip file
// to overwrite methods and attributes in a normal Object.
this.files = Object.create(null);
this.comment = null;
// Where we are in the hierarchy
this.root = "";
this.clone = function() {
var newObj = new JSZip();
for (var i in this) {
if (typeof this[i] !== "function") {
newObj[i] = this[i];
}
}
return newObj;
};
}
JSZip.prototype = require("./object");
JSZip.prototype.loadAsync = require("./load");
JSZip.support = require("./support");
JSZip.defaults = require("./defaults");
// TODO find a better way to handle this version,
// a require('package.json').version doesn't work with webpack, see #327
JSZip.version = "3.10.1";
JSZip.loadAsync = function (content, options) {
return new JSZip().loadAsync(content, options);
};
JSZip.external = require("./external");
module.exports = JSZip;

View File

@@ -0,0 +1,11 @@
/*!
JSZip v__VERSION__ - A JavaScript class for generating and reading zip files
<http://stuartk.com/jszip>
(c) 2009-2016 Stuart Knightley <stuart [at] stuartk.com>
Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/main/LICENSE.markdown.
JSZip uses the library pako released under the MIT license :
https://github.com/nodeca/pako/blob/main/LICENSE
*/

View File

@@ -0,0 +1,88 @@
"use strict";
var utils = require("./utils");
var external = require("./external");
var utf8 = require("./utf8");
var ZipEntries = require("./zipEntries");
var Crc32Probe = require("./stream/Crc32Probe");
var nodejsUtils = require("./nodejsUtils");
/**
* Check the CRC32 of an entry.
* @param {ZipEntry} zipEntry the zip entry to check.
* @return {Promise} the result.
*/
function checkEntryCRC32(zipEntry) {
return new external.Promise(function (resolve, reject) {
var worker = zipEntry.decompressed.getContentWorker().pipe(new Crc32Probe());
worker.on("error", function (e) {
reject(e);
})
.on("end", function () {
if (worker.streamInfo.crc32 !== zipEntry.decompressed.crc32) {
reject(new Error("Corrupted zip : CRC32 mismatch"));
} else {
resolve();
}
})
.resume();
});
}
module.exports = function (data, options) {
var zip = this;
options = utils.extend(options || {}, {
base64: false,
checkCRC32: false,
optimizedBinaryString: false,
createFolders: false,
decodeFileName: utf8.utf8decode
});
if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
return external.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file."));
}
return utils.prepareContent("the loaded zip file", data, true, options.optimizedBinaryString, options.base64)
.then(function (data) {
var zipEntries = new ZipEntries(options);
zipEntries.load(data);
return zipEntries;
}).then(function checkCRC32(zipEntries) {
var promises = [external.Promise.resolve(zipEntries)];
var files = zipEntries.files;
if (options.checkCRC32) {
for (var i = 0; i < files.length; i++) {
promises.push(checkEntryCRC32(files[i]));
}
}
return external.Promise.all(promises);
}).then(function addFiles(results) {
var zipEntries = results.shift();
var files = zipEntries.files;
for (var i = 0; i < files.length; i++) {
var input = files[i];
var unsafeName = input.fileNameStr;
var safeName = utils.resolve(input.fileNameStr);
zip.file(safeName, input.decompressed, {
binary: true,
optimizedBinaryString: true,
date: input.date,
dir: input.dir,
comment: input.fileCommentStr.length ? input.fileCommentStr : null,
unixPermissions: input.unixPermissions,
dosPermissions: input.dosPermissions,
createFolders: options.createFolders
});
if (!input.dir) {
zip.file(safeName).unsafeOriginalName = unsafeName;
}
}
if (zipEntries.zipComment.length) {
zip.comment = zipEntries.zipComment;
}
return zip;
});
};

View File

@@ -0,0 +1,74 @@
"use strict";
var utils = require("../utils");
var GenericWorker = require("../stream/GenericWorker");
/**
* A worker that use a nodejs stream as source.
* @constructor
* @param {String} filename the name of the file entry for this stream.
* @param {Readable} stream the nodejs stream.
*/
function NodejsStreamInputAdapter(filename, stream) {
GenericWorker.call(this, "Nodejs stream input adapter for " + filename);
this._upstreamEnded = false;
this._bindStream(stream);
}
utils.inherits(NodejsStreamInputAdapter, GenericWorker);
/**
* Prepare the stream and bind the callbacks on it.
* Do this ASAP on node 0.10 ! A lazy binding doesn't always work.
* @param {Stream} stream the nodejs stream to use.
*/
NodejsStreamInputAdapter.prototype._bindStream = function (stream) {
var self = this;
this._stream = stream;
stream.pause();
stream
.on("data", function (chunk) {
self.push({
data: chunk,
meta : {
percent : 0
}
});
})
.on("error", function (e) {
if(self.isPaused) {
this.generatedError = e;
} else {
self.error(e);
}
})
.on("end", function () {
if(self.isPaused) {
self._upstreamEnded = true;
} else {
self.end();
}
});
};
NodejsStreamInputAdapter.prototype.pause = function () {
if(!GenericWorker.prototype.pause.call(this)) {
return false;
}
this._stream.pause();
return true;
};
NodejsStreamInputAdapter.prototype.resume = function () {
if(!GenericWorker.prototype.resume.call(this)) {
return false;
}
if(this._upstreamEnded) {
this.end();
} else {
this._stream.resume();
}
return true;
};
module.exports = NodejsStreamInputAdapter;

View File

@@ -0,0 +1,42 @@
"use strict";
var Readable = require("readable-stream").Readable;
var utils = require("../utils");
utils.inherits(NodejsStreamOutputAdapter, Readable);
/**
* A nodejs stream using a worker as source.
* @see the SourceWrapper in http://nodejs.org/api/stream.html
* @constructor
* @param {StreamHelper} helper the helper wrapping the worker
* @param {Object} options the nodejs stream options
* @param {Function} updateCb the update callback.
*/
function NodejsStreamOutputAdapter(helper, options, updateCb) {
Readable.call(this, options);
this._helper = helper;
var self = this;
helper.on("data", function (data, meta) {
if (!self.push(data)) {
self._helper.pause();
}
if(updateCb) {
updateCb(meta);
}
})
.on("error", function(e) {
self.emit("error", e);
})
.on("end", function () {
self.push(null);
});
}
NodejsStreamOutputAdapter.prototype._read = function() {
this._helper.resume();
};
module.exports = NodejsStreamOutputAdapter;

View File

@@ -0,0 +1,57 @@
"use strict";
module.exports = {
/**
* True if this is running in Nodejs, will be undefined in a browser.
* In a browser, browserify won't include this file and the whole module
* will be resolved an empty object.
*/
isNode : typeof Buffer !== "undefined",
/**
* Create a new nodejs Buffer from an existing content.
* @param {Object} data the data to pass to the constructor.
* @param {String} encoding the encoding to use.
* @return {Buffer} a new Buffer.
*/
newBufferFrom: function(data, encoding) {
if (Buffer.from && Buffer.from !== Uint8Array.from) {
return Buffer.from(data, encoding);
} else {
if (typeof data === "number") {
// Safeguard for old Node.js versions. On newer versions,
// Buffer.from(number) / Buffer(number, encoding) already throw.
throw new Error("The \"data\" argument must not be a number");
}
return new Buffer(data, encoding);
}
},
/**
* Create a new nodejs Buffer with the specified size.
* @param {Integer} size the size of the buffer.
* @return {Buffer} a new Buffer.
*/
allocBuffer: function (size) {
if (Buffer.alloc) {
return Buffer.alloc(size);
} else {
var buf = new Buffer(size);
buf.fill(0);
return buf;
}
},
/**
* Find out if an object is a Buffer.
* @param {Object} b the object to test.
* @return {Boolean} true if the object is a Buffer, false otherwise.
*/
isBuffer : function(b){
return Buffer.isBuffer(b);
},
isStream : function (obj) {
return obj &&
typeof obj.on === "function" &&
typeof obj.pause === "function" &&
typeof obj.resume === "function";
}
};

View File

@@ -0,0 +1,384 @@
"use strict";
var utf8 = require("./utf8");
var utils = require("./utils");
var GenericWorker = require("./stream/GenericWorker");
var StreamHelper = require("./stream/StreamHelper");
var defaults = require("./defaults");
var CompressedObject = require("./compressedObject");
var ZipObject = require("./zipObject");
var generate = require("./generate");
var nodejsUtils = require("./nodejsUtils");
var NodejsStreamInputAdapter = require("./nodejs/NodejsStreamInputAdapter");
/**
* Add a file in the current folder.
* @private
* @param {string} name the name of the file
* @param {String|ArrayBuffer|Uint8Array|Buffer} data the data of the file
* @param {Object} originalOptions the options of the file
* @return {Object} the new file.
*/
var fileAdd = function(name, data, originalOptions) {
// be sure sub folders exist
var dataType = utils.getTypeOf(data),
parent;
/*
* Correct options.
*/
var o = utils.extend(originalOptions || {}, defaults);
o.date = o.date || new Date();
if (o.compression !== null) {
o.compression = o.compression.toUpperCase();
}
if (typeof o.unixPermissions === "string") {
o.unixPermissions = parseInt(o.unixPermissions, 8);
}
// UNX_IFDIR 0040000 see zipinfo.c
if (o.unixPermissions && (o.unixPermissions & 0x4000)) {
o.dir = true;
}
// Bit 4 Directory
if (o.dosPermissions && (o.dosPermissions & 0x0010)) {
o.dir = true;
}
if (o.dir) {
name = forceTrailingSlash(name);
}
if (o.createFolders && (parent = parentFolder(name))) {
folderAdd.call(this, parent, true);
}
var isUnicodeString = dataType === "string" && o.binary === false && o.base64 === false;
if (!originalOptions || typeof originalOptions.binary === "undefined") {
o.binary = !isUnicodeString;
}
var isCompressedEmpty = (data instanceof CompressedObject) && data.uncompressedSize === 0;
if (isCompressedEmpty || o.dir || !data || data.length === 0) {
o.base64 = false;
o.binary = true;
data = "";
o.compression = "STORE";
dataType = "string";
}
/*
* Convert content to fit.
*/
var zipObjectContent = null;
if (data instanceof CompressedObject || data instanceof GenericWorker) {
zipObjectContent = data;
} else if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
zipObjectContent = new NodejsStreamInputAdapter(name, data);
} else {
zipObjectContent = utils.prepareContent(name, data, o.binary, o.optimizedBinaryString, o.base64);
}
var object = new ZipObject(name, zipObjectContent, o);
this.files[name] = object;
/*
TODO: we can't throw an exception because we have async promises
(we can have a promise of a Date() for example) but returning a
promise is useless because file(name, data) returns the JSZip
object for chaining. Should we break that to allow the user
to catch the error ?
return external.Promise.resolve(zipObjectContent)
.then(function () {
return object;
});
*/
};
/**
* Find the parent folder of the path.
* @private
* @param {string} path the path to use
* @return {string} the parent folder, or ""
*/
var parentFolder = function (path) {
if (path.slice(-1) === "/") {
path = path.substring(0, path.length - 1);
}
var lastSlash = path.lastIndexOf("/");
return (lastSlash > 0) ? path.substring(0, lastSlash) : "";
};
/**
* Returns the path with a slash at the end.
* @private
* @param {String} path the path to check.
* @return {String} the path with a trailing slash.
*/
var forceTrailingSlash = function(path) {
// Check the name ends with a /
if (path.slice(-1) !== "/") {
path += "/"; // IE doesn't like substr(-1)
}
return path;
};
/**
* Add a (sub) folder in the current folder.
* @private
* @param {string} name the folder's name
* @param {boolean=} [createFolders] If true, automatically create sub
* folders. Defaults to false.
* @return {Object} the new folder.
*/
var folderAdd = function(name, createFolders) {
createFolders = (typeof createFolders !== "undefined") ? createFolders : defaults.createFolders;
name = forceTrailingSlash(name);
// Does this folder already exist?
if (!this.files[name]) {
fileAdd.call(this, name, null, {
dir: true,
createFolders: createFolders
});
}
return this.files[name];
};
/**
* Cross-window, cross-Node-context regular expression detection
* @param {Object} object Anything
* @return {Boolean} true if the object is a regular expression,
* false otherwise
*/
function isRegExp(object) {
return Object.prototype.toString.call(object) === "[object RegExp]";
}
// return the actual prototype of JSZip
var out = {
/**
* @see loadAsync
*/
load: function() {
throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
},
/**
* Call a callback function for each entry at this folder level.
* @param {Function} cb the callback function:
* function (relativePath, file) {...}
* It takes 2 arguments : the relative path and the file.
*/
forEach: function(cb) {
var filename, relativePath, file;
// ignore warning about unwanted properties because this.files is a null prototype object
/* eslint-disable-next-line guard-for-in */
for (filename in this.files) {
file = this.files[filename];
relativePath = filename.slice(this.root.length, filename.length);
if (relativePath && filename.slice(0, this.root.length) === this.root) { // the file is in the current root
cb(relativePath, file); // TODO reverse the parameters ? need to be clean AND consistent with the filter search fn...
}
}
},
/**
* Filter nested files/folders with the specified function.
* @param {Function} search the predicate to use :
* function (relativePath, file) {...}
* It takes 2 arguments : the relative path and the file.
* @return {Array} An array of matching elements.
*/
filter: function(search) {
var result = [];
this.forEach(function (relativePath, entry) {
if (search(relativePath, entry)) { // the file matches the function
result.push(entry);
}
});
return result;
},
/**
* Add a file to the zip file, or search a file.
* @param {string|RegExp} name The name of the file to add (if data is defined),
* the name of the file to find (if no data) or a regex to match files.
* @param {String|ArrayBuffer|Uint8Array|Buffer} data The file data, either raw or base64 encoded
* @param {Object} o File options
* @return {JSZip|Object|Array} this JSZip object (when adding a file),
* a file (when searching by string) or an array of files (when searching by regex).
*/
file: function(name, data, o) {
if (arguments.length === 1) {
if (isRegExp(name)) {
var regexp = name;
return this.filter(function(relativePath, file) {
return !file.dir && regexp.test(relativePath);
});
}
else { // text
var obj = this.files[this.root + name];
if (obj && !obj.dir) {
return obj;
} else {
return null;
}
}
}
else { // more than one argument : we have data !
name = this.root + name;
fileAdd.call(this, name, data, o);
}
return this;
},
/**
* Add a directory to the zip file, or search.
* @param {String|RegExp} arg The name of the directory to add, or a regex to search folders.
* @return {JSZip} an object with the new directory as the root, or an array containing matching folders.
*/
folder: function(arg) {
if (!arg) {
return this;
}
if (isRegExp(arg)) {
return this.filter(function(relativePath, file) {
return file.dir && arg.test(relativePath);
});
}
// else, name is a new folder
var name = this.root + arg;
var newFolder = folderAdd.call(this, name);
// Allow chaining by returning a new object with this folder as the root
var ret = this.clone();
ret.root = newFolder.name;
return ret;
},
/**
* Delete a file, or a directory and all sub-files, from the zip
* @param {string} name the name of the file to delete
* @return {JSZip} this JSZip object
*/
remove: function(name) {
name = this.root + name;
var file = this.files[name];
if (!file) {
// Look for any folders
if (name.slice(-1) !== "/") {
name += "/";
}
file = this.files[name];
}
if (file && !file.dir) {
// file
delete this.files[name];
} else {
// maybe a folder, delete recursively
var kids = this.filter(function(relativePath, file) {
return file.name.slice(0, name.length) === name;
});
for (var i = 0; i < kids.length; i++) {
delete this.files[kids[i].name];
}
}
return this;
},
/**
* @deprecated This method has been removed in JSZip 3.0, please check the upgrade guide.
*/
generate: function() {
throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
},
/**
* Generate the complete zip file as an internal stream.
* @param {Object} options the options to generate the zip file :
* - compression, "STORE" by default.
* - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
* @return {StreamHelper} the streamed zip file.
*/
generateInternalStream: function(options) {
var worker, opts = {};
try {
opts = utils.extend(options || {}, {
streamFiles: false,
compression: "STORE",
compressionOptions : null,
type: "",
platform: "DOS",
comment: null,
mimeType: "application/zip",
encodeFileName: utf8.utf8encode
});
opts.type = opts.type.toLowerCase();
opts.compression = opts.compression.toUpperCase();
// "binarystring" is preferred but the internals use "string".
if(opts.type === "binarystring") {
opts.type = "string";
}
if (!opts.type) {
throw new Error("No output type specified.");
}
utils.checkSupport(opts.type);
// accept nodejs `process.platform`
if(
opts.platform === "darwin" ||
opts.platform === "freebsd" ||
opts.platform === "linux" ||
opts.platform === "sunos"
) {
opts.platform = "UNIX";
}
if (opts.platform === "win32") {
opts.platform = "DOS";
}
var comment = opts.comment || this.comment || "";
worker = generate.generateWorker(this, opts, comment);
} catch (e) {
worker = new GenericWorker("error");
worker.error(e);
}
return new StreamHelper(worker, opts.type || "string", opts.mimeType);
},
/**
* Generate the complete zip file asynchronously.
* @see generateInternalStream
*/
generateAsync: function(options, onUpdate) {
return this.generateInternalStream(options).accumulate(onUpdate);
},
/**
* Generate the complete zip file asynchronously.
* @see generateInternalStream
*/
generateNodeStream: function(options, onUpdate) {
options = options || {};
if (!options.type) {
options.type = "nodebuffer";
}
return this.generateInternalStream(options).toNodejsStream(onUpdate);
}
};
module.exports = out;

View File

@@ -0,0 +1,10 @@
"use strict";
/*
* This file is used by module bundlers (browserify/webpack/etc) when
* including a stream implementation. We use "readable-stream" to get a
* consistent behavior between nodejs versions but bundlers often have a shim
* for "stream". Using this shim greatly improve the compatibility and greatly
* reduce the final size of the bundle (only one stream implementation, not
* two).
*/
module.exports = require("stream");

View File

@@ -0,0 +1,57 @@
"use strict";
var DataReader = require("./DataReader");
var utils = require("../utils");
function ArrayReader(data) {
DataReader.call(this, data);
for(var i = 0; i < this.data.length; i++) {
data[i] = data[i] & 0xFF;
}
}
utils.inherits(ArrayReader, DataReader);
/**
* @see DataReader.byteAt
*/
ArrayReader.prototype.byteAt = function(i) {
return this.data[this.zero + i];
};
/**
* @see DataReader.lastIndexOfSignature
*/
ArrayReader.prototype.lastIndexOfSignature = function(sig) {
var sig0 = sig.charCodeAt(0),
sig1 = sig.charCodeAt(1),
sig2 = sig.charCodeAt(2),
sig3 = sig.charCodeAt(3);
for (var i = this.length - 4; i >= 0; --i) {
if (this.data[i] === sig0 && this.data[i + 1] === sig1 && this.data[i + 2] === sig2 && this.data[i + 3] === sig3) {
return i - this.zero;
}
}
return -1;
};
/**
* @see DataReader.readAndCheckSignature
*/
ArrayReader.prototype.readAndCheckSignature = function (sig) {
var sig0 = sig.charCodeAt(0),
sig1 = sig.charCodeAt(1),
sig2 = sig.charCodeAt(2),
sig3 = sig.charCodeAt(3),
data = this.readData(4);
return sig0 === data[0] && sig1 === data[1] && sig2 === data[2] && sig3 === data[3];
};
/**
* @see DataReader.readData
*/
ArrayReader.prototype.readData = function(size) {
this.checkOffset(size);
if(size === 0) {
return [];
}
var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);
this.index += size;
return result;
};
module.exports = ArrayReader;

View File

@@ -0,0 +1,116 @@
"use strict";
var utils = require("../utils");
function DataReader(data) {
this.data = data; // type : see implementation
this.length = data.length;
this.index = 0;
this.zero = 0;
}
DataReader.prototype = {
/**
* Check that the offset will not go too far.
* @param {string} offset the additional offset to check.
* @throws {Error} an Error if the offset is out of bounds.
*/
checkOffset: function(offset) {
this.checkIndex(this.index + offset);
},
/**
* Check that the specified index will not be too far.
* @param {string} newIndex the index to check.
* @throws {Error} an Error if the index is out of bounds.
*/
checkIndex: function(newIndex) {
if (this.length < this.zero + newIndex || newIndex < 0) {
throw new Error("End of data reached (data length = " + this.length + ", asked index = " + (newIndex) + "). Corrupted zip ?");
}
},
/**
* Change the index.
* @param {number} newIndex The new index.
* @throws {Error} if the new index is out of the data.
*/
setIndex: function(newIndex) {
this.checkIndex(newIndex);
this.index = newIndex;
},
/**
* Skip the next n bytes.
* @param {number} n the number of bytes to skip.
* @throws {Error} if the new index is out of the data.
*/
skip: function(n) {
this.setIndex(this.index + n);
},
/**
* Get the byte at the specified index.
* @param {number} i the index to use.
* @return {number} a byte.
*/
byteAt: function() {
// see implementations
},
/**
* Get the next number with a given byte size.
* @param {number} size the number of bytes to read.
* @return {number} the corresponding number.
*/
readInt: function(size) {
var result = 0,
i;
this.checkOffset(size);
for (i = this.index + size - 1; i >= this.index; i--) {
result = (result << 8) + this.byteAt(i);
}
this.index += size;
return result;
},
/**
* Get the next string with a given byte size.
* @param {number} size the number of bytes to read.
* @return {string} the corresponding string.
*/
readString: function(size) {
return utils.transformTo("string", this.readData(size));
},
/**
* Get raw data without conversion, <size> bytes.
* @param {number} size the number of bytes to read.
* @return {Object} the raw data, implementation specific.
*/
readData: function() {
// see implementations
},
/**
* Find the last occurrence of a zip signature (4 bytes).
* @param {string} sig the signature to find.
* @return {number} the index of the last occurrence, -1 if not found.
*/
lastIndexOfSignature: function() {
// see implementations
},
/**
* Read the signature (4 bytes) at the current position and compare it with sig.
* @param {string} sig the expected signature
* @return {boolean} true if the signature matches, false otherwise.
*/
readAndCheckSignature: function() {
// see implementations
},
/**
* Get the next date.
* @return {Date} the date.
*/
readDate: function() {
var dostime = this.readInt(4);
return new Date(Date.UTC(
((dostime >> 25) & 0x7f) + 1980, // year
((dostime >> 21) & 0x0f) - 1, // month
(dostime >> 16) & 0x1f, // day
(dostime >> 11) & 0x1f, // hour
(dostime >> 5) & 0x3f, // minute
(dostime & 0x1f) << 1)); // second
}
};
module.exports = DataReader;

View File

@@ -0,0 +1,19 @@
"use strict";
var Uint8ArrayReader = require("./Uint8ArrayReader");
var utils = require("../utils");
function NodeBufferReader(data) {
Uint8ArrayReader.call(this, data);
}
utils.inherits(NodeBufferReader, Uint8ArrayReader);
/**
* @see DataReader.readData
*/
NodeBufferReader.prototype.readData = function(size) {
this.checkOffset(size);
var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);
this.index += size;
return result;
};
module.exports = NodeBufferReader;

View File

@@ -0,0 +1,38 @@
"use strict";
var DataReader = require("./DataReader");
var utils = require("../utils");
function StringReader(data) {
DataReader.call(this, data);
}
utils.inherits(StringReader, DataReader);
/**
* @see DataReader.byteAt
*/
StringReader.prototype.byteAt = function(i) {
return this.data.charCodeAt(this.zero + i);
};
/**
* @see DataReader.lastIndexOfSignature
*/
StringReader.prototype.lastIndexOfSignature = function(sig) {
return this.data.lastIndexOf(sig) - this.zero;
};
/**
* @see DataReader.readAndCheckSignature
*/
StringReader.prototype.readAndCheckSignature = function (sig) {
var data = this.readData(4);
return sig === data;
};
/**
* @see DataReader.readData
*/
StringReader.prototype.readData = function(size) {
this.checkOffset(size);
// this will work because the constructor applied the "& 0xff" mask.
var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);
this.index += size;
return result;
};
module.exports = StringReader;

View File

@@ -0,0 +1,22 @@
"use strict";
var ArrayReader = require("./ArrayReader");
var utils = require("../utils");
function Uint8ArrayReader(data) {
ArrayReader.call(this, data);
}
utils.inherits(Uint8ArrayReader, ArrayReader);
/**
* @see DataReader.readData
*/
Uint8ArrayReader.prototype.readData = function(size) {
this.checkOffset(size);
if(size === 0) {
// in IE10, when using subarray(idx, idx), we get the array [0x00] instead of [].
return new Uint8Array(0);
}
var result = this.data.subarray(this.zero + this.index, this.zero + this.index + size);
this.index += size;
return result;
};
module.exports = Uint8ArrayReader;

View File

@@ -0,0 +1,28 @@
"use strict";
var utils = require("../utils");
var support = require("../support");
var ArrayReader = require("./ArrayReader");
var StringReader = require("./StringReader");
var NodeBufferReader = require("./NodeBufferReader");
var Uint8ArrayReader = require("./Uint8ArrayReader");
/**
* Create a reader adapted to the data.
* @param {String|ArrayBuffer|Uint8Array|Buffer} data the data to read.
* @return {DataReader} the data reader.
*/
module.exports = function (data) {
var type = utils.getTypeOf(data);
utils.checkSupport(type);
if (type === "string" && !support.uint8array) {
return new StringReader(data);
}
if (type === "nodebuffer") {
return new NodeBufferReader(data);
}
if (support.uint8array) {
return new Uint8ArrayReader(utils.transformTo("uint8array", data));
}
return new ArrayReader(utils.transformTo("array", data));
};

View File

@@ -0,0 +1,7 @@
"use strict";
exports.LOCAL_FILE_HEADER = "PK\x03\x04";
exports.CENTRAL_FILE_HEADER = "PK\x01\x02";
exports.CENTRAL_DIRECTORY_END = "PK\x05\x06";
exports.ZIP64_CENTRAL_DIRECTORY_LOCATOR = "PK\x06\x07";
exports.ZIP64_CENTRAL_DIRECTORY_END = "PK\x06\x06";
exports.DATA_DESCRIPTOR = "PK\x07\x08";

View File

@@ -0,0 +1,26 @@
"use strict";
var GenericWorker = require("./GenericWorker");
var utils = require("../utils");
/**
* A worker which convert chunks to a specified type.
* @constructor
* @param {String} destType the destination type.
*/
function ConvertWorker(destType) {
GenericWorker.call(this, "ConvertWorker to " + destType);
this.destType = destType;
}
utils.inherits(ConvertWorker, GenericWorker);
/**
* @see GenericWorker.processChunk
*/
ConvertWorker.prototype.processChunk = function (chunk) {
this.push({
data : utils.transformTo(this.destType, chunk.data),
meta : chunk.meta
});
};
module.exports = ConvertWorker;

View File

@@ -0,0 +1,24 @@
"use strict";
var GenericWorker = require("./GenericWorker");
var crc32 = require("../crc32");
var utils = require("../utils");
/**
* A worker which calculate the crc32 of the data flowing through.
* @constructor
*/
function Crc32Probe() {
GenericWorker.call(this, "Crc32Probe");
this.withStreamInfo("crc32", 0);
}
utils.inherits(Crc32Probe, GenericWorker);
/**
* @see GenericWorker.processChunk
*/
Crc32Probe.prototype.processChunk = function (chunk) {
this.streamInfo.crc32 = crc32(chunk.data, this.streamInfo.crc32 || 0);
this.push(chunk);
};
module.exports = Crc32Probe;

View File

@@ -0,0 +1,29 @@
"use strict";
var utils = require("../utils");
var GenericWorker = require("./GenericWorker");
/**
* A worker which calculate the total length of the data flowing through.
* @constructor
* @param {String} propName the name used to expose the length
*/
function DataLengthProbe(propName) {
GenericWorker.call(this, "DataLengthProbe for " + propName);
this.propName = propName;
this.withStreamInfo(propName, 0);
}
utils.inherits(DataLengthProbe, GenericWorker);
/**
* @see GenericWorker.processChunk
*/
DataLengthProbe.prototype.processChunk = function (chunk) {
if(chunk) {
var length = this.streamInfo[this.propName] || 0;
this.streamInfo[this.propName] = length + chunk.data.length;
}
GenericWorker.prototype.processChunk.call(this, chunk);
};
module.exports = DataLengthProbe;

View File

@@ -0,0 +1,116 @@
"use strict";
var utils = require("../utils");
var GenericWorker = require("./GenericWorker");
// the size of the generated chunks
// TODO expose this as a public variable
var DEFAULT_BLOCK_SIZE = 16 * 1024;
/**
* A worker that reads a content and emits chunks.
* @constructor
* @param {Promise} dataP the promise of the data to split
*/
function DataWorker(dataP) {
GenericWorker.call(this, "DataWorker");
var self = this;
this.dataIsReady = false;
this.index = 0;
this.max = 0;
this.data = null;
this.type = "";
this._tickScheduled = false;
dataP.then(function (data) {
self.dataIsReady = true;
self.data = data;
self.max = data && data.length || 0;
self.type = utils.getTypeOf(data);
if(!self.isPaused) {
self._tickAndRepeat();
}
}, function (e) {
self.error(e);
});
}
utils.inherits(DataWorker, GenericWorker);
/**
* @see GenericWorker.cleanUp
*/
DataWorker.prototype.cleanUp = function () {
GenericWorker.prototype.cleanUp.call(this);
this.data = null;
};
/**
* @see GenericWorker.resume
*/
DataWorker.prototype.resume = function () {
if(!GenericWorker.prototype.resume.call(this)) {
return false;
}
if (!this._tickScheduled && this.dataIsReady) {
this._tickScheduled = true;
utils.delay(this._tickAndRepeat, [], this);
}
return true;
};
/**
* Trigger a tick a schedule an other call to this function.
*/
DataWorker.prototype._tickAndRepeat = function() {
this._tickScheduled = false;
if(this.isPaused || this.isFinished) {
return;
}
this._tick();
if(!this.isFinished) {
utils.delay(this._tickAndRepeat, [], this);
this._tickScheduled = true;
}
};
/**
* Read and push a chunk.
*/
DataWorker.prototype._tick = function() {
if(this.isPaused || this.isFinished) {
return false;
}
var size = DEFAULT_BLOCK_SIZE;
var data = null, nextIndex = Math.min(this.max, this.index + size);
if (this.index >= this.max) {
// EOF
return this.end();
} else {
switch(this.type) {
case "string":
data = this.data.substring(this.index, nextIndex);
break;
case "uint8array":
data = this.data.subarray(this.index, nextIndex);
break;
case "array":
case "nodebuffer":
data = this.data.slice(this.index, nextIndex);
break;
}
this.index = nextIndex;
return this.push({
data : data,
meta : {
percent : this.max ? this.index / this.max * 100 : 0
}
});
}
};
module.exports = DataWorker;

View File

@@ -0,0 +1,263 @@
"use strict";
/**
* A worker that does nothing but passing chunks to the next one. This is like
* a nodejs stream but with some differences. On the good side :
* - it works on IE 6-9 without any issue / polyfill
* - it weights less than the full dependencies bundled with browserify
* - it forwards errors (no need to declare an error handler EVERYWHERE)
*
* A chunk is an object with 2 attributes : `meta` and `data`. The former is an
* object containing anything (`percent` for example), see each worker for more
* details. The latter is the real data (String, Uint8Array, etc).
*
* @constructor
* @param {String} name the name of the stream (mainly used for debugging purposes)
*/
function GenericWorker(name) {
// the name of the worker
this.name = name || "default";
// an object containing metadata about the workers chain
this.streamInfo = {};
// an error which happened when the worker was paused
this.generatedError = null;
// an object containing metadata to be merged by this worker into the general metadata
this.extraStreamInfo = {};
// true if the stream is paused (and should not do anything), false otherwise
this.isPaused = true;
// true if the stream is finished (and should not do anything), false otherwise
this.isFinished = false;
// true if the stream is locked to prevent further structure updates (pipe), false otherwise
this.isLocked = false;
// the event listeners
this._listeners = {
"data":[],
"end":[],
"error":[]
};
// the previous worker, if any
this.previous = null;
}
GenericWorker.prototype = {
/**
* Push a chunk to the next workers.
* @param {Object} chunk the chunk to push
*/
push : function (chunk) {
this.emit("data", chunk);
},
/**
* End the stream.
* @return {Boolean} true if this call ended the worker, false otherwise.
*/
end : function () {
if (this.isFinished) {
return false;
}
this.flush();
try {
this.emit("end");
this.cleanUp();
this.isFinished = true;
} catch (e) {
this.emit("error", e);
}
return true;
},
/**
* End the stream with an error.
* @param {Error} e the error which caused the premature end.
* @return {Boolean} true if this call ended the worker with an error, false otherwise.
*/
error : function (e) {
if (this.isFinished) {
return false;
}
if(this.isPaused) {
this.generatedError = e;
} else {
this.isFinished = true;
this.emit("error", e);
// in the workers chain exploded in the middle of the chain,
// the error event will go downward but we also need to notify
// workers upward that there has been an error.
if(this.previous) {
this.previous.error(e);
}
this.cleanUp();
}
return true;
},
/**
* Add a callback on an event.
* @param {String} name the name of the event (data, end, error)
* @param {Function} listener the function to call when the event is triggered
* @return {GenericWorker} the current object for chainability
*/
on : function (name, listener) {
this._listeners[name].push(listener);
return this;
},
/**
* Clean any references when a worker is ending.
*/
cleanUp : function () {
this.streamInfo = this.generatedError = this.extraStreamInfo = null;
this._listeners = [];
},
/**
* Trigger an event. This will call registered callback with the provided arg.
* @param {String} name the name of the event (data, end, error)
* @param {Object} arg the argument to call the callback with.
*/
emit : function (name, arg) {
if (this._listeners[name]) {
for(var i = 0; i < this._listeners[name].length; i++) {
this._listeners[name][i].call(this, arg);
}
}
},
/**
* Chain a worker with an other.
* @param {Worker} next the worker receiving events from the current one.
* @return {worker} the next worker for chainability
*/
pipe : function (next) {
return next.registerPrevious(this);
},
/**
* Same as `pipe` in the other direction.
* Using an API with `pipe(next)` is very easy.
* Implementing the API with the point of view of the next one registering
* a source is easier, see the ZipFileWorker.
* @param {Worker} previous the previous worker, sending events to this one
* @return {Worker} the current worker for chainability
*/
registerPrevious : function (previous) {
if (this.isLocked) {
throw new Error("The stream '" + this + "' has already been used.");
}
// sharing the streamInfo...
this.streamInfo = previous.streamInfo;
// ... and adding our own bits
this.mergeStreamInfo();
this.previous = previous;
var self = this;
previous.on("data", function (chunk) {
self.processChunk(chunk);
});
previous.on("end", function () {
self.end();
});
previous.on("error", function (e) {
self.error(e);
});
return this;
},
/**
* Pause the stream so it doesn't send events anymore.
* @return {Boolean} true if this call paused the worker, false otherwise.
*/
pause : function () {
if(this.isPaused || this.isFinished) {
return false;
}
this.isPaused = true;
if(this.previous) {
this.previous.pause();
}
return true;
},
/**
* Resume a paused stream.
* @return {Boolean} true if this call resumed the worker, false otherwise.
*/
resume : function () {
if(!this.isPaused || this.isFinished) {
return false;
}
this.isPaused = false;
// if true, the worker tried to resume but failed
var withError = false;
if(this.generatedError) {
this.error(this.generatedError);
withError = true;
}
if(this.previous) {
this.previous.resume();
}
return !withError;
},
/**
* Flush any remaining bytes as the stream is ending.
*/
flush : function () {},
/**
* Process a chunk. This is usually the method overridden.
* @param {Object} chunk the chunk to process.
*/
processChunk : function(chunk) {
this.push(chunk);
},
/**
* Add a key/value to be added in the workers chain streamInfo once activated.
* @param {String} key the key to use
* @param {Object} value the associated value
* @return {Worker} the current worker for chainability
*/
withStreamInfo : function (key, value) {
this.extraStreamInfo[key] = value;
this.mergeStreamInfo();
return this;
},
/**
* Merge this worker's streamInfo into the chain's streamInfo.
*/
mergeStreamInfo : function () {
for(var key in this.extraStreamInfo) {
if (!Object.prototype.hasOwnProperty.call(this.extraStreamInfo, key)) {
continue;
}
this.streamInfo[key] = this.extraStreamInfo[key];
}
},
/**
* Lock the stream to prevent further updates on the workers chain.
* After calling this method, all calls to pipe will fail.
*/
lock: function () {
if (this.isLocked) {
throw new Error("The stream '" + this + "' has already been used.");
}
this.isLocked = true;
if (this.previous) {
this.previous.lock();
}
},
/**
*
* Pretty print the workers chain.
*/
toString : function () {
var me = "Worker " + this.name;
if (this.previous) {
return this.previous + " -> " + me;
} else {
return me;
}
}
};
module.exports = GenericWorker;

View File

@@ -0,0 +1,214 @@
"use strict";
var utils = require("../utils");
var ConvertWorker = require("./ConvertWorker");
var GenericWorker = require("./GenericWorker");
var base64 = require("../base64");
var support = require("../support");
var external = require("../external");
var NodejsStreamOutputAdapter = null;
if (support.nodestream) {
try {
NodejsStreamOutputAdapter = require("../nodejs/NodejsStreamOutputAdapter");
} catch(e) {
// ignore
}
}
/**
* Apply the final transformation of the data. If the user wants a Blob for
 * example, it's easier to work with a Uint8Array and finally do the
* ArrayBuffer/Blob conversion.
* @param {String} type the name of the final type
* @param {String|Uint8Array|Buffer} content the content to transform
* @param {String} mimeType the mime type of the content, if applicable.
* @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the content in the right format.
*/
function transformZipOutput(type, content, mimeType) {
switch(type) {
case "blob" :
return utils.newBlob(utils.transformTo("arraybuffer", content), mimeType);
case "base64" :
return base64.encode(content);
default :
return utils.transformTo(type, content);
}
}
/**
* Concatenate an array of data of the given type.
* @param {String} type the type of the data in the given array.
* @param {Array} dataArray the array containing the data chunks to concatenate
* @return {String|Uint8Array|Buffer} the concatenated data
 * @throws Error if the requested type is unsupported
*/
function concat (type, dataArray) {
var i, index = 0, res = null, totalLength = 0;
for(i = 0; i < dataArray.length; i++) {
totalLength += dataArray[i].length;
}
switch(type) {
case "string":
return dataArray.join("");
case "array":
return Array.prototype.concat.apply([], dataArray);
case "uint8array":
res = new Uint8Array(totalLength);
for(i = 0; i < dataArray.length; i++) {
res.set(dataArray[i], index);
index += dataArray[i].length;
}
return res;
case "nodebuffer":
return Buffer.concat(dataArray);
default:
throw new Error("concat : unsupported type '" + type + "'");
}
}
/**
 * Listen to a StreamHelper, accumulate its content and concatenate it into a
* complete block.
* @param {StreamHelper} helper the helper to use.
* @param {Function} updateCallback a callback called on each update. Called
* with one arg :
* - the metadata linked to the update received.
* @return Promise the promise for the accumulation.
*/
function accumulate(helper, updateCallback) {
return new external.Promise(function (resolve, reject){
var dataArray = [];
var chunkType = helper._internalType,
resultType = helper._outputType,
mimeType = helper._mimeType;
helper
.on("data", function (data, meta) {
dataArray.push(data);
if(updateCallback) {
updateCallback(meta);
}
})
.on("error", function(err) {
dataArray = [];
reject(err);
})
.on("end", function (){
try {
var result = transformZipOutput(resultType, concat(chunkType, dataArray), mimeType);
resolve(result);
} catch (e) {
reject(e);
}
dataArray = [];
})
.resume();
});
}
/**
 * A helper to easily use workers outside of JSZip.
* @constructor
* @param {Worker} worker the worker to wrap
 * @param {String} outputType the type of data expected by the user
* @param {String} mimeType the mime type of the content, if applicable.
*/
function StreamHelper(worker, outputType, mimeType) {
var internalType = outputType;
switch(outputType) {
case "blob":
case "arraybuffer":
internalType = "uint8array";
break;
case "base64":
internalType = "string";
break;
}
try {
// the type used internally
this._internalType = internalType;
// the type used to output results
this._outputType = outputType;
// the mime type
this._mimeType = mimeType;
utils.checkSupport(internalType);
this._worker = worker.pipe(new ConvertWorker(internalType));
// the last workers can be rewired without issues but we need to
// prevent any updates on previous workers.
worker.lock();
} catch(e) {
this._worker = new GenericWorker("error");
this._worker.error(e);
}
}
StreamHelper.prototype = {
/**
 * Listen to a StreamHelper, accumulate its content and concatenate it into a
* complete block.
* @param {Function} updateCb the update callback.
* @return Promise the promise for the accumulation.
*/
accumulate : function (updateCb) {
return accumulate(this, updateCb);
},
/**
* Add a listener on an event triggered on a stream.
* @param {String} evt the name of the event
* @param {Function} fn the listener
* @return {StreamHelper} the current helper.
*/
on : function (evt, fn) {
var self = this;
if(evt === "data") {
this._worker.on(evt, function (chunk) {
fn.call(self, chunk.data, chunk.meta);
});
} else {
this._worker.on(evt, function () {
utils.delay(fn, arguments, self);
});
}
return this;
},
/**
* Resume the flow of chunks.
* @return {StreamHelper} the current helper.
*/
resume : function () {
utils.delay(this._worker.resume, [], this._worker);
return this;
},
/**
* Pause the flow of chunks.
* @return {StreamHelper} the current helper.
*/
pause : function () {
this._worker.pause();
return this;
},
/**
* Return a nodejs stream for this helper.
* @param {Function} updateCb the update callback.
* @return {NodejsStreamOutputAdapter} the nodejs stream.
*/
toNodejsStream : function (updateCb) {
utils.checkSupport("nodestream");
if (this._outputType !== "nodebuffer") {
// an object stream containing blob/arraybuffer/uint8array/string
// is strange and I don't know if it would be useful.
// If you find this comment and have a good use case, please open a
// bug report !
throw new Error(this._outputType + " is not supported by this method");
}
return new NodejsStreamOutputAdapter(this, {
objectMode : this._outputType !== "nodebuffer"
}, updateCb);
}
};
module.exports = StreamHelper;
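// --- Hedged usage sketch (editor's illustration, not part of JSZip) ---
// It accumulates a hand-fed worker into a single string; real callers get
// their worker from a ZipObject rather than pushing chunks manually.
var demoWorker = new GenericWorker("demo");
new StreamHelper(demoWorker, "string", "")
    .accumulate()
    .then(function (result) {
        console.log(result); // logs "chunk by chunk"
    });
demoWorker.push({data: "chunk ", meta: {}});
demoWorker.push({data: "by chunk", meta: {}});
demoWorker.end();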

View File

@@ -0,0 +1,38 @@
"use strict";
exports.base64 = true;
exports.array = true;
exports.string = true;
exports.arraybuffer = typeof ArrayBuffer !== "undefined" && typeof Uint8Array !== "undefined";
exports.nodebuffer = typeof Buffer !== "undefined";
// contains true if JSZip can read/generate Uint8Array, false otherwise.
exports.uint8array = typeof Uint8Array !== "undefined";
if (typeof ArrayBuffer === "undefined") {
exports.blob = false;
}
else {
var buffer = new ArrayBuffer(0);
try {
exports.blob = new Blob([buffer], {
type: "application/zip"
}).size === 0;
}
catch (e) {
try {
var Builder = self.BlobBuilder || self.WebKitBlobBuilder || self.MozBlobBuilder || self.MSBlobBuilder;
var builder = new Builder();
builder.append(buffer);
exports.blob = builder.getBlob("application/zip").size === 0;
}
catch (e) {
exports.blob = false;
}
}
}
try {
exports.nodestream = !!require("readable-stream").Readable;
} catch(e) {
exports.nodestream = false;
}
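// --- Hedged editor's note ---
// Code elsewhere in this library branches on these flags, for example:
// var decodeParamType = support.uint8array ? "uint8array" : "array";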

View File

@@ -0,0 +1,275 @@
"use strict";
var utils = require("./utils");
var support = require("./support");
var nodejsUtils = require("./nodejsUtils");
var GenericWorker = require("./stream/GenericWorker");
/**
* The following functions come from pako, from pako/lib/utils/strings
* released under the MIT license, see pako https://github.com/nodeca/pako/
*/
// Table with utf8 lengths (calculated by first byte of sequence)
// Note that 5 & 6-byte values and some 4-byte values cannot be represented in JS,
// because max possible codepoint is 0x10ffff
var _utf8len = new Array(256);
for (var i=0; i<256; i++) {
_utf8len[i] = (i >= 252 ? 6 : i >= 248 ? 5 : i >= 240 ? 4 : i >= 224 ? 3 : i >= 192 ? 2 : 1);
}
_utf8len[254] = _utf8len[255] = 1; // Invalid sequence start
// convert string to array (typed, when possible)
var string2buf = function (str) {
var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;
// count binary size
for (m_pos = 0; m_pos < str_len; m_pos++) {
c = str.charCodeAt(m_pos);
if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {
c2 = str.charCodeAt(m_pos+1);
if ((c2 & 0xfc00) === 0xdc00) {
c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
m_pos++;
}
}
buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;
}
// allocate buffer
if (support.uint8array) {
buf = new Uint8Array(buf_len);
} else {
buf = new Array(buf_len);
}
// convert
for (i=0, m_pos = 0; i < buf_len; m_pos++) {
c = str.charCodeAt(m_pos);
if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {
c2 = str.charCodeAt(m_pos+1);
if ((c2 & 0xfc00) === 0xdc00) {
c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
m_pos++;
}
}
if (c < 0x80) {
/* one byte */
buf[i++] = c;
} else if (c < 0x800) {
/* two bytes */
buf[i++] = 0xC0 | (c >>> 6);
buf[i++] = 0x80 | (c & 0x3f);
} else if (c < 0x10000) {
/* three bytes */
buf[i++] = 0xE0 | (c >>> 12);
buf[i++] = 0x80 | (c >>> 6 & 0x3f);
buf[i++] = 0x80 | (c & 0x3f);
} else {
/* four bytes */
buf[i++] = 0xf0 | (c >>> 18);
buf[i++] = 0x80 | (c >>> 12 & 0x3f);
buf[i++] = 0x80 | (c >>> 6 & 0x3f);
buf[i++] = 0x80 | (c & 0x3f);
}
}
return buf;
};
// Calculate the max possible position in a utf8 buffer that will not
// break a sequence. If that's not possible (very small limits), return
// the max size as is.
//
// buf[] - utf8 bytes array
// max - length limit (mandatory);
var utf8border = function(buf, max) {
var pos;
max = max || buf.length;
if (max > buf.length) { max = buf.length; }
// go back from last position, until start of sequence found
pos = max-1;
while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }
// Edge case: a very small or broken sequence;
// return max, because we should return something anyway.
if (pos < 0) { return max; }
// If we came to the start of the buffer, the buffer is too small;
// return max too.
if (pos === 0) { return max; }
return (pos + _utf8len[buf[pos]] > max) ? pos : max;
};
// convert array to string
var buf2string = function (buf) {
var i, out, c, c_len;
var len = buf.length;
// Reserve max possible length (2 words per char)
// NB: for unknown reasons, Array is significantly faster for
// String.fromCharCode.apply than Uint16Array.
var utf16buf = new Array(len*2);
for (out=0, i=0; i<len;) {
c = buf[i++];
// quick process ascii
if (c < 0x80) { utf16buf[out++] = c; continue; }
c_len = _utf8len[c];
// skip 5 & 6 byte codes
if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len-1; continue; }
// apply mask on first byte
c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;
// join the rest
while (c_len > 1 && i < len) {
c = (c << 6) | (buf[i++] & 0x3f);
c_len--;
}
// terminated by end of string?
if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }
if (c < 0x10000) {
utf16buf[out++] = c;
} else {
c -= 0x10000;
utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);
utf16buf[out++] = 0xdc00 | (c & 0x3ff);
}
}
// shrinkBuf(utf16buf, out)
if (utf16buf.length !== out) {
if(utf16buf.subarray) {
utf16buf = utf16buf.subarray(0, out);
} else {
utf16buf.length = out;
}
}
// return String.fromCharCode.apply(null, utf16buf);
return utils.applyFromCharCode(utf16buf);
};
// That's all for the pako functions.
/**
* Transform a javascript string into an array (typed if possible) of bytes,
* UTF-8 encoded.
* @param {String} str the string to encode
* @return {Array|Uint8Array|Buffer} the UTF-8 encoded string.
*/
exports.utf8encode = function utf8encode(str) {
if (support.nodebuffer) {
return nodejsUtils.newBufferFrom(str, "utf-8");
}
return string2buf(str);
};
/**
* Transform a bytes array (or a representation) representing an UTF-8 encoded
* string into a javascript string.
 * @param {Array|Uint8Array|Buffer} buf the data to decode
* @return {String} the decoded string.
*/
exports.utf8decode = function utf8decode(buf) {
if (support.nodebuffer) {
return utils.transformTo("nodebuffer", buf).toString("utf-8");
}
buf = utils.transformTo(support.uint8array ? "uint8array" : "array", buf);
return buf2string(buf);
};
/**
* A worker to decode utf8 encoded binary chunks into string chunks.
* @constructor
*/
function Utf8DecodeWorker() {
GenericWorker.call(this, "utf-8 decode");
// the last bytes if a chunk didn't end with a complete codepoint.
this.leftOver = null;
}
utils.inherits(Utf8DecodeWorker, GenericWorker);
/**
* @see GenericWorker.processChunk
*/
Utf8DecodeWorker.prototype.processChunk = function (chunk) {
var data = utils.transformTo(support.uint8array ? "uint8array" : "array", chunk.data);
// 1st step, re-use what's left of the previous chunk
if (this.leftOver && this.leftOver.length) {
if(support.uint8array) {
var previousData = data;
data = new Uint8Array(previousData.length + this.leftOver.length);
data.set(this.leftOver, 0);
data.set(previousData, this.leftOver.length);
} else {
data = this.leftOver.concat(data);
}
this.leftOver = null;
}
var nextBoundary = utf8border(data);
var usableData = data;
if (nextBoundary !== data.length) {
if (support.uint8array) {
usableData = data.subarray(0, nextBoundary);
this.leftOver = data.subarray(nextBoundary, data.length);
} else {
usableData = data.slice(0, nextBoundary);
this.leftOver = data.slice(nextBoundary, data.length);
}
}
this.push({
data : exports.utf8decode(usableData),
meta : chunk.meta
});
};
/**
* @see GenericWorker.flush
*/
Utf8DecodeWorker.prototype.flush = function () {
if(this.leftOver && this.leftOver.length) {
this.push({
data : exports.utf8decode(this.leftOver),
meta : {}
});
this.leftOver = null;
}
};
exports.Utf8DecodeWorker = Utf8DecodeWorker;
/**
 * A worker to encode string chunks into utf8 encoded binary chunks.
* @constructor
*/
function Utf8EncodeWorker() {
GenericWorker.call(this, "utf-8 encode");
}
utils.inherits(Utf8EncodeWorker, GenericWorker);
/**
* @see GenericWorker.processChunk
*/
Utf8EncodeWorker.prototype.processChunk = function (chunk) {
this.push({
data : exports.utf8encode(chunk.data),
meta : chunk.meta
});
};
exports.Utf8EncodeWorker = Utf8EncodeWorker;
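// --- Hedged round-trip sketch (editor's illustration, not part of JSZip) ---
// The concrete type returned by utf8encode depends on the support flags
// (Buffer, Uint8Array or Array); utf8decode accepts any of them.
var demoEncoded = exports.utf8encode("héllo");
console.log(exports.utf8decode(demoEncoded)); // logs "héllo"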

View File

@@ -0,0 +1,501 @@
"use strict";
var support = require("./support");
var base64 = require("./base64");
var nodejsUtils = require("./nodejsUtils");
var external = require("./external");
require("setimmediate");
/**
 * Convert a string used as a "binary string": it should represent a byte
 * array but may contain char codes > 255. Keep only the lowest byte of
 * each char code and return the byte array.
* @param {String} str the string to transform.
* @return {Array|Uint8Array} the string in a binary format.
*/
function string2binary(str) {
var result = null;
if (support.uint8array) {
result = new Uint8Array(str.length);
} else {
result = new Array(str.length);
}
return stringToArrayLike(str, result);
}
/**
* Create a new blob with the given content and the given type.
* @param {String|ArrayBuffer} part the content to put in the blob. DO NOT use
* an Uint8Array because the stock browser of android 4 won't accept it (it
* will be silently converted to a string, "[object Uint8Array]").
*
* Use only ONE part to build the blob to avoid a memory leak in IE11 / Edge:
* when a large amount of Array is used to create the Blob, the amount of
* memory consumed is nearly 100 times the original data amount.
*
* @param {String} type the mime type of the blob.
* @return {Blob} the created blob.
*/
exports.newBlob = function(part, type) {
exports.checkSupport("blob");
try {
// Blob constructor
return new Blob([part], {
type: type
});
}
catch (e) {
try {
// deprecated, browser only, old way
var Builder = self.BlobBuilder || self.WebKitBlobBuilder || self.MozBlobBuilder || self.MSBlobBuilder;
var builder = new Builder();
builder.append(part);
return builder.getBlob(type);
}
catch (e) {
// nothing worked, give up
throw new Error("Bug : can't construct the Blob.");
}
}
};
/**
* The identity function.
* @param {Object} input the input.
* @return {Object} the same input.
*/
function identity(input) {
return input;
}
/**
* Fill in an array with a string.
* @param {String} str the string to use.
* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to fill in (will be mutated).
* @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated array.
*/
function stringToArrayLike(str, array) {
for (var i = 0; i < str.length; ++i) {
array[i] = str.charCodeAt(i) & 0xFF;
}
return array;
}
/**
 * A helper for the function arrayLikeToString.
* This contains static information and functions that
* can be optimized by the browser JIT compiler.
*/
var arrayToStringHelper = {
/**
* Transform an array of int into a string, chunk by chunk.
 * See the performance notes on arrayLikeToString.
* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
* @param {String} type the type of the array.
* @param {Integer} chunk the chunk size.
* @return {String} the resulting string.
* @throws Error if the chunk is too big for the stack.
*/
stringifyByChunk: function(array, type, chunk) {
var result = [], k = 0, len = array.length;
// shortcut
if (len <= chunk) {
return String.fromCharCode.apply(null, array);
}
while (k < len) {
if (type === "array" || type === "nodebuffer") {
result.push(String.fromCharCode.apply(null, array.slice(k, Math.min(k + chunk, len))));
}
else {
result.push(String.fromCharCode.apply(null, array.subarray(k, Math.min(k + chunk, len))));
}
k += chunk;
}
return result.join("");
},
/**
* Call String.fromCharCode on every item in the array.
 * This is the naive implementation, which generates a lot of intermediate strings.
 * It should only be used when everything else fails.
* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
* @return {String} the result.
*/
stringifyByChar: function(array){
var resultStr = "";
for(var i = 0; i < array.length; i++) {
resultStr += String.fromCharCode(array[i]);
}
return resultStr;
},
applyCanBeUsed : {
/**
 * true if the browser allows String.fromCharCode to be used on a Uint8Array
*/
uint8array : (function () {
try {
return support.uint8array && String.fromCharCode.apply(null, new Uint8Array(1)).length === 1;
} catch (e) {
return false;
}
})(),
/**
 * true if the browser allows String.fromCharCode to be used on a nodejs Buffer.
*/
nodebuffer : (function () {
try {
return support.nodebuffer && String.fromCharCode.apply(null, nodejsUtils.allocBuffer(1)).length === 1;
} catch (e) {
return false;
}
})()
}
};
/**
* Transform an array-like object to a string.
* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
* @return {String} the result.
*/
function arrayLikeToString(array) {
// Performances notes :
// --------------------
// String.fromCharCode.apply(null, array) is the fastest, see
// http://jsperf.com/converting-a-uint8array-to-a-string/2
// but the stack is limited (and we can get huge arrays !).
//
// result += String.fromCharCode(array[i]); generates too many strings!
//
// This code is inspired by http://jsperf.com/arraybuffer-to-string-apply-performance/2
// TODO : we now have workers that split the work. Do we still need that ?
var chunk = 65536,
type = exports.getTypeOf(array),
canUseApply = true;
if (type === "uint8array") {
canUseApply = arrayToStringHelper.applyCanBeUsed.uint8array;
} else if (type === "nodebuffer") {
canUseApply = arrayToStringHelper.applyCanBeUsed.nodebuffer;
}
if (canUseApply) {
while (chunk > 1) {
try {
return arrayToStringHelper.stringifyByChunk(array, type, chunk);
} catch (e) {
chunk = Math.floor(chunk / 2);
}
}
}
// no apply or chunk error : slow and painful algorithm
// default browser on android 4.*
return arrayToStringHelper.stringifyByChar(array);
}
exports.applyFromCharCode = arrayLikeToString;
/**
* Copy the data from an array-like to an other array-like.
* @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayFrom the origin array.
* @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayTo the destination array which will be mutated.
* @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated destination array.
*/
function arrayLikeToArrayLike(arrayFrom, arrayTo) {
for (var i = 0; i < arrayFrom.length; i++) {
arrayTo[i] = arrayFrom[i];
}
return arrayTo;
}
// a matrix containing functions to transform everything into everything.
var transform = {};
// string to ?
transform["string"] = {
"string": identity,
"array": function(input) {
return stringToArrayLike(input, new Array(input.length));
},
"arraybuffer": function(input) {
return transform["string"]["uint8array"](input).buffer;
},
"uint8array": function(input) {
return stringToArrayLike(input, new Uint8Array(input.length));
},
"nodebuffer": function(input) {
return stringToArrayLike(input, nodejsUtils.allocBuffer(input.length));
}
};
// array to ?
transform["array"] = {
"string": arrayLikeToString,
"array": identity,
"arraybuffer": function(input) {
return (new Uint8Array(input)).buffer;
},
"uint8array": function(input) {
return new Uint8Array(input);
},
"nodebuffer": function(input) {
return nodejsUtils.newBufferFrom(input);
}
};
// arraybuffer to ?
transform["arraybuffer"] = {
"string": function(input) {
return arrayLikeToString(new Uint8Array(input));
},
"array": function(input) {
return arrayLikeToArrayLike(new Uint8Array(input), new Array(input.byteLength));
},
"arraybuffer": identity,
"uint8array": function(input) {
return new Uint8Array(input);
},
"nodebuffer": function(input) {
return nodejsUtils.newBufferFrom(new Uint8Array(input));
}
};
// uint8array to ?
transform["uint8array"] = {
"string": arrayLikeToString,
"array": function(input) {
return arrayLikeToArrayLike(input, new Array(input.length));
},
"arraybuffer": function(input) {
return input.buffer;
},
"uint8array": identity,
"nodebuffer": function(input) {
return nodejsUtils.newBufferFrom(input);
}
};
// nodebuffer to ?
transform["nodebuffer"] = {
"string": arrayLikeToString,
"array": function(input) {
return arrayLikeToArrayLike(input, new Array(input.length));
},
"arraybuffer": function(input) {
return transform["nodebuffer"]["uint8array"](input).buffer;
},
"uint8array": function(input) {
return arrayLikeToArrayLike(input, new Uint8Array(input.length));
},
"nodebuffer": identity
};
/**
* Transform an input into any type.
 * The supported output types are: string, array, uint8array, arraybuffer, nodebuffer.
 * If no output type is specified, the unmodified input will be returned.
 * @param {String} outputType the output type.
 * @param {String|Array|ArrayBuffer|Uint8Array|Buffer} input the input to convert.
 * @return {String|Array|ArrayBuffer|Uint8Array|Buffer} the converted input.
 * @throws {Error} an Error if the browser doesn't support the requested output type.
*/
exports.transformTo = function(outputType, input) {
if (!input) {
// undefined, null, etc
// an empty string won't harm.
input = "";
}
if (!outputType) {
return input;
}
exports.checkSupport(outputType);
var inputType = exports.getTypeOf(input);
var result = transform[inputType][outputType](input);
return result;
};
/**
* Resolve all relative path components, "." and "..", in a path. If these relative components
* traverse above the root then the resulting path will only contain the final path component.
*
* All empty components, e.g. "//", are removed.
* @param {string} path A path with / or \ separators
* @returns {string} The path with all relative path components resolved.
*/
exports.resolve = function(path) {
var parts = path.split("/");
var result = [];
for (var index = 0; index < parts.length; index++) {
var part = parts[index];
// Allow the first and last component to be empty for trailing slashes.
if (part === "." || (part === "" && index !== 0 && index !== parts.length - 1)) {
continue;
} else if (part === "..") {
result.pop();
} else {
result.push(part);
}
}
return result.join("/");
};
/**
* Return the type of the input.
 * The type will be in a format valid for JSZip.utils.transformTo: string, array, uint8array, arraybuffer, nodebuffer.
* @param {Object} input the input to identify.
* @return {String} the (lowercase) type of the input.
*/
exports.getTypeOf = function(input) {
if (typeof input === "string") {
return "string";
}
if (Object.prototype.toString.call(input) === "[object Array]") {
return "array";
}
if (support.nodebuffer && nodejsUtils.isBuffer(input)) {
return "nodebuffer";
}
if (support.uint8array && input instanceof Uint8Array) {
return "uint8array";
}
if (support.arraybuffer && input instanceof ArrayBuffer) {
return "arraybuffer";
}
};
/**
* Throw an exception if the type is not supported.
* @param {String} type the type to check.
* @throws {Error} an Error if the browser doesn't support the requested type.
*/
exports.checkSupport = function(type) {
var supported = support[type.toLowerCase()];
if (!supported) {
throw new Error(type + " is not supported by this platform");
}
};
exports.MAX_VALUE_16BITS = 65535;
exports.MAX_VALUE_32BITS = -1; // well, "\xFF\xFF\xFF\xFF" is parsed as -1
/**
* Prettify a string read as binary.
* @param {string} str the string to prettify.
* @return {string} a pretty string.
*/
exports.pretty = function(str) {
var res = "",
code, i;
for (i = 0; i < (str || "").length; i++) {
code = str.charCodeAt(i);
res += "\\x" + (code < 16 ? "0" : "") + code.toString(16).toUpperCase();
}
return res;
};
/**
* Defer the call of a function.
* @param {Function} callback the function to call asynchronously.
 * @param {Array} args the arguments to give to the callback.
 * @param {Object} self the value of "this" when calling the callback.
 */
exports.delay = function(callback, args, self) {
setImmediate(function () {
callback.apply(self || null, args || []);
});
};
/**
 * Extends a prototype with another, without calling a constructor with
* side effects. Inspired by nodejs' `utils.inherits`
* @param {Function} ctor the constructor to augment
* @param {Function} superCtor the parent constructor to use
*/
exports.inherits = function (ctor, superCtor) {
var Obj = function() {};
Obj.prototype = superCtor.prototype;
ctor.prototype = new Obj();
};
/**
* Merge the objects passed as parameters into a new one.
* @private
* @param {...Object} var_args All objects to merge.
* @return {Object} a new object with the data of the others.
*/
exports.extend = function() {
var result = {}, i, attr;
for (i = 0; i < arguments.length; i++) { // arguments is not enumerable in some browsers
for (attr in arguments[i]) {
if (Object.prototype.hasOwnProperty.call(arguments[i], attr) && typeof result[attr] === "undefined") {
result[attr] = arguments[i][attr];
}
}
}
return result;
};
/**
* Transform arbitrary content into a Promise.
* @param {String} name a name for the content being processed.
* @param {Object} inputData the content to process.
 * @param {Boolean} isBinary true if the content is not a unicode string
* @param {Boolean} isOptimizedBinaryString true if the string content only has one byte per character.
* @param {Boolean} isBase64 true if the string content is encoded with base64.
* @return {Promise} a promise in a format usable by JSZip.
*/
exports.prepareContent = function(name, inputData, isBinary, isOptimizedBinaryString, isBase64) {
// if inputData is already a promise, this flattens it.
var promise = external.Promise.resolve(inputData).then(function(data) {
var isBlob = support.blob && (data instanceof Blob || ["[object File]", "[object Blob]"].indexOf(Object.prototype.toString.call(data)) !== -1);
if (isBlob && typeof FileReader !== "undefined") {
return new external.Promise(function (resolve, reject) {
var reader = new FileReader();
reader.onload = function(e) {
resolve(e.target.result);
};
reader.onerror = function(e) {
reject(e.target.error);
};
reader.readAsArrayBuffer(data);
});
} else {
return data;
}
});
return promise.then(function(data) {
var dataType = exports.getTypeOf(data);
if (!dataType) {
return external.Promise.reject(
new Error("Can't read the data of '" + name + "'. Is it " +
"in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?")
);
}
// special case : it's way easier to work with Uint8Array than with ArrayBuffer
if (dataType === "arraybuffer") {
data = exports.transformTo("uint8array", data);
} else if (dataType === "string") {
if (isBase64) {
data = base64.decode(data);
}
else if (isBinary) {
// optimizedBinaryString === true means that the file has already been filtered with a 0xFF mask
if (isOptimizedBinaryString !== true) {
// this is a string, not in a base64 format.
// Be sure that this is a correct "binary string"
data = string2binary(data);
}
}
}
return data;
});
};
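// --- Hedged usage sketch (editor's illustration, not part of JSZip) ---
// Two of the helpers above, exercised directly; the uint8array branch is
// guarded by the support flag so the sketch stays safe everywhere.
if (support.uint8array) {
    console.log(exports.transformTo("uint8array", "abc")); // Uint8Array [97, 98, 99]
}
console.log(exports.resolve("a/b/../c//d/")); // logs "a/c/d/"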

View File

@@ -0,0 +1,261 @@
"use strict";
var readerFor = require("./reader/readerFor");
var utils = require("./utils");
var sig = require("./signature");
var ZipEntry = require("./zipEntry");
var support = require("./support");
// class ZipEntries {{{
/**
* All the entries in the zip file.
* @constructor
* @param {Object} loadOptions Options for loading the stream.
*/
function ZipEntries(loadOptions) {
this.files = [];
this.loadOptions = loadOptions;
}
ZipEntries.prototype = {
/**
* Check that the reader is on the specified signature.
* @param {string} expectedSignature the expected signature.
 * @throws {Error} if it is another signature.
*/
checkSignature: function(expectedSignature) {
if (!this.reader.readAndCheckSignature(expectedSignature)) {
this.reader.index -= 4;
var signature = this.reader.readString(4);
throw new Error("Corrupted zip or bug: unexpected signature " + "(" + utils.pretty(signature) + ", expected " + utils.pretty(expectedSignature) + ")");
}
},
/**
* Check if the given signature is at the given index.
* @param {number} askedIndex the index to check.
* @param {string} expectedSignature the signature to expect.
* @return {boolean} true if the signature is here, false otherwise.
*/
isSignature: function(askedIndex, expectedSignature) {
var currentIndex = this.reader.index;
this.reader.setIndex(askedIndex);
var signature = this.reader.readString(4);
var result = signature === expectedSignature;
this.reader.setIndex(currentIndex);
return result;
},
/**
* Read the end of the central directory.
*/
readBlockEndOfCentral: function() {
this.diskNumber = this.reader.readInt(2);
this.diskWithCentralDirStart = this.reader.readInt(2);
this.centralDirRecordsOnThisDisk = this.reader.readInt(2);
this.centralDirRecords = this.reader.readInt(2);
this.centralDirSize = this.reader.readInt(4);
this.centralDirOffset = this.reader.readInt(4);
this.zipCommentLength = this.reader.readInt(2);
// warning: the encoding depends on the system locale.
// On a linux machine with LANG=en_US.utf8, this field is utf8 encoded.
// On a windows machine, this field is encoded with the localized windows code page.
var zipComment = this.reader.readData(this.zipCommentLength);
var decodeParamType = support.uint8array ? "uint8array" : "array";
// To get consistent behavior with the generation part, we will assume that
// this is utf8 encoded unless specified otherwise.
var decodeContent = utils.transformTo(decodeParamType, zipComment);
this.zipComment = this.loadOptions.decodeFileName(decodeContent);
},
/**
* Read the end of the Zip 64 central directory.
 * Not merged with the method readEndOfCentral:
 * the end of central directory record can coexist with its Zip64 sibling,
 * and we don't want to read the wrong number of bytes!
*/
readBlockZip64EndOfCentral: function() {
this.zip64EndOfCentralSize = this.reader.readInt(8);
this.reader.skip(4);
// this.versionMadeBy = this.reader.readString(2);
// this.versionNeeded = this.reader.readInt(2);
this.diskNumber = this.reader.readInt(4);
this.diskWithCentralDirStart = this.reader.readInt(4);
this.centralDirRecordsOnThisDisk = this.reader.readInt(8);
this.centralDirRecords = this.reader.readInt(8);
this.centralDirSize = this.reader.readInt(8);
this.centralDirOffset = this.reader.readInt(8);
this.zip64ExtensibleData = {};
var extraDataSize = this.zip64EndOfCentralSize - 44,
index = 0,
extraFieldId,
extraFieldLength,
extraFieldValue;
while (index < extraDataSize) {
extraFieldId = this.reader.readInt(2);
extraFieldLength = this.reader.readInt(4);
extraFieldValue = this.reader.readData(extraFieldLength);
this.zip64ExtensibleData[extraFieldId] = {
id: extraFieldId,
length: extraFieldLength,
value: extraFieldValue
};
}
},
/**
* Read the end of the Zip 64 central directory locator.
*/
readBlockZip64EndOfCentralLocator: function() {
this.diskWithZip64CentralDirStart = this.reader.readInt(4);
this.relativeOffsetEndOfZip64CentralDir = this.reader.readInt(8);
this.disksCount = this.reader.readInt(4);
if (this.disksCount > 1) {
throw new Error("Multi-volumes zip are not supported");
}
},
/**
* Read the local files, based on the offset read in the central part.
*/
readLocalFiles: function() {
var i, file;
for (i = 0; i < this.files.length; i++) {
file = this.files[i];
this.reader.setIndex(file.localHeaderOffset);
this.checkSignature(sig.LOCAL_FILE_HEADER);
file.readLocalPart(this.reader);
file.handleUTF8();
file.processAttributes();
}
},
/**
* Read the central directory.
*/
readCentralDir: function() {
var file;
this.reader.setIndex(this.centralDirOffset);
while (this.reader.readAndCheckSignature(sig.CENTRAL_FILE_HEADER)) {
file = new ZipEntry({
zip64: this.zip64
}, this.loadOptions);
file.readCentralPart(this.reader);
this.files.push(file);
}
if (this.centralDirRecords !== this.files.length) {
if (this.centralDirRecords !== 0 && this.files.length === 0) {
// We expected some records but couldn't find ANY.
// This is really suspicious, as if something went wrong.
throw new Error("Corrupted zip or bug: expected " + this.centralDirRecords + " records in central dir, got " + this.files.length);
} else {
// We found some records but not all.
// Something is wrong but we got something for the user: no error here.
// console.warn("expected", this.centralDirRecords, "records in central dir, got", this.files.length);
}
}
},
/**
* Read the end of central directory.
*/
readEndOfCentral: function() {
var offset = this.reader.lastIndexOfSignature(sig.CENTRAL_DIRECTORY_END);
if (offset < 0) {
// Check if the content is a truncated zip or complete garbage.
// A "LOCAL_FILE_HEADER" is not required at the beginning (a
// self-extracting zip, for example) but it can give a good hint.
// If an ajax request was used without responseType, we will also
// get unreadable data.
var isGarbage = !this.isSignature(0, sig.LOCAL_FILE_HEADER);
if (isGarbage) {
throw new Error("Can't find end of central directory : is this a zip file ? " +
"If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html");
} else {
throw new Error("Corrupted zip: can't find end of central directory");
}
}
this.reader.setIndex(offset);
var endOfCentralDirOffset = offset;
this.checkSignature(sig.CENTRAL_DIRECTORY_END);
this.readBlockEndOfCentral();
/* extract from the zip spec :
4) If one of the fields in the end of central directory
record is too small to hold required data, the field
should be set to -1 (0xFFFF or 0xFFFFFFFF) and the
ZIP64 format record should be created.
5) The end of central directory record and the
Zip64 end of central directory locator record must
reside on the same disk when splitting or spanning
an archive.
*/
if (this.diskNumber === utils.MAX_VALUE_16BITS || this.diskWithCentralDirStart === utils.MAX_VALUE_16BITS || this.centralDirRecordsOnThisDisk === utils.MAX_VALUE_16BITS || this.centralDirRecords === utils.MAX_VALUE_16BITS || this.centralDirSize === utils.MAX_VALUE_32BITS || this.centralDirOffset === utils.MAX_VALUE_32BITS) {
this.zip64 = true;
/*
Warning : the zip64 extension is supported, but ONLY if the 64bits integer read from
the zip file can fit into a 32bits integer. This cannot be solved : JavaScript represents
all numbers as 64-bit double precision IEEE 754 floating point numbers.
So, we have 53bits for integers and bitwise operations treat everything as 32bits.
see https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Operators/Bitwise_Operators
and http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf section 8.5
*/
// should look for a zip64 EOCD locator
offset = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
if (offset < 0) {
throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");
}
this.reader.setIndex(offset);
this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
this.readBlockZip64EndOfCentralLocator();
// now the zip64 EOCD record
if (!this.isSignature(this.relativeOffsetEndOfZip64CentralDir, sig.ZIP64_CENTRAL_DIRECTORY_END)) {
// console.warn("ZIP64 end of central directory not where expected.");
this.relativeOffsetEndOfZip64CentralDir = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
if (this.relativeOffsetEndOfZip64CentralDir < 0) {
throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");
}
}
this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir);
this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
this.readBlockZip64EndOfCentral();
}
var expectedEndOfCentralDirOffset = this.centralDirOffset + this.centralDirSize;
if (this.zip64) {
expectedEndOfCentralDirOffset += 20; // end of central dir 64 locator
expectedEndOfCentralDirOffset += 12 /* should not include the leading 12 bytes */ + this.zip64EndOfCentralSize;
}
var extraBytes = endOfCentralDirOffset - expectedEndOfCentralDirOffset;
if (extraBytes > 0) {
// console.warn(extraBytes, "extra bytes at beginning or within zipfile");
if (this.isSignature(endOfCentralDirOffset, sig.CENTRAL_FILE_HEADER)) {
// The offsets seem wrong, but we have something at the specified offset.
// So… we keep it.
} else {
// the offset is wrong, update the "zero" of the reader
// this happens if data has been prepended (crx files for example)
this.reader.zero = extraBytes;
}
} else if (extraBytes < 0) {
throw new Error("Corrupted zip: missing " + Math.abs(extraBytes) + " bytes.");
}
},
prepareReader: function(data) {
this.reader = readerFor(data);
},
/**
* Read a zip file and create ZipEntries.
* @param {String|ArrayBuffer|Uint8Array|Buffer} data the binary string representing a zip file.
*/
load: function(data) {
this.prepareReader(data);
this.readEndOfCentral();
this.readCentralDir();
this.readLocalFiles();
}
};
// }}} end of ZipEntries
module.exports = ZipEntries;
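// --- Hedged usage sketch (editor's illustration, not part of JSZip) ---
// This class is internal; JSZip#loadAsync drives it roughly as below.
// "zipData" is a hypothetical Uint8Array/Buffer holding a complete zip
// file, so the sketch is left commented out.
// var utf8 = require("./utf8");
// var entries = new ZipEntries({decodeFileName: utf8.utf8decode});
// entries.load(zipData);
// entries.files.forEach(function (entry) {
//     console.log(entry.fileNameStr, entry.uncompressedSize);
// });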

View File

@@ -0,0 +1,293 @@
"use strict";
var readerFor = require("./reader/readerFor");
var utils = require("./utils");
var CompressedObject = require("./compressedObject");
var crc32fn = require("./crc32");
var utf8 = require("./utf8");
var compressions = require("./compressions");
var support = require("./support");
var MADE_BY_DOS = 0x00;
var MADE_BY_UNIX = 0x03;
/**
* Find a compression registered in JSZip.
* @param {string} compressionMethod the method magic to find.
* @return {Object|null} the JSZip compression object, null if none found.
*/
var findCompression = function(compressionMethod) {
for (var method in compressions) {
if (!Object.prototype.hasOwnProperty.call(compressions, method)) {
continue;
}
if (compressions[method].magic === compressionMethod) {
return compressions[method];
}
}
return null;
};
// class ZipEntry {{{
/**
* An entry in the zip file.
* @constructor
* @param {Object} options Options of the current file.
* @param {Object} loadOptions Options for loading the stream.
*/
function ZipEntry(options, loadOptions) {
this.options = options;
this.loadOptions = loadOptions;
}
ZipEntry.prototype = {
/**
* say if the file is encrypted.
* @return {boolean} true if the file is encrypted, false otherwise.
*/
isEncrypted: function() {
// bit 1 is set
return (this.bitFlag & 0x0001) === 0x0001;
},
/**
* say if the file has utf-8 filename/comment.
* @return {boolean} true if the filename/comment is in utf-8, false otherwise.
*/
useUTF8: function() {
// bit 11 is set
return (this.bitFlag & 0x0800) === 0x0800;
},
/**
* Read the local part of a zip file and add the info in this object.
* @param {DataReader} reader the reader to use.
*/
readLocalPart: function(reader) {
var compression, localExtraFieldsLength;
// we already know everything from the central dir !
// If the central dir data are wrong, we are doomed.
// On the other hand, the local part is scary: zip64, data descriptors, both, etc.
// The less data we get here, the more reliable this should be.
// Let's skip the whole header and dash to the data !
reader.skip(22);
// in some zip created on windows, the filename stored in the central dir contains \ instead of /.
// Strangely, the filename here is OK.
// I would love to treat these zip files as corrupted (see http://www.info-zip.org/FAQ.html#backslashes
// or APPNOTE#4.4.17.1, "All slashes MUST be forward slashes '/'") but there are a lot of bad zip generators...
// Search "unzip mismatching "local" filename continuing with "central" filename version" on
// the internet.
//
// I think I see the logic here : the central directory is used to display
// content and the local directory is used to extract the files. Mixing / and \
// may be used to display \ to windows users and use / when extracting the files.
// Unfortunately, this also leads to some issues: http://seclists.org/fulldisclosure/2009/Sep/394
this.fileNameLength = reader.readInt(2);
localExtraFieldsLength = reader.readInt(2); // can't be sure this will be the same as the central dir
// the fileName is stored as binary data, the handleUTF8 method will take care of the encoding.
this.fileName = reader.readData(this.fileNameLength);
reader.skip(localExtraFieldsLength);
if (this.compressedSize === -1 || this.uncompressedSize === -1) {
throw new Error("Bug or corrupted zip : didn't get enough information from the central directory " + "(compressedSize === -1 || uncompressedSize === -1)");
}
compression = findCompression(this.compressionMethod);
if (compression === null) { // no compression found
throw new Error("Corrupted zip : compression " + utils.pretty(this.compressionMethod) + " unknown (inner file : " + utils.transformTo("string", this.fileName) + ")");
}
this.decompressed = new CompressedObject(this.compressedSize, this.uncompressedSize, this.crc32, compression, reader.readData(this.compressedSize));
},
/**
* Read the central part of a zip file and add the info in this object.
* @param {DataReader} reader the reader to use.
*/
readCentralPart: function(reader) {
this.versionMadeBy = reader.readInt(2);
reader.skip(2);
// this.versionNeeded = reader.readInt(2);
this.bitFlag = reader.readInt(2);
this.compressionMethod = reader.readString(2);
this.date = reader.readDate();
this.crc32 = reader.readInt(4);
this.compressedSize = reader.readInt(4);
this.uncompressedSize = reader.readInt(4);
var fileNameLength = reader.readInt(2);
this.extraFieldsLength = reader.readInt(2);
this.fileCommentLength = reader.readInt(2);
this.diskNumberStart = reader.readInt(2);
this.internalFileAttributes = reader.readInt(2);
this.externalFileAttributes = reader.readInt(4);
this.localHeaderOffset = reader.readInt(4);
if (this.isEncrypted()) {
throw new Error("Encrypted zip are not supported");
}
// will be read in the local part, see the comments there
reader.skip(fileNameLength);
this.readExtraFields(reader);
this.parseZIP64ExtraField(reader);
this.fileComment = reader.readData(this.fileCommentLength);
},
/**
* Parse the external file attributes and get the unix/dos permissions.
*/
processAttributes: function () {
this.unixPermissions = null;
this.dosPermissions = null;
var madeBy = this.versionMadeBy >> 8;
// Check if we have the DOS directory flag set.
// We look for it in the DOS and UNIX permissions
// but some unknown platform could set it as a compatibility flag.
this.dir = this.externalFileAttributes & 0x0010 ? true : false;
if(madeBy === MADE_BY_DOS) {
// first 6 bits (0 to 5)
this.dosPermissions = this.externalFileAttributes & 0x3F;
}
if(madeBy === MADE_BY_UNIX) {
this.unixPermissions = (this.externalFileAttributes >> 16) & 0xFFFF;
// the octal permissions are in (this.unixPermissions & 0x01FF).toString(8);
}
// fail safe : if the name ends with a / it probably means a folder
if (!this.dir && this.fileNameStr.slice(-1) === "/") {
this.dir = true;
}
},
/**
* Parse the ZIP64 extra field and merge the info in the current ZipEntry.
*/
parseZIP64ExtraField: function() {
if (!this.extraFields[0x0001]) {
return;
}
// should be something, preparing the extra reader
var extraReader = readerFor(this.extraFields[0x0001].value);
// I really hope that these 64-bit integers can fit in 32-bit integers, because js
// won't let us have more.
if (this.uncompressedSize === utils.MAX_VALUE_32BITS) {
this.uncompressedSize = extraReader.readInt(8);
}
if (this.compressedSize === utils.MAX_VALUE_32BITS) {
this.compressedSize = extraReader.readInt(8);
}
if (this.localHeaderOffset === utils.MAX_VALUE_32BITS) {
this.localHeaderOffset = extraReader.readInt(8);
}
if (this.diskNumberStart === utils.MAX_VALUE_32BITS) {
this.diskNumberStart = extraReader.readInt(4);
}
},
/**
 * Read the extra fields declared in the central part and add them to this object.
* @param {DataReader} reader the reader to use.
*/
readExtraFields: function(reader) {
var end = reader.index + this.extraFieldsLength,
extraFieldId,
extraFieldLength,
extraFieldValue;
if (!this.extraFields) {
this.extraFields = {};
}
while (reader.index + 4 < end) {
extraFieldId = reader.readInt(2);
extraFieldLength = reader.readInt(2);
extraFieldValue = reader.readData(extraFieldLength);
this.extraFields[extraFieldId] = {
id: extraFieldId,
length: extraFieldLength,
value: extraFieldValue
};
}
reader.setIndex(end);
},
/**
 * Apply a UTF8 transformation if needed.
*/
handleUTF8: function() {
var decodeParamType = support.uint8array ? "uint8array" : "array";
if (this.useUTF8()) {
this.fileNameStr = utf8.utf8decode(this.fileName);
this.fileCommentStr = utf8.utf8decode(this.fileComment);
} else {
var upath = this.findExtraFieldUnicodePath();
if (upath !== null) {
this.fileNameStr = upath;
} else {
// ASCII text or unsupported code page
var fileNameByteArray = utils.transformTo(decodeParamType, this.fileName);
this.fileNameStr = this.loadOptions.decodeFileName(fileNameByteArray);
}
var ucomment = this.findExtraFieldUnicodeComment();
if (ucomment !== null) {
this.fileCommentStr = ucomment;
} else {
// ASCII text or unsupported code page
var commentByteArray = utils.transformTo(decodeParamType, this.fileComment);
this.fileCommentStr = this.loadOptions.decodeFileName(commentByteArray);
}
}
},
/**
* Find the unicode path declared in the extra field, if any.
* @return {String} the unicode path, null otherwise.
*/
findExtraFieldUnicodePath: function() {
var upathField = this.extraFields[0x7075];
if (upathField) {
var extraReader = readerFor(upathField.value);
// wrong version
if (extraReader.readInt(1) !== 1) {
return null;
}
// the crc of the filename changed, this field is out of date.
if (crc32fn(this.fileName) !== extraReader.readInt(4)) {
return null;
}
return utf8.utf8decode(extraReader.readData(upathField.length - 5));
}
return null;
},
/**
* Find the unicode comment declared in the extra field, if any.
* @return {String} the unicode comment, null otherwise.
*/
findExtraFieldUnicodeComment: function() {
var ucommentField = this.extraFields[0x6375];
if (ucommentField) {
var extraReader = readerFor(ucommentField.value);
// wrong version
if (extraReader.readInt(1) !== 1) {
return null;
}
// the crc of the comment changed, this field is out of date.
if (crc32fn(this.fileComment) !== extraReader.readInt(4)) {
return null;
}
return utf8.utf8decode(extraReader.readData(ucommentField.length - 5));
}
return null;
}
};
module.exports = ZipEntry;
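// --- Hedged editor's note on the bit-flag helpers above ---
// They test the zip "general purpose bit flag": bit 0 marks encryption and
// bit 11 marks UTF-8 filenames/comments, e.g. for a raw flag of 0x0800:
// (0x0800 & 0x0001) === 0x0001; // false: not encrypted
// (0x0800 & 0x0800) === 0x0800; // true: UTF-8 filename/comment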

View File

@@ -0,0 +1,133 @@
"use strict";
var StreamHelper = require("./stream/StreamHelper");
var DataWorker = require("./stream/DataWorker");
var utf8 = require("./utf8");
var CompressedObject = require("./compressedObject");
var GenericWorker = require("./stream/GenericWorker");
/**
* A simple object representing a file in the zip file.
* @constructor
* @param {string} name the name of the file
* @param {String|ArrayBuffer|Uint8Array|Buffer} data the data
* @param {Object} options the options of the file
*/
var ZipObject = function(name, data, options) {
this.name = name;
this.dir = options.dir;
this.date = options.date;
this.comment = options.comment;
this.unixPermissions = options.unixPermissions;
this.dosPermissions = options.dosPermissions;
this._data = data;
this._dataBinary = options.binary;
// keep only the compression
this.options = {
compression : options.compression,
compressionOptions : options.compressionOptions
};
};
ZipObject.prototype = {
/**
* Create an internal stream for the content of this object.
* @param {String} type the type of each chunk.
* @return StreamHelper the stream.
*/
internalStream: function (type) {
var result = null, outputType = "string";
try {
if (!type) {
throw new Error("No output type specified.");
}
outputType = type.toLowerCase();
var askUnicodeString = outputType === "string" || outputType === "text";
if (outputType === "binarystring" || outputType === "text") {
outputType = "string";
}
result = this._decompressWorker();
var isUnicodeString = !this._dataBinary;
if (isUnicodeString && !askUnicodeString) {
result = result.pipe(new utf8.Utf8EncodeWorker());
}
if (!isUnicodeString && askUnicodeString) {
result = result.pipe(new utf8.Utf8DecodeWorker());
}
} catch (e) {
result = new GenericWorker("error");
result.error(e);
}
return new StreamHelper(result, outputType, "");
},
/**
 * Prepare the content in the requested type.
* @param {String} type the type of the result.
* @param {Function} onUpdate a function to call on each internal update.
* @return Promise the promise of the result.
*/
async: function (type, onUpdate) {
return this.internalStream(type).accumulate(onUpdate);
},
/**
* Prepare the content as a nodejs stream.
* @param {String} type the type of each chunk.
* @param {Function} onUpdate a function to call on each internal update.
* @return Stream the stream.
*/
nodeStream: function (type, onUpdate) {
return this.internalStream(type || "nodebuffer").toNodejsStream(onUpdate);
},
/**
* Return a worker for the compressed content.
* @private
* @param {Object} compression the compression object to use.
* @param {Object} compressionOptions the options to use when compressing.
* @return Worker the worker.
*/
_compressWorker: function (compression, compressionOptions) {
if (
this._data instanceof CompressedObject &&
this._data.compression.magic === compression.magic
) {
return this._data.getCompressedWorker();
} else {
var result = this._decompressWorker();
if(!this._dataBinary) {
result = result.pipe(new utf8.Utf8EncodeWorker());
}
return CompressedObject.createWorkerFrom(result, compression, compressionOptions);
}
},
/**
* Return a worker for the decompressed content.
* @private
* @return Worker the worker.
*/
_decompressWorker : function () {
if (this._data instanceof CompressedObject) {
return this._data.getContentWorker();
} else if (this._data instanceof GenericWorker) {
return this._data;
} else {
return new DataWorker(this._data);
}
}
};
var removedMethods = ["asText", "asBinary", "asNodeBuffer", "asUint8Array", "asArrayBuffer"];
var removedFn = function () {
throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
};
for(var i = 0; i < removedMethods.length; i++) {
ZipObject.prototype[removedMethods[i]] = removedFn;
}
module.exports = ZipObject;
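// --- Hedged usage sketch (editor's illustration, not part of JSZip) ---
// The public JSZip entry point wraps this class; `async` above backs the
// Promise-returning read shown here. Left commented out to avoid a
// circular require from inside the library itself.
// var JSZip = require("jszip");
// var zip = new JSZip();
// zip.file("hello.txt", "Hello, world");
// zip.file("hello.txt").async("string").then(function (text) {
//     console.log(text); // logs "Hello, world"
// });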

View File

@@ -0,0 +1,34 @@
sudo: false
language: node_js
before_install:
- (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true
notifications:
email: false
matrix:
fast_finish: true
include:
- node_js: '0.8'
env: NPM_LEGACY=true
- node_js: '0.10'
env: NPM_LEGACY=true
- node_js: '0.11'
env: NPM_LEGACY=true
- node_js: '0.12'
env: NPM_LEGACY=true
- node_js: 1
env: NPM_LEGACY=true
- node_js: 2
env: NPM_LEGACY=true
- node_js: 3
env: NPM_LEGACY=true
- node_js: 4
- node_js: 5
- node_js: 6
- node_js: 7
- node_js: 8
- node_js: 9
script: "npm run test"
env:
global:
- secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
- secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=

View File

@@ -0,0 +1,38 @@
# Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
* (a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
* (b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
* (c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
* (d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
## Moderation Policy
The [Node.js Moderation Policy] applies to this WG.
## Code of Conduct
The [Node.js Code of Conduct][] applies to this WG.
[Node.js Code of Conduct]:
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
[Node.js Moderation Policy]:
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md

View File

@@ -0,0 +1,136 @@
### Streams Working Group
The Node.js Streams project is jointly governed by a Working Group
(WG)
that is responsible for high-level guidance of the project.
The WG has final authority over this project including:
* Technical direction
* Project governance and process (including this policy)
* Contribution policy
* GitHub repository hosting
* Conduct guidelines
* Maintaining the list of additional Collaborators
For the current list of WG members, see the project
[README.md](./README.md#current-project-team-members).
### Collaborators
The readable-stream GitHub repository is
maintained by the WG and additional Collaborators who are added by the
WG on an ongoing basis.
Individuals making significant and valuable contributions are made
Collaborators and given commit-access to the project. These
individuals are identified by the WG and their addition as
Collaborators is discussed during the WG meeting.
_Note:_ If you make a significant contribution and are not considered
for commit-access, log an issue or contact a WG member directly and it
will be brought up in the next WG meeting.
Modifications of the contents of the readable-stream repository are
made on
a collaborative basis. Anybody with a GitHub account may propose a
modification via pull request and it will be considered by the project
Collaborators. All pull requests must be reviewed and accepted by a
Collaborator with sufficient expertise who is able to take full
responsibility for the change. In the case of pull requests proposed
by an existing Collaborator, an additional Collaborator is required
for sign-off. Consensus should be sought if additional Collaborators
participate and there is disagreement around a particular
modification. See _Consensus Seeking Process_ below for further detail
on the consensus model used for governance.
Collaborators may opt to elevate significant or controversial
modifications, or modifications that have not found consensus to the
WG for discussion by assigning the ***WG-agenda*** tag to a pull
request or issue. The WG should serve as the final arbiter where
required.
For the current list of Collaborators, see the project
[README.md](./README.md#members).
### WG Membership
WG seats are not time-limited. There is no fixed size of the WG.
However, the expected target is between 6 and 12, to ensure adequate
coverage of important areas of expertise, balanced with the ability to
make decisions efficiently.
There is no specific set of requirements or qualifications for WG
membership beyond these rules.
The WG may add additional members to the WG by unanimous consensus.
A WG member may be removed from the WG by voluntary resignation, or by
unanimous consensus of all other WG members.
Changes to WG membership should be posted in the agenda, and may be
suggested as any other agenda item (see "WG Meetings" below).
If an addition or removal is proposed during a meeting, and the full
WG is not in attendance to participate, then the addition or removal
is added to the agenda for the subsequent meeting. This is to ensure
that all members are given the opportunity to participate in all
membership decisions. If a WG member is unable to attend a meeting
where a planned membership decision is being made, then their consent
is assumed.
No more than 1/3 of the WG members may be affiliated with the same
employer. If removal or resignation of a WG member, or a change of
employment by a WG member, creates a situation where more than 1/3 of
the WG membership shares an employer, then the situation must be
immediately remedied by the resignation or removal of one or more WG
members affiliated with the over-represented employer(s).
### WG Meetings
The WG meets occasionally on a Google Hangout On Air. A designated moderator
approved by the WG runs the meeting. Each meeting should be
published to YouTube.
Items that are considered contentious, or that modify governance,
contribution policy, WG membership, or the release process, are added
to the WG agenda.
The intention of the agenda is not to approve or review all patches;
that should happen continuously on GitHub and be handled by the larger
group of Collaborators.
Any community member or contributor can ask that something be added to
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
WG member or the moderator can add the item to the agenda by adding
the ***WG-agenda*** tag to the issue.
Prior to each WG meeting the moderator will share the Agenda with
members of the WG. WG members can add any items they like to the
agenda at the beginning of each meeting. The moderator and the WG
cannot veto or remove items.
The WG may invite persons or representatives from certain projects to
participate in a non-voting capacity.
The moderator is responsible for summarizing the discussion of each
agenda item and sending it as a pull request after the meeting.
### Consensus Seeking Process
The WG follows a
[Consensus
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
decision-making model.
When an agenda item has appeared to reach a consensus the moderator
will ask "Does anyone object?" as a final call for dissent from the
consensus.
If an agenda item cannot reach a consensus a WG member can call for
either a closing vote or a vote to table the issue to the next
meeting. The call for a vote must be seconded by a majority of the WG
or else the discussion will continue. Simple majority wins.
Note that changes to WG membership require unanimous consensus. See
"WG Membership" above.


@@ -0,0 +1,47 @@
Node.js is licensed for use as follows:
"""
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:
"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""


@@ -0,0 +1,58 @@
# readable-stream
***Node-core v8.17.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream)
```bash
npm install --save readable-stream
```
***Node-core streams for userland***
This package is a mirror of the Streams2 and Streams3 implementations in
Node-core.
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html).
If you want to guarantee a stable streams base, regardless of what version of
Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
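A minimal sketch of that swap: require this package where you would otherwise require Node-core's `stream` module (the `read` option below is a no-op because data is pushed manually):

```js
var Readable = require('readable-stream').Readable;

var rs = new Readable({
    read: function () {} // no-op: data is pushed manually below
});

rs.push('beep ');
rs.push('boop\n');
rs.push(null); // signal end-of-stream

rs.pipe(process.stdout);
```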
As of version 2.0.0 **readable-stream** uses semantic versioning.
# Streams Working Group
`readable-stream` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
notice of changes.
<a name="members"></a>
## Team Members
* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;christopher.s.dickinson@gmail.com&gt;
- Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
- Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) &lt;rod@vagg.org&gt;
- Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
* **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;newmansam@outlook.com&gt;
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
* **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;d@domenic.me&gt;
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) &lt;matteo.collina@gmail.com&gt;
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) &lt;shestak.irina@gmail.com&gt;


@@ -0,0 +1,60 @@
# streams WG Meeting 2015-01-30
## Links
* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
## Agenda
Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
## Minutes
### adopt a charter
* group: +1's all around
### What versioning scheme should be adopted?
* group: +1s 3.0.0
* domenic+group: pulling in patches from other sources where appropriate
* mikeal: version independently, suggesting versions for io.js
* mikeal+domenic: work with TC to notify in advance of changes
### streamline creation of streams
* sam: streamline creation of streams
* domenic: nice simple solution posted
  * but, we lose the opportunity to change the model
  * may not be backwards incompatible (double check keys)
  * **action item:** domenic will check
### remove implicit flowing of streams on(data)
* add isFlowing / isPaused
* mikeal: worrying that we're documenting polyfill methods confuses users
* domenic: more reflective API is probably good, with warning labels for users
* new section for mad scientists (reflective stream access)
* calvin: name the “third state”
* mikeal: maybe borrow the name from whatwg?
* domenic: we're missing the “third state”
* consensus: kind of difficult to name the third state
* mikeal: figure out differences in states / compat
* mathias: always flow on data eliminates third state
  * explore what it breaks
**action items:**
* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
* ask rod/build for infrastructure
* **chris**: explore the “flow on data” approach
  * add isPaused/isFlowing
  * add new docs section
  * move isPaused to that section


@@ -0,0 +1 @@
module.exports = require('./lib/_stream_duplex.js');


@@ -0,0 +1 @@
module.exports = require('./readable').Duplex


@@ -0,0 +1,131 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
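// An illustrative sketch (not part of the original file): the simplified
// constructor supported below accepts `read` and `write` functions, so a
// minimal Duplex can be built without subclassing:
//
//   var d = new Duplex({
//     read: function (size) { this.push(null); },
//     write: function (chunk, encoding, cb) { cb(); }
//   });
//
//   d.on('finish', function () { console.log('writable side done'); });
//   d.end('ignored');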
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
var keys = [];
for (var key in obj) {
keys.push(key);
  }
  return keys;
};
/*</replacement>*/
module.exports = Duplex;
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
{
// avoid scope creep, the keys array can then be collected
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
var method = keys[v];
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}
}
function Duplex(options) {
if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
if (options && options.readable === false) this.readable = false;
if (options && options.writable === false) this.writable = false;
this.allowHalfOpen = true;
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
this.once('end', onend);
}
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function () {
return this._writableState.highWaterMark;
}
});
// the no-half-open enforcer
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
if (this.allowHalfOpen || this._writableState.ended) return;
// no more data can be written.
// But allow more writes to happen in this tick.
pna.nextTick(onEndNT, this);
}
function onEndNT(self) {
self.end();
}
Object.defineProperty(Duplex.prototype, 'destroyed', {
get: function () {
if (this._readableState === undefined || this._writableState === undefined) {
return false;
}
return this._readableState.destroyed && this._writableState.destroyed;
},
set: function (value) {
// we ignore the value if the stream
// has not been initialized yet
if (this._readableState === undefined || this._writableState === undefined) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._readableState.destroyed = value;
this._writableState.destroyed = value;
}
});
Duplex.prototype._destroy = function (err, cb) {
this.push(null);
this.end();
pna.nextTick(cb, err);
};


@@ -0,0 +1,47 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
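// An illustrative sketch (not part of the original file): a PassThrough is
// handy for observing data flowing through a pipeline without altering it.
// `source` and `destination` below are placeholder streams:
//
//   var pass = new PassThrough();
//   pass.on('data', function (chunk) {
//     // inspect chunk here; it continues downstream unchanged
//   });
//   source.pipe(pass).pipe(destination);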
'use strict';
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};

File diff suppressed because it is too large.

@@ -0,0 +1,214 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
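// An illustrative sketch (not part of the original file): the simplified
// constructor supported below accepts a `transform` function, so a Transform
// that upper-cases each chunk can be built without subclassing:
//
//   var upper = new Transform({
//     transform: function (chunk, encoding, cb) {
//       cb(null, chunk.toString().toUpperCase());
//     }
//   });
//   process.stdin.pipe(upper).pipe(process.stdout);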
'use strict';
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
function afterTransform(er, data) {
var ts = this._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (!cb) {
return this.emit('error', new Error('write callback called multiple times'));
}
ts.writechunk = null;
ts.writecb = null;
if (data != null) // single equals check for both `null` and `undefined`
this.push(data);
cb(er);
var rs = this._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
this._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
this._transformState = {
afterTransform: afterTransform.bind(this),
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
};
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
}
// When the writable side finishes, then flush out anything remaining.
this.on('prefinish', prefinish);
}
function prefinish() {
var _this = this;
if (typeof this._flush === 'function') {
this._flush(function (er, data) {
done(_this, er, data);
});
} else {
done(this, null, null);
}
}
Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('_transform() is not implemented');
};
Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
Transform.prototype._destroy = function (err, cb) {
var _this2 = this;
Duplex.prototype._destroy.call(this, err, function (err2) {
cb(err2);
_this2.emit('close');
});
};
function done(stream, er, data) {
if (er) return stream.emit('error', er);
if (data != null) // single equals check for both `null` and `undefined`
stream.push(data);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
return stream.push(null);
}


@@ -0,0 +1,685 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.
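// An illustrative sketch (not part of the original file): the simplified
// constructor supported below accepts a `write` function, so a Writable that
// counts bytes can be built without subclassing:
//
//   var total = 0;
//   var counter = new Writable({
//     write: function (chunk, encoding, cb) {
//       total += chunk.length;
//       cb(); // signal that this chunk has been handled
//     }
//   });
//   counter.on('finish', function () { console.log(total + ' bytes'); });
//   counter.end('some data');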
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
module.exports = Writable;
/* <replacement> */
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
this.next = null;
}
// It looks like a linked list, but it is not:
// there will be only 2 of these for each stream
function CorkedRequest(state) {
var _this = this;
this.next = null;
this.entry = null;
this.finish = function () {
onCorkedFinish(_this, state);
};
}
/* </replacement> */
/*<replacement>*/
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
/*</replacement>*/
/*<replacement>*/
var Duplex;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
/*<replacement>*/
var internalUtil = {
deprecate: require('util-deprecate')
};
/*</replacement>*/
/*<replacement>*/
var Stream = require('./internal/streams/stream');
/*</replacement>*/
/*<replacement>*/
var Buffer = require('safe-buffer').Buffer;
var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
function _uint8ArrayToBuffer(chunk) {
return Buffer.from(chunk);
}
function _isUint8Array(obj) {
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
}
/*</replacement>*/
var destroyImpl = require('./internal/streams/destroy');
util.inherits(Writable, Stream);
function nop() {}
function WritableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {};
// Duplex streams are both readable and writable, but share
// the same options object.
// However, some cases require setting options to different
// values for the readable and the writable sides of the duplex stream.
// These options can be provided separately as readableXXX and writableXXX.
var isDuplex = stream instanceof Duplex;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
var writableHwm = options.writableHighWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
// cast to ints.
this.highWaterMark = Math.floor(this.highWaterMark);
// if _final has been called
this.finalCalled = false;
// drain event flag.
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// has it been destroyed
this.destroyed = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// when true all writes will be buffered until .uncork() call
this.corked = 0;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function (er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
this.bufferedRequest = null;
this.lastBufferedRequest = null;
// number of pending user-supplied write callbacks
// this must be 0 before 'finish' can be emitted
this.pendingcb = 0;
// emit prefinish if the only thing we're waiting for is _write cbs
// This is relevant for synchronous Transform streams
this.prefinished = false;
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
// count buffered requests
this.bufferedRequestCount = 0;
// allocate the first CorkedRequest, there is always
// one allocated and free to use, and we maintain at most two
this.corkedRequestsFree = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function getBuffer() {
var current = this.bufferedRequest;
var out = [];
while (current) {
out.push(current);
current = current.next;
}
return out;
};
(function () {
try {
Object.defineProperty(WritableState.prototype, 'buffer', {
get: internalUtil.deprecate(function () {
return this.getBuffer();
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
});
} catch (_) {}
})();
// Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
realHasInstance = Function.prototype[Symbol.hasInstance];
Object.defineProperty(Writable, Symbol.hasInstance, {
value: function (object) {
if (realHasInstance.call(this, object)) return true;
if (this !== Writable) return false;
return object && object._writableState instanceof WritableState;
}
});
} else {
realHasInstance = function (object) {
return object instanceof this;
};
}
function Writable(options) {
Duplex = Duplex || require('./_stream_duplex');
// Writable ctor is applied to Duplexes, too.
// `realHasInstance` is necessary because using plain `instanceof`
// would return false, as no `_writableState` property is attached.
// Trying to use the custom `instanceof` for Writable here will also break the
// Node.js LazyTransform implementation, which has a non-trivial getter for
// `_writableState` that would lead to infinite recursion.
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
return new Writable(options);
}
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
if (options) {
if (typeof options.write === 'function') this._write = options.write;
if (typeof options.writev === 'function') this._writev = options.writev;
if (typeof options.destroy === 'function') this._destroy = options.destroy;
if (typeof options.final === 'function') this._final = options.final;
}
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function () {
this.emit('error', new Error('Cannot pipe, not readable'));
};
function writeAfterEnd(stream, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
pna.nextTick(cb, er);
}
// Checks that a user-supplied chunk is valid, especially for the particular
// mode the stream is in. Currently this means that `null` is never accepted
// and undefined/non-string values are only allowed in object mode.
function validChunk(stream, state, chunk, cb) {
var valid = true;
var er = false;
if (chunk === null) {
er = new TypeError('May not write null values to stream');
} else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
if (er) {
stream.emit('error', er);
pna.nextTick(cb, er);
valid = false;
}
return valid;
}
Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
var isBuf = !state.objectMode && _isUint8Array(chunk);
if (isBuf && !Buffer.isBuffer(chunk)) {
chunk = _uint8ArrayToBuffer(chunk);
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
if (typeof cb !== 'function') cb = nop;
if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
}
return ret;
};
Writable.prototype.cork = function () {
var state = this._writableState;
state.corked++;
};
Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// node::ParseEncoding() requires lower case.
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
this._writableState.defaultEncoding = encoding;
return this;
};
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding);
}
return chunk;
}
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function () {
return this._writableState.highWaterMark;
}
});
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
if (!isBuf) {
var newChunk = decodeChunk(state, chunk, encoding);
if (chunk !== newChunk) {
isBuf = true;
encoding = 'buffer';
chunk = newChunk;
}
}
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
state.lastBufferedRequest = {
chunk: chunk,
encoding: encoding,
isBuf: isBuf,
callback: cb,
next: null
};
if (last) {
last.next = state.lastBufferedRequest;
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
return ret;
}
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
--state.pendingcb;
if (sync) {
// defer the callback if we are being called synchronously
// to avoid piling up things on the stack
pna.nextTick(cb, er);
// this can emit finish, and it will always happen
// after error
pna.nextTick(finishMaybe, stream, state);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
} else {
// the caller expect this to happen before if
// it is async
cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
// this can emit finish, but finish must
// always follow error
finishMaybe(stream, state);
}
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state);
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
/*<replacement>*/
asyncWrite(afterWrite, stream, state, finished, cb);
/*</replacement>*/
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
}
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
var entry = state.bufferedRequest;
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
var l = state.bufferedRequestCount;
var buffer = new Array(l);
var holder = state.corkedRequestsFree;
holder.entry = entry;
var count = 0;
var allBuffers = true;
while (entry) {
buffer[count] = entry;
if (!entry.isBuf) allBuffers = false;
entry = entry.next;
count += 1;
}
buffer.allBuffers = allBuffers;
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
// doWrite is almost always async, defer these to save a bit of time
// as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
if (holder.next) {
state.corkedRequestsFree = holder.next;
holder.next = null;
} else {
state.corkedRequestsFree = new CorkedRequest(state);
}
state.bufferedRequestCount = 0;
} else {
// Slow case, write chunks one-by-one
while (entry) {
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, cb);
entry = entry.next;
state.bufferedRequestCount--;
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
break;
}
}
if (entry === null) state.lastBufferedRequest = null;
}
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
Writable.prototype._write = function (chunk, encoding, cb) {
cb(new Error('_write() is not implemented'));
};
Writable.prototype._writev = null;
Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
if (state.corked) {
state.corked = 1;
this.uncork();
}
// ignore unnecessary end() calls.
if (!state.ending) endWritable(this, state, cb);
};
function needFinish(state) {
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function callFinal(stream, state) {
stream._final(function (err) {
state.pendingcb--;
if (err) {
stream.emit('error', err);
}
state.prefinished = true;
stream.emit('prefinish');
finishMaybe(stream, state);
});
}
function prefinish(stream, state) {
if (!state.prefinished && !state.finalCalled) {
if (typeof stream._final === 'function') {
state.pendingcb++;
state.finalCalled = true;
pna.nextTick(callFinal, stream, state);
} else {
state.prefinished = true;
stream.emit('prefinish');
}
}
}
function finishMaybe(stream, state) {
var need = needFinish(state);
if (need) {
prefinish(stream, state);
if (state.pendingcb === 0) {
state.finished = true;
stream.emit('finish');
}
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
}
state.ended = true;
stream.writable = false;
}
function onCorkedFinish(corkReq, state, err) {
var entry = corkReq.entry;
corkReq.entry = null;
while (entry) {
var cb = entry.callback;
state.pendingcb--;
cb(err);
entry = entry.next;
}
// reuse the free corkReq.
state.corkedRequestsFree.next = corkReq;
}
Object.defineProperty(Writable.prototype, 'destroyed', {
get: function () {
if (this._writableState === undefined) {
return false;
}
return this._writableState.destroyed;
},
set: function (value) {
// we ignore the value if the stream
// has not been initialized yet
if (!this._writableState) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._writableState.destroyed = value;
}
});
Writable.prototype.destroy = destroyImpl.destroy;
Writable.prototype._undestroy = destroyImpl.undestroy;
Writable.prototype._destroy = function (err, cb) {
this.end();
cb(err);
};


@@ -0,0 +1,78 @@
'use strict';
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var Buffer = require('safe-buffer').Buffer;
var util = require('util');
function copyBuffer(src, target, offset) {
src.copy(target, offset);
}
module.exports = function () {
function BufferList() {
_classCallCheck(this, BufferList);
this.head = null;
this.tail = null;
this.length = 0;
}
BufferList.prototype.push = function push(v) {
var entry = { data: v, next: null };
if (this.length > 0) this.tail.next = entry;else this.head = entry;
this.tail = entry;
++this.length;
};
BufferList.prototype.unshift = function unshift(v) {
var entry = { data: v, next: this.head };
if (this.length === 0) this.tail = entry;
this.head = entry;
++this.length;
};
BufferList.prototype.shift = function shift() {
if (this.length === 0) return;
var ret = this.head.data;
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
--this.length;
return ret;
};
BufferList.prototype.clear = function clear() {
this.head = this.tail = null;
this.length = 0;
};
BufferList.prototype.join = function join(s) {
if (this.length === 0) return '';
var p = this.head;
var ret = '' + p.data;
while (p = p.next) {
ret += s + p.data;
    }
    return ret;
};
BufferList.prototype.concat = function concat(n) {
if (this.length === 0) return Buffer.alloc(0);
var ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
while (p) {
copyBuffer(p.data, ret, i);
i += p.data.length;
p = p.next;
}
return ret;
};
return BufferList;
}();
if (util && util.inspect && util.inspect.custom) {
module.exports.prototype[util.inspect.custom] = function () {
var obj = util.inspect({ length: this.length });
return this.constructor.name + ' ' + obj;
};
}


@@ -0,0 +1,84 @@
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
// undocumented cb() API, needed for core, not for public API
function destroy(err, cb) {
var _this = this;
var readableDestroyed = this._readableState && this._readableState.destroyed;
var writableDestroyed = this._writableState && this._writableState.destroyed;
if (readableDestroyed || writableDestroyed) {
if (cb) {
cb(err);
} else if (err) {
if (!this._writableState) {
pna.nextTick(emitErrorNT, this, err);
} else if (!this._writableState.errorEmitted) {
this._writableState.errorEmitted = true;
pna.nextTick(emitErrorNT, this, err);
}
}
return this;
}
// we set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
if (this._readableState) {
this._readableState.destroyed = true;
}
// if this is a duplex stream mark the writable part as destroyed as well
if (this._writableState) {
this._writableState.destroyed = true;
}
this._destroy(err || null, function (err) {
if (!cb && err) {
if (!_this._writableState) {
pna.nextTick(emitErrorNT, _this, err);
} else if (!_this._writableState.errorEmitted) {
_this._writableState.errorEmitted = true;
pna.nextTick(emitErrorNT, _this, err);
}
} else if (cb) {
cb(err);
}
});
return this;
}
function undestroy() {
if (this._readableState) {
this._readableState.destroyed = false;
this._readableState.reading = false;
this._readableState.ended = false;
this._readableState.endEmitted = false;
}
if (this._writableState) {
this._writableState.destroyed = false;
this._writableState.ended = false;
this._writableState.ending = false;
this._writableState.finalCalled = false;
this._writableState.prefinished = false;
this._writableState.finished = false;
this._writableState.errorEmitted = false;
}
}
function emitErrorNT(self, err) {
self.emit('error', err);
}
module.exports = {
destroy: destroy,
undestroy: undestroy
};


@@ -0,0 +1 @@
module.exports = require('events').EventEmitter;


@@ -0,0 +1 @@
module.exports = require('stream');


@@ -0,0 +1,52 @@
{
"name": "readable-stream",
"version": "2.3.8",
"description": "Streams3, a user-land copy of the stream library from Node.js",
"main": "readable.js",
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
},
"devDependencies": {
"assert": "^1.4.0",
"babel-polyfill": "^6.9.1",
"buffer": "^4.9.0",
"lolex": "^2.3.2",
"nyc": "^6.4.0",
"tap": "^0.7.0",
"tape": "^4.8.0"
},
"scripts": {
"test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js",
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
"cover": "nyc npm test",
"report": "nyc report --reporter=lcov"
},
"repository": {
"type": "git",
"url": "git://github.com/nodejs/readable-stream"
},
"keywords": [
"readable",
"stream",
"pipe"
],
"browser": {
"util": false,
"./readable.js": "./readable-browser.js",
"./writable.js": "./writable-browser.js",
"./duplex.js": "./duplex-browser.js",
"./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
},
"nyc": {
"include": [
"lib/**.js"
]
},
"license": "MIT"
}

View File

@@ -0,0 +1 @@
module.exports = require('./readable').PassThrough


@@ -0,0 +1,7 @@
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');


@@ -0,0 +1,19 @@
var Stream = require('stream');
if (process.env.READABLE_STREAM === 'disable' && Stream) {
module.exports = Stream;
exports = module.exports = Stream.Readable;
exports.Readable = Stream.Readable;
exports.Writable = Stream.Writable;
exports.Duplex = Stream.Duplex;
exports.Transform = Stream.Transform;
exports.PassThrough = Stream.PassThrough;
exports.Stream = Stream;
} else {
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
}


@@ -0,0 +1 @@
module.exports = require('./readable').Transform


@@ -0,0 +1 @@
module.exports = require('./lib/_stream_writable.js');


@@ -0,0 +1,8 @@
var Stream = require('stream');
var Writable = require('./lib/_stream_writable.js');
if (process.env.READABLE_STREAM === 'disable') {
  module.exports = Stream && Stream.Writable || Writable;
} else {
  module.exports = Writable;
}


@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Feross Aboukhadijeh
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@@ -0,0 +1,584 @@
# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
[travis-url]: https://travis-ci.org/feross/safe-buffer
[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
[npm-url]: https://npmjs.org/package/safe-buffer
[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
[downloads-url]: https://npmjs.org/package/safe-buffer
[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
[standard-url]: https://standardjs.com
#### Safer Node.js Buffer API
**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
**Uses the built-in implementation when available.**
## install
```
npm install safe-buffer
```
## usage
The goal of this package is to provide a safe replacement for the node.js `Buffer`.
It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
the top of your node.js modules:
```js
var Buffer = require('safe-buffer').Buffer
// Existing buffer code will continue to work without issues:
new Buffer('hey', 'utf8')
new Buffer([1, 2, 3], 'utf8')
new Buffer(obj)
new Buffer(16) // create an uninitialized buffer (potentially unsafe)
// But you can use these new explicit APIs to make clear what you want:
Buffer.from('hey', 'utf8') // convert from many types to a Buffer
Buffer.alloc(16) // create a zero-filled buffer (safe)
Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
```
## api
### Class Method: Buffer.from(array)
<!-- YAML
added: v3.0.0
-->
* `array` {Array}
Allocates a new `Buffer` using an `array` of octets.
```js
const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
// creates a new Buffer containing ASCII bytes
// ['b','u','f','f','e','r']
```
A `TypeError` will be thrown if `array` is not an `Array`.
### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
<!-- YAML
added: v5.10.0
-->
* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
a `new ArrayBuffer()`
* `byteOffset` {Number} Default: `0`
* `length` {Number} Default: `arrayBuffer.length - byteOffset`
When passed a reference to the `.buffer` property of a `TypedArray` instance,
the newly created `Buffer` will share the same allocated memory as the
TypedArray.
```js
const arr = new Uint16Array(2);
arr[0] = 5000;
arr[1] = 4000;
const buf = Buffer.from(arr.buffer); // shares the memory with arr;
console.log(buf);
// Prints: <Buffer 88 13 a0 0f>
// changing the TypedArray changes the Buffer also
arr[1] = 6000;
console.log(buf);
// Prints: <Buffer 88 13 70 17>
```
The optional `byteOffset` and `length` arguments specify a memory range within
the `arrayBuffer` that will be shared by the `Buffer`.
```js
const ab = new ArrayBuffer(10);
const buf = Buffer.from(ab, 0, 2);
console.log(buf.length);
// Prints: 2
```
A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
### Class Method: Buffer.from(buffer)
<!-- YAML
added: v3.0.0
-->
* `buffer` {Buffer}
Copies the passed `buffer` data onto a new `Buffer` instance.
```js
const buf1 = Buffer.from('buffer');
const buf2 = Buffer.from(buf1);
buf1[0] = 0x61;
console.log(buf1.toString());
// 'auffer'
console.log(buf2.toString());
// 'buffer' (copy is not changed)
```
A `TypeError` will be thrown if `buffer` is not a `Buffer`.
### Class Method: Buffer.from(str[, encoding])
<!-- YAML
added: v5.10.0
-->
* `str` {String} String to encode.
* `encoding` {String} Encoding to use, Default: `'utf8'`
Creates a new `Buffer` containing the given JavaScript string `str`. If
provided, the `encoding` parameter identifies the character encoding.
If not provided, `encoding` defaults to `'utf8'`.
```js
const buf1 = Buffer.from('this is a tést');
console.log(buf1.toString());
// prints: this is a tést
console.log(buf1.toString('ascii'));
// prints: this is a tC)st
const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
console.log(buf2.toString());
// prints: this is a tést
```
A `TypeError` will be thrown if `str` is not a string.
### Class Method: Buffer.alloc(size[, fill[, encoding]])
<!-- YAML
added: v5.10.0
-->
* `size` {Number}
* `fill` {Value} Default: `undefined`
* `encoding` {String} Default: `utf8`
Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
`Buffer` will be *zero-filled*.
```js
const buf = Buffer.alloc(5);
console.log(buf);
// <Buffer 00 00 00 00 00>
```
The `size` must be less than or equal to the value of
`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
be created if a `size` less than or equal to 0 is specified.
If `fill` is specified, the allocated `Buffer` will be initialized by calling
`buf.fill(fill)`. See [`buf.fill()`][] for more information.
```js
const buf = Buffer.alloc(5, 'a');
console.log(buf);
// <Buffer 61 61 61 61 61>
```
If both `fill` and `encoding` are specified, the allocated `Buffer` will be
initialized by calling `buf.fill(fill, encoding)`. For example:
```js
const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
console.log(buf);
// <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
```
Calling `Buffer.alloc(size)` can be significantly slower than the alternative
`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
contents will *never contain sensitive data*.
A `TypeError` will be thrown if `size` is not a number.
### Class Method: Buffer.allocUnsafe(size)
<!-- YAML
added: v5.10.0
-->
* `size` {Number}
Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
thrown. A zero-length Buffer will be created if a `size` less than or equal to
0 is specified.
The underlying memory for `Buffer` instances created in this way is *not
initialized*. The contents of the newly created `Buffer` are unknown and
*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
`Buffer` instances to zeroes.
```js
const buf = Buffer.allocUnsafe(5);
console.log(buf);
// <Buffer 78 e0 82 02 01>
// (octets will be different, every time)
buf.fill(0);
console.log(buf);
// <Buffer 00 00 00 00 00>
```
A `TypeError` will be thrown if `size` is not a number.
Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
size `Buffer.poolSize` that is used as a pool for the fast allocation of new
`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
`new Buffer(size)` constructor) only when `size` is less than or equal to
`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
value of `Buffer.poolSize` is `8192` but can be modified.
Use of this pre-allocated internal memory pool is a key difference between
calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
difference is subtle but can be important when an application requires the
additional performance that `Buffer.allocUnsafe(size)` provides.
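A minimal sketch of the contrast, assuming the default `Buffer.poolSize` of 8192 (so the pool cutoff is 4096 bytes):

```js
const zeroed = Buffer.alloc(4096, 'a');            // never uses the internal pool
const pooled = Buffer.allocUnsafe(4096).fill('a'); // may be sliced from the pool
console.log(zeroed.equals(pooled));
// true -- identical contents, different allocation paths
```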
### Class Method: Buffer.allocUnsafeSlow(size)
<!-- YAML
added: v5.10.0
-->
* `size` {Number}
Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
`size` must be less than or equal to the value of
`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
be created if a `size` less than or equal to 0 is specified.
The underlying memory for `Buffer` instances created in this way is *not
initialized*. The contents of the newly created `Buffer` are unknown and
*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
`Buffer` instances to zeroes.
When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
allocations under 4KB are, by default, sliced from a single pre-allocated
`Buffer`. This allows applications to avoid the garbage collection overhead of
creating many individually allocated Buffers. This approach improves both
performance and memory usage by eliminating the need to track and clean up as
many `Persistent` objects.
However, in the case where a developer may need to retain a small chunk of
memory from a pool for an indeterminate amount of time, it may be appropriate
to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
copy out the relevant bits.
```js
// need to keep around a few small chunks of memory
const store = [];
socket.on('readable', () => {
const data = socket.read();
// allocate for retained data
const sb = Buffer.allocUnsafeSlow(10);
// copy the data into the new allocation
data.copy(sb, 0, 0, 10);
store.push(sb);
});
```
`Buffer.allocUnsafeSlow()` should be used only as a last resort, *after* a
developer has observed undue memory retention in their application.
A `TypeError` will be thrown if `size` is not a number.
### All the Rest
The rest of the `Buffer` API is exactly the same as in node.js.
[See the docs](https://nodejs.org/api/buffer.html).
## Related links
- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
## Why is `Buffer` unsafe?
Today, the node.js `Buffer` constructor is overloaded to handle many different argument
types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
`ArrayBuffer`, and also `Number`.
The API is optimized for convenience: you can throw any type at it, and it will try to do
what you want.
Because the Buffer constructor is so powerful, you often see code like this:
```js
// Convert UTF-8 strings to hex
function toHex (str) {
return new Buffer(str).toString('hex')
}
```
***But what happens if `toHex` is called with a `Number` argument?***
### Remote Memory Disclosure
If an attacker can make your program call the `Buffer` constructor with a `Number`
argument, then they can make it allocate uninitialized memory from the node.js process.
This could potentially disclose TLS private keys, user data, or database passwords.
When the `Buffer` constructor is passed a `Number` argument, it returns an
**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
this, you **MUST** overwrite the contents before returning it to the user.
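For illustration, on Node.js versions from before the constructor was changed to zero-fill
(pre-v8), a snippet like this would print stale process memory:

```js
// unsafe legacy behavior, shown for illustration only
var leaked = new Buffer(16)
console.log(leaked)
// e.g. <Buffer 78 e0 82 02 ...> -- whatever bytes were left behind
```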
From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
> `new Buffer(size)`
>
> - `size` Number
>
> The underlying memory for `Buffer` instances created in this way is not initialized.
> **The contents of a newly created `Buffer` are unknown and could contain sensitive
> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
(Emphasis our own.)
When a programmer intends to create an uninitialized `Buffer`, you often see code
like this:
```js
var buf = new Buffer(16)
// Immediately overwrite the uninitialized buffer with data from another buffer
for (var i = 0; i < buf.length; i++) {
buf[i] = otherBuf[i]
}
```
### Would this ever be a problem in real code?
Yes. It's surprisingly common to forget to check the type of your variables in a
dynamically-typed language like JavaScript.
Usually, the consequence of assuming the wrong type is that your program crashes with an
uncaught exception. But the failure mode for forgetting to check the type of arguments to
the `Buffer` constructor is more catastrophic.
Here's an example of a vulnerable service that takes a JSON payload and converts it to
hex:
```js
// Take a JSON payload {str: "some string"} and convert it to hex
var server = http.createServer(function (req, res) {
var data = ''
req.setEncoding('utf8')
req.on('data', function (chunk) {
data += chunk
})
req.on('end', function () {
var body = JSON.parse(data)
res.end(new Buffer(body.str).toString('hex'))
})
})
server.listen(8080)
```
In this example, an http client just has to send:
```json
{
"str": 1000
}
```
and it will get back 1,000 bytes of uninitialized memory from the server.
This is a very serious bug. It's similar in severity to
[the Heartbleed bug](http://heartbleed.com/), which allowed disclosure of OpenSSL process
memory by remote attackers.
### Which real-world packages were vulnerable?
#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
[Mathias Buus](https://github.com/mafintosh) and I
([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
Here's
[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
that fixed it. We released a new fixed version, created a
[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
vulnerable versions on npm so users will get a warning to upgrade to a newer version.
#### [`ws`](https://www.npmjs.com/package/ws)
That got us wondering if there were other vulnerable packages. Sure enough, within a short
period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
most popular WebSocket implementation in node.js.
If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
expected, then uninitialized server memory would be disclosed to the remote peer.
These were the vulnerable methods:
```js
socket.send(number)
socket.ping(number)
socket.pong(number)
```
Here's a vulnerable socket server with some echo functionality:
```js
server.on('connection', function (socket) {
socket.on('message', function (message) {
message = JSON.parse(message)
if (message.type === 'echo') {
socket.send(message.data) // send back the user's message
}
})
})
```
`socket.send(number)`, called on the server, will disclose server memory.
Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
was fixed, with a more detailed explanation. Props to
[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
### What's the solution?
It's important that node.js offers a fast way to get memory; otherwise, performance-critical
applications would needlessly get a lot slower.
But we need a better way to *signal our intent* as programmers. **When we want
uninitialized memory, we should request it explicitly.**
Sensitive functionality should not be packed into a developer-friendly API that loosely
accepts many different types. This type of API encourages the lazy practice of passing
variables in without checking the type very carefully.
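As a sketch of that careful checking (not the fix node.js core shipped), the vulnerable hex
service from earlier could validate its input before it ever reaches the `Buffer`
constructor:

```js
req.on('end', function () {
  var body = JSON.parse(data)
  if (typeof body.str !== 'string') {
    res.statusCode = 400
    return res.end('"str" must be a string')
  }
  res.end(new Buffer(body.str).toString('hex'))
})
```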
#### A new API: `Buffer.allocUnsafe(number)`
The functionality of creating buffers with uninitialized memory should be part of another
API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
frequently gets user input of all sorts of different types passed into it.
```js
var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
// Immediately overwrite the uninitialized buffer with data from another buffer
for (var i = 0; i < buf.length; i++) {
buf[i] = otherBuf[i]
}
```
### How do we fix node.js core?
We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
`semver-major`) which defends against one case:
```js
var str = 16
new Buffer(str, 'utf8')
```
In this situation, it's implied that the programmer intended the first argument to be a
string, since they passed an encoding as a second argument. Today, node.js will allocate
uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
what the programmer intended.
But this is only a partial solution, since if the programmer does `new Buffer(variable)`
(without an `encoding` parameter) there's no way to know what they intended. If `variable`
is sometimes a number, then uninitialized memory will sometimes be returned.
### What's the real long-term fix?
We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
we need uninitialized memory. But that would break 1000s of packages.
~~We believe the best solution is to:~~
~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
#### Update
We now propose adding three new APIs instead:
- `Buffer.from(value)` - convert from any type to a buffer
- `Buffer.alloc(size)` - create a zero-filled buffer
- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
This solves the core problem that affected `ws` and `bittorrent-dht`, which is
`Buffer(variable)` getting tricked into taking a number argument.
This way, existing code continues working and the impact on the npm ecosystem will be
minimal. Over time, npm maintainers can migrate performance-critical code to use
`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
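A migration sketch, assuming `safe-buffer` is installed (on newer Node.js the same three
methods are built in):

```js
var Buffer = require('safe-buffer').Buffer

var b1 = Buffer.from('abc')      // converts values; throws if passed a number
var b2 = Buffer.alloc(16)        // zero-filled, safe by default
var b3 = Buffer.allocUnsafe(16)  // uninitialized; overwrite before exposing it
```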
### Conclusion
We think there's a serious design issue with the `Buffer` API as it exists today. It
promotes insecure software by putting high-risk functionality into a convenient API
with friendly "developer ergonomics".
This wasn't merely a theoretical exercise because we found the issue in some of the
most popular npm packages.
Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
`buffer`.
```js
var Buffer = require('safe-buffer').Buffer
```
Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
the impact on the ecosystem would be minimal since it's not a breaking change.
Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
older, insecure packages would magically become safe from this attack vector.
## links
- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68)
## credit
The original issues in `bittorrent-dht`
([disclosure](https://nodesecurity.io/advisories/68)) and
`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
[Mathias Buus](https://github.com/mafintosh) and
[Feross Aboukhadijeh](http://feross.org/).
Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
and for his work running the [Node Security Project](https://nodesecurity.io/).
Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
auditing the code.
## license
MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
@@ -0,0 +1,187 @@
declare module "safe-buffer" {
export class Buffer {
length: number
write(string: string, offset?: number, length?: number, encoding?: string): number;
toString(encoding?: string, start?: number, end?: number): string;
toJSON(): { type: 'Buffer', data: any[] };
equals(otherBuffer: Buffer): boolean;
compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
slice(start?: number, end?: number): Buffer;
writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
readUInt8(offset: number, noAssert?: boolean): number;
readUInt16LE(offset: number, noAssert?: boolean): number;
readUInt16BE(offset: number, noAssert?: boolean): number;
readUInt32LE(offset: number, noAssert?: boolean): number;
readUInt32BE(offset: number, noAssert?: boolean): number;
readInt8(offset: number, noAssert?: boolean): number;
readInt16LE(offset: number, noAssert?: boolean): number;
readInt16BE(offset: number, noAssert?: boolean): number;
readInt32LE(offset: number, noAssert?: boolean): number;
readInt32BE(offset: number, noAssert?: boolean): number;
readFloatLE(offset: number, noAssert?: boolean): number;
readFloatBE(offset: number, noAssert?: boolean): number;
readDoubleLE(offset: number, noAssert?: boolean): number;
readDoubleBE(offset: number, noAssert?: boolean): number;
swap16(): Buffer;
swap32(): Buffer;
swap64(): Buffer;
writeUInt8(value: number, offset: number, noAssert?: boolean): number;
writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
writeInt8(value: number, offset: number, noAssert?: boolean): number;
writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
fill(value: any, offset?: number, end?: number): this;
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
/**
* Allocates a new buffer containing the given {str}.
*
* @param str String to store in buffer.
* @param encoding encoding to use, optional. Default is 'utf8'
*/
constructor (str: string, encoding?: string);
/**
* Allocates a new buffer of {size} octets.
*
* @param size count of octets to allocate.
*/
constructor (size: number);
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
*/
constructor (array: Uint8Array);
/**
* Produces a Buffer backed by the same allocated memory as
* the given {ArrayBuffer}.
*
*
* @param arrayBuffer The ArrayBuffer with which to share memory.
*/
constructor (arrayBuffer: ArrayBuffer);
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
*/
constructor (array: any[]);
/**
* Copies the passed {buffer} data onto a new {Buffer} instance.
*
* @param buffer The buffer to copy.
*/
constructor (buffer: Buffer);
prototype: Buffer;
/**
* Allocates a new Buffer using an {array} of octets.
*
* @param array
*/
static from(array: any[]): Buffer;
/**
* When passed a reference to the .buffer property of a TypedArray instance,
* the newly created Buffer will share the same allocated memory as the TypedArray.
* The optional {byteOffset} and {length} arguments specify a memory range
* within the {arrayBuffer} that will be shared by the Buffer.
*
* @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
* @param byteOffset
* @param length
*/
static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
/**
* Copies the passed {buffer} data onto a new Buffer instance.
*
* @param buffer
*/
static from(buffer: Buffer): Buffer;
/**
* Creates a new Buffer containing the given JavaScript string {str}.
* If provided, the {encoding} parameter identifies the character encoding.
* If not provided, {encoding} defaults to 'utf8'.
*
* @param str
*/
static from(str: string, encoding?: string): Buffer;
/**
* Returns true if {obj} is a Buffer
*
* @param obj object to test.
*/
static isBuffer(obj: any): obj is Buffer;
/**
* Returns true if {encoding} is a valid encoding argument.
* Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
*
* @param encoding string to test.
*/
static isEncoding(encoding: string): boolean;
/**
* Gives the actual byte length of a string. encoding defaults to 'utf8'.
* This is not the same as String.prototype.length since that returns the number of characters in a string.
*
* @param string string to test.
* @param encoding encoding used to evaluate (defaults to 'utf8')
*/
static byteLength(string: string, encoding?: string): number;
/**
* Returns a buffer which is the result of concatenating all the buffers in the list together.
*
* If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
* If the list has exactly one item, then the first item of the list is returned.
* If the list has more than one item, then a new Buffer is created.
*
* @param list An array of Buffer objects to concatenate
* @param totalLength Total length of the buffers when concatenated.
* If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
*/
static concat(list: Buffer[], totalLength?: number): Buffer;
/**
* The same as buf1.compare(buf2).
*/
static compare(buf1: Buffer, buf2: Buffer): number;
/**
* Allocates a new buffer of {size} octets.
*
* @param size count of octets to allocate.
* @param fill if specified, buffer will be initialized by calling buf.fill(fill).
* If parameter is omitted, buffer will be filled with zeros.
* @param encoding encoding used for call to buf.fill while initializing
*/
static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
/**
* Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
* of the newly created Buffer are unknown and may contain sensitive data.
*
* @param size count of octets to allocate
*/
static allocUnsafe(size: number): Buffer;
/**
* Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
* of the newly created Buffer are unknown and may contain sensitive data.
*
* @param size count of octets to allocate
*/
static allocUnsafeSlow(size: number): Buffer;
}
}
@@ -0,0 +1,62 @@
/* eslint-disable node/no-deprecated-api */
var buffer = require('buffer')
var Buffer = buffer.Buffer
// alternative to using Object.keys for old browsers
function copyProps (src, dst) {
for (var key in src) {
dst[key] = src[key]
}
}
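// If the running Node already provides the safe APIs, re-export core `buffer` as-is.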
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
module.exports = buffer
} else {
// Copy properties from require('buffer')
copyProps(buffer, exports)
exports.Buffer = SafeBuffer
}
function SafeBuffer (arg, encodingOrOffset, length) {
return Buffer(arg, encodingOrOffset, length)
}
// Copy static methods from Buffer
copyProps(Buffer, SafeBuffer)
SafeBuffer.from = function (arg, encodingOrOffset, length) {
if (typeof arg === 'number') {
throw new TypeError('Argument must not be a number')
}
return Buffer(arg, encodingOrOffset, length)
}
SafeBuffer.alloc = function (size, fill, encoding) {
if (typeof size !== 'number') {
throw new TypeError('Argument must be a number')
}
var buf = Buffer(size)
if (fill !== undefined) {
if (typeof encoding === 'string') {
buf.fill(fill, encoding)
} else {
buf.fill(fill)
}
} else {
buf.fill(0)
}
return buf
}
SafeBuffer.allocUnsafe = function (size) {
if (typeof size !== 'number') {
throw new TypeError('Argument must be a number')
}
return Buffer(size)
}
SafeBuffer.allocUnsafeSlow = function (size) {
if (typeof size !== 'number') {
throw new TypeError('Argument must be a number')
}
return buffer.SlowBuffer(size)
}
@@ -0,0 +1,37 @@
{
"name": "safe-buffer",
"description": "Safer Node.js Buffer API",
"version": "5.1.2",
"author": {
"name": "Feross Aboukhadijeh",
"email": "feross@feross.org",
"url": "http://feross.org"
},
"bugs": {
"url": "https://github.com/feross/safe-buffer/issues"
},
"devDependencies": {
"standard": "*",
"tape": "^4.0.0"
},
"homepage": "https://github.com/feross/safe-buffer",
"keywords": [
"buffer",
"buffer allocate",
"node security",
"safe",
"safe-buffer",
"security",
"uninitialized"
],
"license": "MIT",
"main": "index.js",
"types": "index.d.ts",
"repository": {
"type": "git",
"url": "git://github.com/feross/safe-buffer.git"
},
"scripts": {
"test": "standard && tape test/*.js"
}
}
@@ -0,0 +1,50 @@
sudo: false
language: node_js
before_install:
- npm install -g npm@2
- test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
notifications:
email: false
matrix:
fast_finish: true
include:
- node_js: '0.8'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.10'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.11'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.12'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 1
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 2
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 3
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 4
env: TASK=test
- node_js: 5
env: TASK=test
- node_js: 6
env: TASK=test
- node_js: 7
env: TASK=test
- node_js: 8
env: TASK=test
- node_js: 9
env: TASK=test
@@ -0,0 +1,48 @@
Node.js is licensed for use as follows:
"""
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:
"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
@@ -0,0 +1,47 @@
# string_decoder
***Node-core v8.9.4 string_decoder for userland***
[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/)
[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/)
```bash
npm install --save string_decoder
```
***Node-core string_decoder for userland***
This package is a mirror of the string_decoder implementation in Node-core.
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).
As of version 1.0.0 **string_decoder** uses semantic versioning.
## Previous versions
Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24; likewise, 0.11.10 matches Node 0.11.10.
## Update
The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.
## Streams Working Group
`string_decoder` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
notice of changes.
See [readable-stream](https://github.com/nodejs/readable-stream) for
more details.
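A minimal usage sketch: a multi-byte UTF-8 character split across two chunks decodes cleanly:

```js
var StringDecoder = require('string_decoder').StringDecoder
var decoder = new StringDecoder('utf8')

// 'é' is 0xC3 0xA9 in UTF-8; feed it split across two writes:
console.log(decoder.write(Buffer.from([0x74, 0xC3]))) // 't' (0xC3 is buffered)
console.log(decoder.write(Buffer.from([0xA9])))       // 'é'
```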
@@ -0,0 +1,296 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
/*<replacement>*/
var Buffer = require('safe-buffer').Buffer;
/*</replacement>*/
var isEncoding = Buffer.isEncoding || function (encoding) {
encoding = '' + encoding;
switch (encoding && encoding.toLowerCase()) {
case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary':
case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le':
case 'raw':
return true;
default:
return false;
}
};
function _normalizeEncoding(enc) {
if (!enc) return 'utf8';
var retried;
while (true) {
switch (enc) {
case 'utf8':
case 'utf-8':
return 'utf8';
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return 'utf16le';
case 'latin1':
case 'binary':
return 'latin1';
case 'base64':
case 'ascii':
case 'hex':
return enc;
default:
if (retried) return; // undefined
enc = ('' + enc).toLowerCase();
retried = true;
}
}
}
// Do not cache `Buffer.isEncoding` when checking encoding names as some
// modules monkey-patch it to support additional encodings
function normalizeEncoding(enc) {
var nenc = _normalizeEncoding(enc);
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
return nenc || enc;
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters.
exports.StringDecoder = StringDecoder;
function StringDecoder(encoding) {
this.encoding = normalizeEncoding(encoding);
var nb;
switch (this.encoding) {
case 'utf16le':
this.text = utf16Text;
this.end = utf16End;
nb = 4;
break;
case 'utf8':
this.fillLast = utf8FillLast;
nb = 4;
break;
case 'base64':
this.text = base64Text;
this.end = base64End;
nb = 3;
break;
default:
this.write = simpleWrite;
this.end = simpleEnd;
return;
}
this.lastNeed = 0;
this.lastTotal = 0;
this.lastChar = Buffer.allocUnsafe(nb);
}
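// Returns a decoded string, buffering trailing bytes of an incomplete
// character until enough bytes arrive to complete it.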
StringDecoder.prototype.write = function (buf) {
if (buf.length === 0) return '';
var r;
var i;
if (this.lastNeed) {
r = this.fillLast(buf);
if (r === undefined) return '';
i = this.lastNeed;
this.lastNeed = 0;
} else {
i = 0;
}
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
return r || '';
};
StringDecoder.prototype.end = utf8End;
// Returns only complete characters in a Buffer
StringDecoder.prototype.text = utf8Text;
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
StringDecoder.prototype.fillLast = function (buf) {
if (this.lastNeed <= buf.length) {
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
}
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
this.lastNeed -= buf.length;
};
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
// continuation byte. If an invalid byte is detected, -2 is returned.
function utf8CheckByte(byte) {
if (byte <= 0x7F) return 0;
else if (byte >> 5 === 0x06) return 2;
else if (byte >> 4 === 0x0E) return 3;
else if (byte >> 3 === 0x1E) return 4;
return byte >> 6 === 0x02 ? -1 : -2;
}
// Checks at most 3 bytes at the end of a Buffer in order to detect an
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
// needed to complete the UTF-8 character (if applicable) are returned.
function utf8CheckIncomplete(self, buf, i) {
var j = buf.length - 1;
if (j < i) return 0;
var nb = utf8CheckByte(buf[j]);
if (nb >= 0) {
if (nb > 0) self.lastNeed = nb - 1;
return nb;
}
if (--j < i || nb === -2) return 0;
nb = utf8CheckByte(buf[j]);
if (nb >= 0) {
if (nb > 0) self.lastNeed = nb - 2;
return nb;
}
if (--j < i || nb === -2) return 0;
nb = utf8CheckByte(buf[j]);
if (nb >= 0) {
if (nb > 0) {
if (nb === 2) nb = 0;
else self.lastNeed = nb - 3;
}
return nb;
}
return 0;
}
// Validates as many continuation bytes for a multi-byte UTF-8 character as
// needed or are available. If we see a non-continuation byte where we expect
// one, we "replace" the validated continuation bytes we've seen so far with
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
// behavior. The continuation byte check is included three times in the case
// where all of the continuation bytes for a character exist in the same buffer.
// It is also done this way as a slight performance increase instead of using a
// loop.
function utf8CheckExtraBytes(self, buf, p) {
if ((buf[0] & 0xC0) !== 0x80) {
self.lastNeed = 0;
return '\ufffd';
}
if (self.lastNeed > 1 && buf.length > 1) {
if ((buf[1] & 0xC0) !== 0x80) {
self.lastNeed = 1;
return '\ufffd';
}
if (self.lastNeed > 2 && buf.length > 2) {
if ((buf[2] & 0xC0) !== 0x80) {
self.lastNeed = 2;
return '\ufffd';
}
}
}
}
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
function utf8FillLast(buf) {
var p = this.lastTotal - this.lastNeed;
var r = utf8CheckExtraBytes(this, buf, p);
if (r !== undefined) return r;
if (this.lastNeed <= buf.length) {
buf.copy(this.lastChar, p, 0, this.lastNeed);
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
}
buf.copy(this.lastChar, p, 0, buf.length);
this.lastNeed -= buf.length;
}
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
// partial character, the character's bytes are buffered until the required
// number of bytes are available.
function utf8Text(buf, i) {
var total = utf8CheckIncomplete(this, buf, i);
if (!this.lastNeed) return buf.toString('utf8', i);
this.lastTotal = total;
var end = buf.length - (total - this.lastNeed);
buf.copy(this.lastChar, 0, end);
return buf.toString('utf8', i, end);
}
// For UTF-8, a replacement character is added when ending on a partial
// character.
function utf8End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) return r + '\ufffd';
return r;
}
// UTF-16LE typically needs two bytes per character, but even if we have an even
// number of bytes available, we need to check if we end on a leading/high
// surrogate. In that case, we need to wait for the next two bytes in order to
// decode the last character properly.
function utf16Text(buf, i) {
if ((buf.length - i) % 2 === 0) {
var r = buf.toString('utf16le', i);
if (r) {
var c = r.charCodeAt(r.length - 1);
if (c >= 0xD800 && c <= 0xDBFF) {
this.lastNeed = 2;
this.lastTotal = 4;
this.lastChar[0] = buf[buf.length - 2];
this.lastChar[1] = buf[buf.length - 1];
return r.slice(0, -1);
}
}
return r;
}
this.lastNeed = 1;
this.lastTotal = 2;
this.lastChar[0] = buf[buf.length - 1];
return buf.toString('utf16le', i, buf.length - 1);
}
// For UTF-16LE we do not explicitly append special replacement characters if we
// end on a partial character, we simply let v8 handle that.
function utf16End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) {
var end = this.lastTotal - this.lastNeed;
return r + this.lastChar.toString('utf16le', 0, end);
}
return r;
}
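// Base64 encodes 3-byte groups; buffer any 1-2 trailing bytes so write()
// emits only complete groups and end() flushes the remainder.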
function base64Text(buf, i) {
var n = (buf.length - i) % 3;
if (n === 0) return buf.toString('base64', i);
this.lastNeed = 3 - n;
this.lastTotal = 3;
if (n === 1) {
this.lastChar[0] = buf[buf.length - 1];
} else {
this.lastChar[0] = buf[buf.length - 2];
this.lastChar[1] = buf[buf.length - 1];
}
return buf.toString('base64', i, buf.length - n);
}
function base64End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
return r;
}
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
function simpleWrite(buf) {
return buf.toString(this.encoding);
}
function simpleEnd(buf) {
return buf && buf.length ? this.write(buf) : '';
}
@@ -0,0 +1,31 @@
{
"name": "string_decoder",
"version": "1.1.1",
"description": "The string_decoder module from Node core",
"main": "lib/string_decoder.js",
"dependencies": {
"safe-buffer": "~5.1.0"
},
"devDependencies": {
"babel-polyfill": "^6.23.0",
"core-util-is": "^1.0.2",
"inherits": "^2.0.3",
"tap": "~0.4.8"
},
"scripts": {
"test": "tap test/parallel/*.js && node test/verify-dependencies",
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js"
},
"repository": {
"type": "git",
"url": "git://github.com/nodejs/string_decoder.git"
},
"homepage": "https://github.com/nodejs/string_decoder",
"keywords": [
"string",
"decoder",
"browser",
"browserify"
],
"license": "MIT"
}
@@ -0,0 +1,67 @@
{
"name": "jszip",
"version": "3.10.1",
"author": "Stuart Knightley <stuart@stuartk.com>",
"description": "Create, read and edit .zip files with JavaScript http://stuartk.com/jszip",
"scripts": {
"test": "npm run test-node && npm run test-browser && tsc",
"test-node": "qunit --require ./test/helpers/test-utils.js --require ./test/helpers/node-test-utils.js test/asserts/",
"test-browser": "grunt build && node test/run.js --test",
"benchmark": "npm run benchmark-node && npm run benchmark-browser",
"benchmark-node": "node test/benchmark/node.js",
"benchmark-browser": "node test/run.js --benchmark",
"lint": "eslint ."
},
"contributors": [
{
"name": "Franz Buchinger"
},
{
"name": "António Afonso"
},
{
"name": "David Duponchel"
},
{
"name": "yiminghe"
}
],
"main": "./lib/index",
"browser": {
"./lib/index": "./dist/jszip.min.js",
"readable-stream": "./lib/readable-stream-browser.js"
},
"types": "./index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/Stuk/jszip.git"
},
"keywords": [
"zip",
"deflate",
"inflate"
],
"devDependencies": {
"benchmark": "^2.1.4",
"browserify": "~13.0.0",
"eslint": "^8.18.0",
"grunt": "~0.4.1",
"grunt-browserify": "~5.0.0",
"grunt-cli": "~1.1.0",
"grunt-contrib-uglify": "~4.0.1",
"http-server": "^13.0.2",
"jszip-utils": "~0.0.2",
"package-json-versionify": "1.0.2",
"playwright": "^1.15.2",
"qunit": "~2.9.2",
"tmp": "0.0.28",
"typescript": "^4.6.3"
},
"dependencies": {
"lie": "~3.3.0",
"pako": "~1.0.2",
"readable-stream": "~2.3.6",
"setimmediate": "^1.0.5"
},
"license": "(MIT OR GPL-3.0-or-later)"
}
@@ -0,0 +1,21 @@
---
title: "Sponsors"
layout: default
section: main
---
[JSZip](https://github.com/Stuk/jszip) was created in 2009 by [Stuart](https://github.com/Stuk). Since then it has received well over [600 million downloads](https://npm-stat.com/charts.html?package=jszip&from=2009-06-20&to=2022-06-20), is depended on by over 3000 packages on npm, and powers zipping and unzipping on sites large and small.
This project only exists because of all the work dedicated to it by me and the other contributors.
If you or your company has benefited from JSZip then please consider [sponsoring on Github](https://github.com/sponsors/Stuk).
## 💎 Diamond
## 🥇 Gold
## 🥈 Silver
## 🥉 Bronze
## Supporters
@@ -0,0 +1,101 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig.json to read more about this file */
/* Projects */
// "incremental": true, /* Enable incremental compilation */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */
// "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
/* Modules */
"module": "commonjs", /* Specify what module code is generated. */
// "rootDir": "./", /* Specify the root folder within your source files. */
// "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "resolveJsonModule": true, /* Enable importing .json files */
// "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
// "outDir": "./", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
"noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
/* Type Checking */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */
// "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */
// "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
// "skipLibCheck": true /* Skip type checking all .d.ts files. */
}
}
@@ -0,0 +1,247 @@
/*! FileSaver.js
* A saveAs() FileSaver implementation.
* 2014-01-24
*
* By Eli Grey, http://eligrey.com
* License: X11/MIT
* See LICENSE.md
*/
/*global self */
/*jslint bitwise: true, indent: 4, laxbreak: true, laxcomma: true, smarttabs: true, plusplus: true */
/*! @source http://purl.eligrey.com/github/FileSaver.js/blob/main/FileSaver.js */
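// Usage sketch (assumes a Blob-capable browser):
//   var blob = new Blob(["Hello, world!"], {type: "text/plain;charset=utf-8"});
//   saveAs(blob, "hello.txt");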
var saveAs = saveAs
// IE 10+ (native saveAs)
|| (typeof navigator !== "undefined" &&
navigator.msSaveOrOpenBlob && navigator.msSaveOrOpenBlob.bind(navigator))
// Everyone else
|| (function(view) {
"use strict";
// IE <10 is explicitly unsupported
if (typeof navigator !== "undefined" &&
/MSIE [1-9]\./.test(navigator.userAgent)) {
return;
}
var
doc = view.document
// only get URL when necessary in case BlobBuilder.js hasn't overridden it yet
, get_URL = function() {
return view.URL || view.webkitURL || view;
}
, URL = view.URL || view.webkitURL || view
, save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a")
, can_use_save_link = !view.externalHost && "download" in save_link
, click = function(node) {
var event = doc.createEvent("MouseEvents");
event.initMouseEvent(
"click", true, false, view, 0, 0, 0, 0, 0
, false, false, false, false, 0, null
);
node.dispatchEvent(event);
}
, webkit_req_fs = view.webkitRequestFileSystem
, req_fs = view.requestFileSystem || webkit_req_fs || view.mozRequestFileSystem
, throw_outside = function(ex) {
(view.setImmediate || view.setTimeout)(function() {
throw ex;
}, 0);
}
, force_saveable_type = "application/octet-stream"
, fs_min_size = 0
, deletion_queue = []
, process_deletion_queue = function() {
var i = deletion_queue.length;
while (i--) {
var file = deletion_queue[i];
if (typeof file === "string") { // file is an object URL
URL.revokeObjectURL(file);
} else { // file is a File
file.remove();
}
}
deletion_queue.length = 0; // clear queue
}
, dispatch = function(filesaver, event_types, event) {
event_types = [].concat(event_types);
var i = event_types.length;
while (i--) {
var listener = filesaver["on" + event_types[i]];
if (typeof listener === "function") {
try {
listener.call(filesaver, event || filesaver);
} catch (ex) {
throw_outside(ex);
}
}
}
}
, FileSaver = function(blob, name) {
// First try a.download, then web filesystem, then object URLs
var
filesaver = this
, type = blob.type
, blob_changed = false
, object_url
, target_view
, get_object_url = function() {
var object_url = get_URL().createObjectURL(blob);
deletion_queue.push(object_url);
return object_url;
}
, dispatch_all = function() {
dispatch(filesaver, "writestart progress write writeend".split(" "));
}
// on any filesys errors revert to saving with object URLs
, fs_error = function() {
// don't create more object URLs than needed
if (blob_changed || !object_url) {
object_url = get_object_url(blob);
}
if (target_view) {
target_view.location.href = object_url;
} else {
window.open(object_url, "_blank");
}
filesaver.readyState = filesaver.DONE;
dispatch_all();
}
, abortable = function(func) {
return function() {
if (filesaver.readyState !== filesaver.DONE) {
return func.apply(this, arguments);
}
};
}
, create_if_not_found = {create: true, exclusive: false}
, slice
;
filesaver.readyState = filesaver.INIT;
if (!name) {
name = "download";
}
if (can_use_save_link) {
object_url = get_object_url(blob);
// FF for Android has a nasty garbage collection mechanism
// that turns all objects that are not pure javascript into 'deadObject'
// this means `doc` and `save_link` are unusable and need to be recreated
// `view` is usable though:
doc = view.document;
save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a");
save_link.href = object_url;
save_link.download = name;
var event = doc.createEvent("MouseEvents");
event.initMouseEvent(
"click", true, false, view, 0, 0, 0, 0, 0
, false, false, false, false, 0, null
);
save_link.dispatchEvent(event);
filesaver.readyState = filesaver.DONE;
dispatch_all();
return;
}
// Object and web filesystem URLs have a problem saving in Google Chrome when
// viewed in a tab, so I force save with application/octet-stream
// http://code.google.com/p/chromium/issues/detail?id=91158
if (view.chrome && type && type !== force_saveable_type) {
slice = blob.slice || blob.webkitSlice;
blob = slice.call(blob, 0, blob.size, force_saveable_type);
blob_changed = true;
}
// Since I can't be sure that the guessed media type will trigger a download
// in WebKit, I append .download to the filename.
// https://bugs.webkit.org/show_bug.cgi?id=65440
if (webkit_req_fs && name !== "download") {
name += ".download";
}
if (type === force_saveable_type || webkit_req_fs) {
target_view = view;
}
if (!req_fs) {
fs_error();
return;
}
fs_min_size += blob.size;
req_fs(view.TEMPORARY, fs_min_size, abortable(function(fs) {
fs.root.getDirectory("saved", create_if_not_found, abortable(function(dir) {
var save = function() {
dir.getFile(name, create_if_not_found, abortable(function(file) {
file.createWriter(abortable(function(writer) {
writer.onwriteend = function(event) {
target_view.location.href = file.toURL();
deletion_queue.push(file);
filesaver.readyState = filesaver.DONE;
dispatch(filesaver, "writeend", event);
};
writer.onerror = function() {
var error = writer.error;
if (error.code !== error.ABORT_ERR) {
fs_error();
}
};
"writestart progress write abort".split(" ").forEach(function(event) {
writer["on" + event] = filesaver["on" + event];
});
writer.write(blob);
filesaver.abort = function() {
writer.abort();
filesaver.readyState = filesaver.DONE;
};
filesaver.readyState = filesaver.WRITING;
}), fs_error);
}), fs_error);
};
dir.getFile(name, {create: false}, abortable(function(file) {
// delete file if it already exists
file.remove();
save();
}), abortable(function(ex) {
if (ex.code === ex.NOT_FOUND_ERR) {
save();
} else {
fs_error();
}
}));
}), fs_error);
}), fs_error);
}
, FS_proto = FileSaver.prototype
, saveAs = function(blob, name) {
return new FileSaver(blob, name);
}
;
FS_proto.abort = function() {
var filesaver = this;
filesaver.readyState = filesaver.DONE;
dispatch(filesaver, "abort");
};
FS_proto.readyState = FS_proto.INIT = 0;
FS_proto.WRITING = 1;
FS_proto.DONE = 2;
FS_proto.error =
FS_proto.onwritestart =
FS_proto.onprogress =
FS_proto.onwrite =
FS_proto.onabort =
FS_proto.onerror =
FS_proto.onwriteend =
null;
view.addEventListener("unload", process_deletion_queue, false);
saveAs.unload = function() {
process_deletion_queue();
view.removeEventListener("unload", process_deletion_queue, false);
};
return saveAs;
}(
typeof self !== "undefined" && self
|| typeof window !== "undefined" && window
|| this.content
));
// `self` is undefined in Firefox for Android content script context
// while `this` is nsIContentFrameMessageManager
// with an attribute `content` that corresponds to the window
if (typeof module !== "undefined") module.exports = saveAs;