add simpler navigators
This commit is contained in:
parent
0b292377ed
commit
c6233bb15a
35
cluster.js
Normal file
35
cluster.js
Normal file
@ -0,0 +1,35 @@
|
||||
const { addExtra } = require('puppeteer-extra')
const puppeteerVanilla = require('puppeteer')
const StealthPlugin = require('puppeteer-extra-plugin-stealth')
const { Cluster } = require('puppeteer-cluster')

const puppeteer = addExtra(puppeteerVanilla)
// BUG FIX: StealthPlugin is a plugin *factory* — it must be invoked.
// Passing the bare function registers nothing (index.js in this repo
// already calls it correctly as StealthPlugin()).
puppeteer.use(StealthPlugin())

/**
 * Visits a fixed list of URLs with a puppeteer-cluster and logs the
 * Prebid.js version found on each page (or null when absent).
 *
 * Side effects only (console.log); no return value.
 */
const clusterSearch = async () => {
  const cluster = await Cluster.launch({
    puppeteer,
    concurrency: Cluster.CONCURRENCY_CONTEXT,
    maxConcurrency: 1,
    puppeteerOptions: {
      headless: true
    }
  })

  await cluster.task(async ({ page, data: url }) => {
    await page.goto(url, { waitUntil: 'networkidle2' });
    // BUG FIX: the original `() => { pbjs.version }` is a braced arrow body
    // with no `return`, so `version` was always undefined — and it threw a
    // ReferenceError on pages that have no Prebid.js. Return the value
    // explicitly and guard against pbjs being undefined.
    const version = await page.evaluate(
      () => (typeof pbjs !== 'undefined' ? pbjs.version : null)
    );
    console.log(url, version);
  });

  cluster.queue('https://www.techcrunch.com/');
  cluster.queue('https://www.google.com/');
  cluster.queue('https://www.wikipedia.org/');
  cluster.queue('https://www.abcnews.go.com/');
  cluster.queue('https://www.foxnews.com/');

  // Wait for all queued tasks to finish, then release browser resources.
  await cluster.idle();
  await cluster.close();
}

clusterSearch()
63
index.js
Normal file
63
index.js
Normal file
@ -0,0 +1,63 @@
|
||||
import * as fs from 'fs';
import { createObjectCsvWriter } from 'csv-writer';
import puppeteer from 'puppeteer-extra';
import StealthPlugin from 'puppeteer-extra-plugin-stealth'

// Accumulated records across all visited pages; written out as one CSV.
let payload = []
const OUTPUT_DIRECTORY = 'output';
const urls = ['https://yelp.com','https://www.cbsnews.com','https://www.cnbc.com']

/**
 * Visits each URL in `urls`, inspects the page for Prebid.js global
 * instances (window._pbjsGlobals), and writes one CSV row per instance
 * found ({url, instance, version, installedModules}) into
 * `output/<timestamp>-output.csv`.
 *
 * Side effects: creates the output directory/file; always closes the
 * page and browser via `finally`.
 */
const prebidSearch = async () => {
  const browser = await puppeteer
    .use(StealthPlugin())
    .launch({
      protocolTimeout: 300000,
      defaultViewport: null,
      headless: false
    });

  const page = await browser.newPage();

  try {
    for (const url of urls) {
      await page.goto(url);
      const results = await page.evaluate(async () => {
        const sleep = ms => new Promise(res => setTimeout(res, ms));
        await sleep((1000 * 60) * .25); // wait for page (and ad stack) to settle

        if (!window._pbjsGlobals) return null; // no Prebid.js on this page

        // One record per registered Prebid global instance.
        return window._pbjsGlobals.map((pbjs) => {
          const { version, installedModules } = window[pbjs];
          return {
            instance: pbjs,            // instance (global variable) name
            url: location.href,        // final URL after redirects
            version,                   // Prebid.js version string
            installedModules           // list of installed modules
          }
        });
      });
      // BUG FIX: `results` is null for pages without Prebid, and
      // Array#concat(null) appends a literal null element, which would
      // later become a garbage CSV row. Only merge real result arrays.
      if (results) payload = payload.concat(results);
    }

    if (!fs.existsSync(OUTPUT_DIRECTORY)) fs.mkdirSync(OUTPUT_DIRECTORY);
    const csvWriter = createObjectCsvWriter({
      path: `${OUTPUT_DIRECTORY}/${Date.now()}-output.csv`,
      header: [
        {id: 'url', title: 'URL'},
        {id: 'instance', title: 'PBJS Instance'},
        {id: 'version', title: 'PBJS Version'},
        {id: 'installedModules', title: 'PBJS Installed Modules'},
      ]
    });
    await csvWriter.writeRecords(payload);

  } catch (e) {
    console.log(e)
  } finally {
    // Always release browser resources, even when scraping fails.
    await page.close();
    await browser.close();
  }
}

prebidSearch()
9
input.txt
Normal file
9
input.txt
Normal file
@ -0,0 +1,9 @@
|
||||
https://www.cnbc.com
|
||||
https://yelp.com
|
||||
https://buzzfeed.com
|
||||
https://www.ebay.com
|
||||
https://www.foxnews.com
|
||||
https://www.cbsnews.com
|
||||
https://www.techcrunch.com
|
||||
https://www.bbc.com
|
||||
https://starcraft.fandom.com
|
||||
26
node_modules/.package-lock.json
generated
vendored
26
node_modules/.package-lock.json
generated
vendored
@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "prebid-integration-monitor",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 2,
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@babel/code-frame": {
|
||||
@ -372,6 +372,12 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/csv-writer": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/csv-writer/-/csv-writer-1.6.0.tgz",
|
||||
"integrity": "sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/data-uri-to-buffer": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz",
|
||||
@ -837,6 +843,12 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/line-reader": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/line-reader/-/line-reader-0.4.0.tgz",
|
||||
"integrity": "sha512-AYJ8g+eE7v+Ba4s/cuYqzuNulH/WbjdKQ55fvx8fNVn8WQzTpioY6vI1MoxTuMgcHYX3VlmZWbVvnkIqkyJbCA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/lines-and-columns": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
|
||||
@ -1054,6 +1066,18 @@
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/puppeteer-cluster": {
|
||||
"version": "0.24.0",
|
||||
"resolved": "https://registry.npmjs.org/puppeteer-cluster/-/puppeteer-cluster-0.24.0.tgz",
|
||||
"integrity": "sha512-zHPoNsrwkFLKFtgJJv2aC13EbMASQsE048uZd7CyikEXcl+sc1Nf6PMFb9kMoZI7/zMYxvuP658o2mw079ZZyQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"puppeteer": ">=22.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/puppeteer-core": {
|
||||
"version": "22.7.1",
|
||||
"resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-22.7.1.tgz",
|
||||
|
||||
58
node_modules/csv-writer/CHANGELOG.md
generated
vendored
Normal file
58
node_modules/csv-writer/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [1.6.0] - 2020-01-18
|
||||
### Added
|
||||
- Support for specifying values in nested objects. [#34](https://github.com/ryu1kn/csv-writer/pull/34)
|
||||
|
||||
## [1.5.0] - 2019-07-13
|
||||
### Added
|
||||
- Added `alwaysQuote` flag to always double-quote all fields. [#21](https://github.com/ryu1kn/csv-writer/pull/21)
|
||||
|
||||
## [1.4.0] - 2019-06-19
|
||||
### Added
|
||||
- Allow CRLF as a record delimiter. [#27](https://github.com/ryu1kn/csv-writer/pull/27)
|
||||
|
||||
## [1.3.0] - 2019-04-19
|
||||
### Changed
|
||||
- Changed project language from JavaScript to TypeScript.
|
||||
|
||||
### Added
|
||||
- Made TypeScript type definitions accessible. Thanks to @coyotte508.
|
||||
[PR #23](https://github.com/ryu1kn/csv-writer/pull/23)
|
||||
|
||||
## [1.2.0] - 2018-08-22
|
||||
### Added
|
||||
- CSV records are now not limited to an array but can be an iterable object. Thanks to @pineapplemachine.
|
||||
[PR #11](https://github.com/ryu1kn/csv-writer/pull/11)
|
||||
|
||||
## [1.1.0] - 2018-08-20
|
||||
### Added
|
||||
- Allow semicolon as a field delimiter as it is commonly used in CSV in some regions. Thanks to @HKskn.
|
||||
[PR #8](https://github.com/ryu1kn/csv-writer/pull/8), [#6](https://github.com/ryu1kn/csv-writer/pull/6)
|
||||
|
||||
## [1.0.1] - 2018-08-09
|
||||
### Fixed
|
||||
- Fixed the issue that coverage report badge on README shows question mark.
|
||||
Use Coveralls instead of CodeClimate to get code coverage.
|
||||
|
||||
## [1.0.0] - 2018-02-28
|
||||
### Added
|
||||
- Support for adding CSV records to already existing files. Thanks to @jonmelcher. [PR #4](https://github.com/ryu1kn/csv-writer/pull/4)
|
||||
|
||||
## [0.0.3] - 2016-11-09
|
||||
### Fixed
|
||||
- Fixed the bug that fields were not always surrounded by double quotes
|
||||
- Fixed the bug that white space characters on the edge of fields were trimmed
|
||||
|
||||
## [0.0.2] - 2016-10-15
|
||||
### Fixed
|
||||
- Fixed the bug that field values were not quoted when they have newline characters
|
||||
|
||||
## [0.0.1] - 2016-09-09
|
||||
### Added
|
||||
- Initial release of csv-writer
|
||||
328
node_modules/csv-writer/README.md
generated
vendored
Normal file
328
node_modules/csv-writer/README.md
generated
vendored
Normal file
@ -0,0 +1,328 @@
|
||||
[](https://travis-ci.org/ryu1kn/csv-writer)
|
||||
[](https://coveralls.io/github/ryu1kn/csv-writer?branch=master)
|
||||
[](https://codeclimate.com/github/ryu1kn/csv-writer)
|
||||
|
||||
# CSV Writer
|
||||
|
||||
Convert objects/arrays into a CSV string or write them into a file.
|
||||
It respects [RFC 4180](https://tools.ietf.org/html/rfc4180) for the output CSV format.
|
||||
|
||||
## Prerequisite
|
||||
|
||||
* Node version 4 or above
|
||||
|
||||
## Usage
|
||||
|
||||
The example below shows how you can write records defined as the array of objects into a file.
|
||||
|
||||
```js
|
||||
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
|
||||
const csvWriter = createCsvWriter({
|
||||
path: 'path/to/file.csv',
|
||||
header: [
|
||||
{id: 'name', title: 'NAME'},
|
||||
{id: 'lang', title: 'LANGUAGE'}
|
||||
]
|
||||
});
|
||||
|
||||
const records = [
|
||||
{name: 'Bob', lang: 'French, English'},
|
||||
{name: 'Mary', lang: 'English'}
|
||||
];
|
||||
|
||||
csvWriter.writeRecords(records) // returns a promise
|
||||
.then(() => {
|
||||
console.log('...Done');
|
||||
});
|
||||
|
||||
// This will produce a file path/to/file.csv with following contents:
|
||||
//
|
||||
// NAME,LANGUAGE
|
||||
// Bob,"French, English"
|
||||
// Mary,English
|
||||
```
|
||||
|
||||
You can keep writing records into the same file by calling `writeRecords` multiple times
|
||||
(but need to wait for the fulfillment of the `promise` of the previous `writeRecords` call).
|
||||
|
||||
```js
|
||||
// In an `async` function
|
||||
await csvWriter.writeRecords(records1)
|
||||
await csvWriter.writeRecords(records2)
|
||||
...
|
||||
```
|
||||
|
||||
However, if you need to keep writing large data to a certain file, you would want to create
|
||||
node's transform stream and use `CsvStringifier`, which is explained later, inside it
|
||||
, and pipe the stream into a file write stream.
|
||||
|
||||
If you don't want to write a header line, don't give `title` to header elements and just give field IDs as a string.
|
||||
|
||||
```js
|
||||
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
|
||||
const csvWriter = createCsvWriter({
|
||||
path: 'path/to/file.csv',
|
||||
header: ['name', 'lang']
|
||||
});
|
||||
```
|
||||
|
||||
If each record is defined as an array, use `createArrayCsvWriter` to get an `csvWriter`.
|
||||
|
||||
```js
|
||||
const createCsvWriter = require('csv-writer').createArrayCsvWriter;
|
||||
const csvWriter = createCsvWriter({
|
||||
header: ['NAME', 'LANGUAGE'],
|
||||
path: 'path/to/file.csv'
|
||||
});
|
||||
|
||||
const records = [
|
||||
['Bob', 'French, English'],
|
||||
['Mary', 'English']
|
||||
];
|
||||
|
||||
csvWriter.writeRecords(records) // returns a promise
|
||||
.then(() => {
|
||||
console.log('...Done');
|
||||
});
|
||||
|
||||
// This will produce a file path/to/file.csv with following contents:
|
||||
//
|
||||
// NAME,LANGUAGE
|
||||
// Bob,"French, English"
|
||||
// Mary,English
|
||||
```
|
||||
|
||||
If you just want to get a CSV string but don't want to write into a file,
|
||||
you can use `createObjectCsvStringifier` (or `createArrayCsvStringifier`)
|
||||
to get an `csvStringifier`.
|
||||
|
||||
```js
|
||||
const createCsvStringifier = require('csv-writer').createObjectCsvStringifier;
|
||||
const csvStringifier = createCsvStringifier({
|
||||
header: [
|
||||
{id: 'name', title: 'NAME'},
|
||||
{id: 'lang', title: 'LANGUAGE'}
|
||||
]
|
||||
});
|
||||
|
||||
const records = [
|
||||
{name: 'Bob', lang: 'French, English'},
|
||||
{name: 'Mary', lang: 'English'}
|
||||
];
|
||||
|
||||
console.log(csvStringifier.getHeaderString());
|
||||
// => 'NAME,LANGUAGE\n'
|
||||
|
||||
console.log(csvStringifier.stringifyRecords(records));
|
||||
// => 'Bob,"French, English"\nMary,English\n'
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### createObjectCsvWriter(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* path `<string>`
|
||||
|
||||
Path to a write file
|
||||
|
||||
* header `<Array<{id, title}|string>>`
|
||||
|
||||
Array of objects (`id` and `title` properties) or strings (field IDs).
|
||||
A header line will be written to the file only if given as an array of objects.
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* headerIdDelimiter `<string>` (optional)
|
||||
|
||||
Default: `undefined`. Give this value to specify a path to a value in a nested object.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
* encoding `<string>` (optional)
|
||||
|
||||
Default: `utf8`.
|
||||
|
||||
* append `<boolean>` (optional)
|
||||
|
||||
Default: `false`. When `true`, it will append CSV records to the specified file.
|
||||
If the file doesn't exist, it will create one.
|
||||
|
||||
**NOTE:** A header line will not be written to the file if `true` is given.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<CsvWriter>`
|
||||
|
||||
|
||||
### createArrayCsvWriter(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* path `<string>`
|
||||
|
||||
Path to a write file
|
||||
|
||||
* header `<Array<string>>` (optional)
|
||||
|
||||
Array of field titles
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
* encoding `<string>` (optional)
|
||||
|
||||
Default: `utf8`.
|
||||
|
||||
* append `<boolean>` (optional)
|
||||
|
||||
Default: `false`. When `true`, it will append CSV records to the specified file.
|
||||
If the file doesn't exist, it will create one.
|
||||
|
||||
**NOTE:** A header line will not be written to the file if `true` is given.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<CsvWriter>`
|
||||
|
||||
|
||||
### CsvWriter#writeRecords(records)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* records `<Iterator<Object|Array>>`
|
||||
|
||||
Depending on which function was used to create a `csvWriter` (i.e. `createObjectCsvWriter` or `createArrayCsvWriter`),
|
||||
records will be either a collection of objects or arrays. As long as the collection is iterable, it doesn't need to be an array.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<Promise>`
|
||||
|
||||
|
||||
### createObjectCsvStringifier(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* header `<Array<{id, title}|string>>`
|
||||
|
||||
Array of objects (`id` and `title` properties) or strings (field IDs)
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* headerIdDelimiter `<string>` (optional)
|
||||
|
||||
Default: `undefined`. Give this value to specify a path to a value in a nested object.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<ObjectCsvStringifier>`
|
||||
|
||||
### ObjectCsvStringifier#getHeaderString()
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
### ObjectCsvStringifier#stringifyRecords(records)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* records `<Array<Object>>`
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
### createArrayCsvStringifier(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* header `<Array<string>>` (optional)
|
||||
|
||||
Array of field titles
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<ArrayCsvStringifier>`
|
||||
|
||||
### ArrayCsvStringifier#getHeaderString()
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
### ArrayCsvStringifier#stringifyRecords(records)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* records `<Array<Array<string>>>`
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
|
||||
## Request Features or Report Bugs
|
||||
|
||||
Feature requests and bug reports are very welcome: https://github.com/ryu1kn/csv-writer/issues
|
||||
|
||||
A couple of requests from me when you raise an issue on GitHub.
|
||||
|
||||
* **Requesting a feature:** Please try to provide the context of why you want the feature. Such as,
|
||||
in what situation the feature could help you and how, or how the lack of the feature is causing an inconvenience to you.
|
||||
I can't start thinking of introducing it until I understand how it helps you 🙂
|
||||
* **Reporting a bug:** If you could provide a runnable code snippet that reproduces the bug, it would be very helpful!
|
||||
|
||||
|
||||
## Development
|
||||
|
||||
### Prerequisite
|
||||
|
||||
* Node version 8 or above
|
||||
* Docker
|
||||
19
node_modules/csv-writer/dist/index.js
generated
vendored
Normal file
19
node_modules/csv-writer/dist/index.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var csv_stringifier_factory_1 = require("./lib/csv-stringifier-factory");
|
||||
var csv_writer_factory_1 = require("./lib/csv-writer-factory");
|
||||
var csvStringifierFactory = new csv_stringifier_factory_1.CsvStringifierFactory();
|
||||
var csvWriterFactory = new csv_writer_factory_1.CsvWriterFactory(csvStringifierFactory);
|
||||
exports.createArrayCsvStringifier = function (params) {
|
||||
return csvStringifierFactory.createArrayCsvStringifier(params);
|
||||
};
|
||||
exports.createObjectCsvStringifier = function (params) {
|
||||
return csvStringifierFactory.createObjectCsvStringifier(params);
|
||||
};
|
||||
exports.createArrayCsvWriter = function (params) {
|
||||
return csvWriterFactory.createArrayCsvWriter(params);
|
||||
};
|
||||
exports.createObjectCsvWriter = function (params) {
|
||||
return csvWriterFactory.createObjectCsvWriter(params);
|
||||
};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/csv-writer/dist/index.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/index.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,yEAIuC;AACvC,+DAAuG;AAEvG,IAAM,qBAAqB,GAAG,IAAI,+CAAqB,EAAE,CAAC;AAC1D,IAAM,gBAAgB,GAAG,IAAI,qCAAgB,CAAC,qBAAqB,CAAC,CAAC;AAExD,QAAA,yBAAyB,GAAG,UAAC,MAAiC;IACnE,OAAA,qBAAqB,CAAC,yBAAyB,CAAC,MAAM,CAAC;AAAvD,CAAuD,CAAC;AAEnD,QAAA,0BAA0B,GAAG,UAAC,MAAkC;IACrE,OAAA,qBAAqB,CAAC,0BAA0B,CAAC,MAAM,CAAC;AAAxD,CAAwD,CAAC;AAEpD,QAAA,oBAAoB,GAAG,UAAC,MAA4B;IACzD,OAAA,gBAAgB,CAAC,oBAAoB,CAAC,MAAM,CAAC;AAA7C,CAA6C,CAAC;AAEzC,QAAA,qBAAqB,GAAG,UAAC,MAA6B;IAC3D,OAAA,gBAAgB,CAAC,qBAAqB,CAAC,MAAM,CAAC;AAA9C,CAA8C,CAAC"}
|
||||
20
node_modules/csv-writer/dist/lib/csv-stringifier-factory.js
generated
vendored
Normal file
20
node_modules/csv-writer/dist/lib/csv-stringifier-factory.js
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var array_1 = require("./csv-stringifiers/array");
|
||||
var field_stringifier_1 = require("./field-stringifier");
|
||||
var object_1 = require("./csv-stringifiers/object");
|
||||
var CsvStringifierFactory = /** @class */ (function () {
|
||||
function CsvStringifierFactory() {
|
||||
}
|
||||
CsvStringifierFactory.prototype.createArrayCsvStringifier = function (params) {
|
||||
var fieldStringifier = field_stringifier_1.createFieldStringifier(params.fieldDelimiter, params.alwaysQuote);
|
||||
return new array_1.ArrayCsvStringifier(fieldStringifier, params.recordDelimiter, params.header);
|
||||
};
|
||||
CsvStringifierFactory.prototype.createObjectCsvStringifier = function (params) {
|
||||
var fieldStringifier = field_stringifier_1.createFieldStringifier(params.fieldDelimiter, params.alwaysQuote);
|
||||
return new object_1.ObjectCsvStringifier(fieldStringifier, params.header, params.recordDelimiter, params.headerIdDelimiter);
|
||||
};
|
||||
return CsvStringifierFactory;
|
||||
}());
|
||||
exports.CsvStringifierFactory = CsvStringifierFactory;
|
||||
//# sourceMappingURL=csv-stringifier-factory.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-stringifier-factory.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/csv-stringifier-factory.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"csv-stringifier-factory.js","sourceRoot":"","sources":["../../src/lib/csv-stringifier-factory.ts"],"names":[],"mappings":";;AAAA,kDAA6D;AAC7D,yDAA2D;AAC3D,oDAA+D;AAkB/D;IAAA;IAYA,CAAC;IAVG,yDAAyB,GAAzB,UAA0B,MAAiC;QACvD,IAAM,gBAAgB,GAAG,0CAAsB,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC;QAC3F,OAAO,IAAI,2BAAmB,CAAC,gBAAgB,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IAC5F,CAAC;IAED,0DAA0B,GAA1B,UAA2B,MAAkC;QACzD,IAAM,gBAAgB,GAAG,0CAAsB,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC;QAC3F,OAAO,IAAI,6BAAoB,CAAC,gBAAgB,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,iBAAiB,CAAC,CAAC;IACvH,CAAC;IAEL,4BAAC;AAAD,CAAC,AAZD,IAYC;AAZY,sDAAqB"}
|
||||
38
node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js
generated
vendored
Normal file
38
node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var DEFAULT_RECORD_DELIMITER = '\n';
|
||||
var VALID_RECORD_DELIMITERS = [DEFAULT_RECORD_DELIMITER, '\r\n'];
|
||||
var CsvStringifier = /** @class */ (function () {
|
||||
function CsvStringifier(fieldStringifier, recordDelimiter) {
|
||||
if (recordDelimiter === void 0) { recordDelimiter = DEFAULT_RECORD_DELIMITER; }
|
||||
this.fieldStringifier = fieldStringifier;
|
||||
this.recordDelimiter = recordDelimiter;
|
||||
_validateRecordDelimiter(recordDelimiter);
|
||||
}
|
||||
CsvStringifier.prototype.getHeaderString = function () {
|
||||
var headerRecord = this.getHeaderRecord();
|
||||
return headerRecord ? this.joinRecords([this.getCsvLine(headerRecord)]) : null;
|
||||
};
|
||||
CsvStringifier.prototype.stringifyRecords = function (records) {
|
||||
var _this = this;
|
||||
var csvLines = Array.from(records, function (record) { return _this.getCsvLine(_this.getRecordAsArray(record)); });
|
||||
return this.joinRecords(csvLines);
|
||||
};
|
||||
CsvStringifier.prototype.getCsvLine = function (record) {
|
||||
var _this = this;
|
||||
return record
|
||||
.map(function (fieldValue) { return _this.fieldStringifier.stringify(fieldValue); })
|
||||
.join(this.fieldStringifier.fieldDelimiter);
|
||||
};
|
||||
CsvStringifier.prototype.joinRecords = function (records) {
|
||||
return records.join(this.recordDelimiter) + this.recordDelimiter;
|
||||
};
|
||||
return CsvStringifier;
|
||||
}());
|
||||
exports.CsvStringifier = CsvStringifier;
|
||||
function _validateRecordDelimiter(delimiter) {
|
||||
if (VALID_RECORD_DELIMITERS.indexOf(delimiter) === -1) {
|
||||
throw new Error("Invalid record delimiter `" + delimiter + "` is specified");
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=abstract.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"abstract.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/abstract.ts"],"names":[],"mappings":";;AAGA,IAAM,wBAAwB,GAAG,IAAI,CAAC;AACtC,IAAM,uBAAuB,GAAG,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC;AAEnE;IAEI,wBAA6B,gBAAkC,EAClC,eAA0C;QAA1C,gCAAA,EAAA,0CAA0C;QAD1C,qBAAgB,GAAhB,gBAAgB,CAAkB;QAClC,oBAAe,GAAf,eAAe,CAA2B;QACnE,wBAAwB,CAAC,eAAe,CAAC,CAAC;IAC9C,CAAC;IAED,wCAAe,GAAf;QACI,IAAM,YAAY,GAAG,IAAI,CAAC,eAAe,EAAE,CAAC;QAC5C,OAAO,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACnF,CAAC;IAED,yCAAgB,GAAhB,UAAiB,OAAkC;QAAnD,iBAGC;QAFG,IAAM,QAAQ,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,UAAA,MAAM,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,KAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAA9C,CAA8C,CAAC,CAAC;QAC/F,OAAO,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;IACtC,CAAC;IAMO,mCAAU,GAAlB,UAAmB,MAAe;QAAlC,iBAIC;QAHG,OAAO,MAAM;aACR,GAAG,CAAC,UAAA,UAAU,IAAI,OAAA,KAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC,UAAU,CAAC,EAA3C,CAA2C,CAAC;aAC9D,IAAI,CAAC,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC;IACpD,CAAC;IAEO,oCAAW,GAAnB,UAAoB,OAAiB;QACjC,OAAO,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,eAAe,CAAC;IACrE,CAAC;IACL,qBAAC;AAAD,CAAC,AA9BD,IA8BC;AA9BqB,wCAAc;AAgCpC,SAAS,wBAAwB,CAAC,SAAiB;IAC/C,IAAI,uBAAuB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;QACnD,MAAM,IAAI,KAAK,CAAC,+BAA8B,SAAS,mBAAiB,CAAC,CAAC;KAC7E;AACL,CAAC"}
|
||||
33
node_modules/csv-writer/dist/lib/csv-stringifiers/array.js
generated
vendored
Normal file
33
node_modules/csv-writer/dist/lib/csv-stringifiers/array.js
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
return function (d, b) {
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var abstract_1 = require("./abstract");
|
||||
var ArrayCsvStringifier = /** @class */ (function (_super) {
|
||||
__extends(ArrayCsvStringifier, _super);
|
||||
function ArrayCsvStringifier(fieldStringifier, recordDelimiter, header) {
|
||||
var _this = _super.call(this, fieldStringifier, recordDelimiter) || this;
|
||||
_this.header = header;
|
||||
return _this;
|
||||
}
|
||||
ArrayCsvStringifier.prototype.getHeaderRecord = function () {
|
||||
return this.header;
|
||||
};
|
||||
ArrayCsvStringifier.prototype.getRecordAsArray = function (record) {
|
||||
return record;
|
||||
};
|
||||
return ArrayCsvStringifier;
|
||||
}(abstract_1.CsvStringifier));
|
||||
exports.ArrayCsvStringifier = ArrayCsvStringifier;
|
||||
//# sourceMappingURL=array.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-stringifiers/array.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/csv-stringifiers/array.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"array.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/array.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,uCAA0C;AAI1C;IAAyC,uCAAuB;IAE5D,6BAAY,gBAAkC,EAClC,eAAwB,EACP,MAAiB;QAF9C,YAGI,kBAAM,gBAAgB,EAAE,eAAe,CAAC,SAC3C;QAF4B,YAAM,GAAN,MAAM,CAAW;;IAE9C,CAAC;IAES,6CAAe,GAAzB;QACI,OAAO,IAAI,CAAC,MAAM,CAAC;IACvB,CAAC;IAES,8CAAgB,GAA1B,UAA2B,MAAe;QACtC,OAAO,MAAM,CAAC;IAClB,CAAC;IACL,0BAAC;AAAD,CAAC,AAfD,CAAyC,yBAAc,GAetD;AAfY,kDAAmB"}
|
||||
57
node_modules/csv-writer/dist/lib/csv-stringifiers/object.js
generated
vendored
Normal file
57
node_modules/csv-writer/dist/lib/csv-stringifiers/object.js
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
"use strict";
|
||||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
return function (d, b) {
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var abstract_1 = require("./abstract");
|
||||
var object_1 = require("../lang/object");
|
||||
var ObjectCsvStringifier = /** @class */ (function (_super) {
|
||||
__extends(ObjectCsvStringifier, _super);
|
||||
function ObjectCsvStringifier(fieldStringifier, header, recordDelimiter, headerIdDelimiter) {
|
||||
var _this = _super.call(this, fieldStringifier, recordDelimiter) || this;
|
||||
_this.header = header;
|
||||
_this.headerIdDelimiter = headerIdDelimiter;
|
||||
return _this;
|
||||
}
|
||||
ObjectCsvStringifier.prototype.getHeaderRecord = function () {
|
||||
if (!this.isObjectHeader)
|
||||
return null;
|
||||
return this.header.map(function (field) { return field.title; });
|
||||
};
|
||||
ObjectCsvStringifier.prototype.getRecordAsArray = function (record) {
|
||||
var _this = this;
|
||||
return this.fieldIds.map(function (fieldId) { return _this.getNestedValue(record, fieldId); });
|
||||
};
|
||||
ObjectCsvStringifier.prototype.getNestedValue = function (obj, key) {
|
||||
if (!this.headerIdDelimiter)
|
||||
return obj[key];
|
||||
return key.split(this.headerIdDelimiter).reduce(function (subObj, keyPart) { return (subObj || {})[keyPart]; }, obj);
|
||||
};
|
||||
Object.defineProperty(ObjectCsvStringifier.prototype, "fieldIds", {
|
||||
get: function () {
|
||||
return this.isObjectHeader ? this.header.map(function (column) { return column.id; }) : this.header;
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
Object.defineProperty(ObjectCsvStringifier.prototype, "isObjectHeader", {
|
||||
get: function () {
|
||||
return object_1.isObject(this.header && this.header[0]);
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
return ObjectCsvStringifier;
|
||||
}(abstract_1.CsvStringifier));
|
||||
exports.ObjectCsvStringifier = ObjectCsvStringifier;
|
||||
//# sourceMappingURL=object.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-stringifiers/object.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/csv-stringifiers/object.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"object.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/object.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,uCAA0C;AAG1C,yCAAmD;AAEnD;IAA0C,wCAAgC;IAEtE,8BAAY,gBAAkC,EACjB,MAA+B,EAChD,eAAwB,EACP,iBAA0B;QAHvD,YAII,kBAAM,gBAAgB,EAAE,eAAe,CAAC,SAC3C;QAJ4B,YAAM,GAAN,MAAM,CAAyB;QAE/B,uBAAiB,GAAjB,iBAAiB,CAAS;;IAEvD,CAAC;IAES,8CAAe,GAAzB;QACI,IAAI,CAAC,IAAI,CAAC,cAAc;YAAE,OAAO,IAAI,CAAC;QACtC,OAAQ,IAAI,CAAC,MAA6B,CAAC,GAAG,CAAC,UAAA,KAAK,IAAI,OAAA,KAAK,CAAC,KAAK,EAAX,CAAW,CAAC,CAAC;IACzE,CAAC;IAES,+CAAgB,GAA1B,UAA2B,MAAwB;QAAnD,iBAEC;QADG,OAAO,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,UAAA,OAAO,IAAI,OAAA,KAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,EAApC,CAAoC,CAAC,CAAC;IAC9E,CAAC;IAEO,6CAAc,GAAtB,UAAuB,GAAqB,EAAE,GAAW;QACrD,IAAI,CAAC,IAAI,CAAC,iBAAiB;YAAE,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;QAC7C,OAAO,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAAC,UAAC,MAAM,EAAE,OAAO,IAAK,OAAA,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,EAAvB,CAAuB,EAAE,GAAG,CAAC,CAAC;IACvG,CAAC;IAED,sBAAY,0CAAQ;aAApB;YACI,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC,CAAE,IAAI,CAAC,MAA6B,CAAC,GAAG,CAAC,UAAA,MAAM,IAAI,OAAA,MAAM,CAAC,EAAE,EAAT,CAAS,CAAC,CAAC,CAAC,CAAE,IAAI,CAAC,MAAmB,CAAC;QAC1H,CAAC;;;OAAA;IAED,sBAAY,gDAAc;aAA1B;YACI,OAAO,iBAAQ,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC;;;OAAA;IACL,2BAAC;AAAD,CAAC,AA9BD,CAA0C,yBAAc,GA8BvD;AA9BY,oDAAoB"}
|
||||
30
node_modules/csv-writer/dist/lib/csv-writer-factory.js
generated
vendored
Normal file
30
node_modules/csv-writer/dist/lib/csv-writer-factory.js
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var csv_writer_1 = require("./csv-writer");
|
||||
var CsvWriterFactory = /** @class */ (function () {
|
||||
function CsvWriterFactory(csvStringifierFactory) {
|
||||
this.csvStringifierFactory = csvStringifierFactory;
|
||||
}
|
||||
CsvWriterFactory.prototype.createArrayCsvWriter = function (params) {
|
||||
var csvStringifier = this.csvStringifierFactory.createArrayCsvStringifier({
|
||||
header: params.header,
|
||||
fieldDelimiter: params.fieldDelimiter,
|
||||
recordDelimiter: params.recordDelimiter,
|
||||
alwaysQuote: params.alwaysQuote
|
||||
});
|
||||
return new csv_writer_1.CsvWriter(csvStringifier, params.path, params.encoding, params.append);
|
||||
};
|
||||
CsvWriterFactory.prototype.createObjectCsvWriter = function (params) {
|
||||
var csvStringifier = this.csvStringifierFactory.createObjectCsvStringifier({
|
||||
header: params.header,
|
||||
fieldDelimiter: params.fieldDelimiter,
|
||||
recordDelimiter: params.recordDelimiter,
|
||||
headerIdDelimiter: params.headerIdDelimiter,
|
||||
alwaysQuote: params.alwaysQuote
|
||||
});
|
||||
return new csv_writer_1.CsvWriter(csvStringifier, params.path, params.encoding, params.append);
|
||||
};
|
||||
return CsvWriterFactory;
|
||||
}());
|
||||
exports.CsvWriterFactory = CsvWriterFactory;
|
||||
//# sourceMappingURL=csv-writer-factory.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-writer-factory.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/csv-writer-factory.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"csv-writer-factory.js","sourceRoot":"","sources":["../../src/lib/csv-writer-factory.ts"],"names":[],"mappings":";;AAAA,2CAAuC;AAyBvC;IACI,0BAA6B,qBAA4C;QAA5C,0BAAqB,GAArB,qBAAqB,CAAuB;IAAG,CAAC;IAE7E,+CAAoB,GAApB,UAAqB,MAA4B;QAC7C,IAAM,cAAc,GAAG,IAAI,CAAC,qBAAqB,CAAC,yBAAyB,CAAC;YACxE,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,eAAe,EAAE,MAAM,CAAC,eAAe;YACvC,WAAW,EAAE,MAAM,CAAC,WAAW;SAClC,CAAC,CAAC;QACH,OAAO,IAAI,sBAAS,CAAC,cAAc,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IACtF,CAAC;IAED,gDAAqB,GAArB,UAAsB,MAA6B;QAC/C,IAAM,cAAc,GAAG,IAAI,CAAC,qBAAqB,CAAC,0BAA0B,CAAC;YACzE,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,eAAe,EAAE,MAAM,CAAC,eAAe;YACvC,iBAAiB,EAAE,MAAM,CAAC,iBAAiB;YAC3C,WAAW,EAAE,MAAM,CAAC,WAAW;SAClC,CAAC,CAAC;QACH,OAAO,IAAI,sBAAS,CAAC,cAAc,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IACtF,CAAC;IACL,uBAAC;AAAD,CAAC,AAvBD,IAuBC;AAvBY,4CAAgB"}
|
||||
76
node_modules/csv-writer/dist/lib/csv-writer.js
generated
vendored
Normal file
76
node_modules/csv-writer/dist/lib/csv-writer.js
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var file_writer_1 = require("./file-writer");
|
||||
var DEFAULT_INITIAL_APPEND_FLAG = false;
|
||||
var CsvWriter = /** @class */ (function () {
|
||||
function CsvWriter(csvStringifier, path, encoding, append) {
|
||||
if (append === void 0) { append = DEFAULT_INITIAL_APPEND_FLAG; }
|
||||
this.csvStringifier = csvStringifier;
|
||||
this.append = append;
|
||||
this.fileWriter = new file_writer_1.FileWriter(path, this.append, encoding);
|
||||
}
|
||||
CsvWriter.prototype.writeRecords = function (records) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var recordsString, writeString;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0:
|
||||
recordsString = this.csvStringifier.stringifyRecords(records);
|
||||
writeString = this.headerString + recordsString;
|
||||
return [4 /*yield*/, this.fileWriter.write(writeString)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
this.append = true;
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
Object.defineProperty(CsvWriter.prototype, "headerString", {
|
||||
get: function () {
|
||||
var headerString = !this.append && this.csvStringifier.getHeaderString();
|
||||
return headerString || '';
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
return CsvWriter;
|
||||
}());
|
||||
exports.CsvWriter = CsvWriter;
|
||||
//# sourceMappingURL=csv-writer.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-writer.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/csv-writer.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"csv-writer.js","sourceRoot":"","sources":["../../src/lib/csv-writer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,6CAAyC;AAEzC,IAAM,2BAA2B,GAAG,KAAK,CAAC;AAE1C;IAGI,mBAA6B,cAAiC,EAClD,IAAY,EACZ,QAAiB,EACT,MAAoC;QAApC,uBAAA,EAAA,oCAAoC;QAH3B,mBAAc,GAAd,cAAc,CAAmB;QAG1C,WAAM,GAAN,MAAM,CAA8B;QACpD,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAClE,CAAC;IAEK,gCAAY,GAAlB,UAAmB,OAAY;;;;;;wBACrB,aAAa,GAAG,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;wBAC9D,WAAW,GAAG,IAAI,CAAC,YAAY,GAAG,aAAa,CAAC;wBACtD,qBAAM,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,EAAA;;wBAAxC,SAAwC,CAAC;wBACzC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;;;;;KACtB;IAED,sBAAY,mCAAY;aAAxB;YACI,IAAM,YAAY,GAAG,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,cAAc,CAAC,eAAe,EAAE,CAAC;YAC3E,OAAO,YAAY,IAAI,EAAE,CAAC;QAC9B,CAAC;;;OAAA;IACL,gBAAC;AAAD,CAAC,AArBD,IAqBC;AArBY,8BAAS"}
|
||||
69
node_modules/csv-writer/dist/lib/field-stringifier.js
generated
vendored
Normal file
69
node_modules/csv-writer/dist/lib/field-stringifier.js
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
"use strict";
|
||||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
return function (d, b) {
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var DEFAULT_FIELD_DELIMITER = ',';
|
||||
var VALID_FIELD_DELIMITERS = [DEFAULT_FIELD_DELIMITER, ';'];
|
||||
var FieldStringifier = /** @class */ (function () {
|
||||
function FieldStringifier(fieldDelimiter) {
|
||||
this.fieldDelimiter = fieldDelimiter;
|
||||
}
|
||||
FieldStringifier.prototype.isEmpty = function (value) {
|
||||
return typeof value === 'undefined' || value === null || value === '';
|
||||
};
|
||||
FieldStringifier.prototype.quoteField = function (field) {
|
||||
return "\"" + field.replace(/"/g, '""') + "\"";
|
||||
};
|
||||
return FieldStringifier;
|
||||
}());
|
||||
exports.FieldStringifier = FieldStringifier;
|
||||
var DefaultFieldStringifier = /** @class */ (function (_super) {
|
||||
__extends(DefaultFieldStringifier, _super);
|
||||
function DefaultFieldStringifier() {
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
DefaultFieldStringifier.prototype.stringify = function (value) {
|
||||
if (this.isEmpty(value))
|
||||
return '';
|
||||
var str = String(value);
|
||||
return this.needsQuote(str) ? this.quoteField(str) : str;
|
||||
};
|
||||
DefaultFieldStringifier.prototype.needsQuote = function (str) {
|
||||
return str.includes(this.fieldDelimiter) || str.includes('\n') || str.includes('"');
|
||||
};
|
||||
return DefaultFieldStringifier;
|
||||
}(FieldStringifier));
|
||||
var ForceQuoteFieldStringifier = /** @class */ (function (_super) {
|
||||
__extends(ForceQuoteFieldStringifier, _super);
|
||||
function ForceQuoteFieldStringifier() {
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
ForceQuoteFieldStringifier.prototype.stringify = function (value) {
|
||||
return this.isEmpty(value) ? '' : this.quoteField(String(value));
|
||||
};
|
||||
return ForceQuoteFieldStringifier;
|
||||
}(FieldStringifier));
|
||||
function createFieldStringifier(fieldDelimiter, alwaysQuote) {
|
||||
if (fieldDelimiter === void 0) { fieldDelimiter = DEFAULT_FIELD_DELIMITER; }
|
||||
if (alwaysQuote === void 0) { alwaysQuote = false; }
|
||||
_validateFieldDelimiter(fieldDelimiter);
|
||||
return alwaysQuote ? new ForceQuoteFieldStringifier(fieldDelimiter) : new DefaultFieldStringifier(fieldDelimiter);
|
||||
}
|
||||
exports.createFieldStringifier = createFieldStringifier;
|
||||
function _validateFieldDelimiter(delimiter) {
|
||||
if (VALID_FIELD_DELIMITERS.indexOf(delimiter) === -1) {
|
||||
throw new Error("Invalid field delimiter `" + delimiter + "` is specified");
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=field-stringifier.js.map
|
||||
1
node_modules/csv-writer/dist/lib/field-stringifier.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/field-stringifier.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"field-stringifier.js","sourceRoot":"","sources":["../../src/lib/field-stringifier.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAEA,IAAM,uBAAuB,GAAG,GAAG,CAAC;AACpC,IAAM,sBAAsB,GAAG,CAAC,uBAAuB,EAAE,GAAG,CAAC,CAAC;AAE9D;IACI,0BAA4B,cAAsB;QAAtB,mBAAc,GAAd,cAAc,CAAQ;IAAG,CAAC;IAI5C,kCAAO,GAAjB,UAAkB,KAAa;QAC3B,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,EAAE,CAAC;IAC1E,CAAC;IAES,qCAAU,GAApB,UAAqB,KAAa;QAC9B,OAAO,OAAI,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,OAAG,CAAC;IAC5C,CAAC;IACL,uBAAC;AAAD,CAAC,AAZD,IAYC;AAZqB,4CAAgB;AActC;IAAsC,2CAAgB;IAAtD;;IAUA,CAAC;IATG,2CAAS,GAAT,UAAU,KAAa;QACnB,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;YAAE,OAAO,EAAE,CAAC;QACnC,IAAM,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;QAC1B,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC7D,CAAC;IAEO,4CAAU,GAAlB,UAAmB,GAAW;QAC1B,OAAO,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACxF,CAAC;IACL,8BAAC;AAAD,CAAC,AAVD,CAAsC,gBAAgB,GAUrD;AAED;IAAyC,8CAAgB;IAAzD;;IAIA,CAAC;IAHG,8CAAS,GAAT,UAAU,KAAa;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;IACrE,CAAC;IACL,iCAAC;AAAD,CAAC,AAJD,CAAyC,gBAAgB,GAIxD;AAED,SAAgB,sBAAsB,CAAC,cAAgD,EAAE,WAAmB;IAArE,+BAAA,EAAA,wCAAgD;IAAE,4BAAA,EAAA,mBAAmB;IACxG,uBAAuB,CAAC,cAAc,CAAC,CAAC;IACxC,OAAO,WAAW,CAAC,CAAC,CAAC,IAAI,0BAA0B,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,IAAI,uBAAuB,CAAC,cAAc,CAAC,CAAC;AACtH,CAAC;AAHD,wDAGC;AAED,SAAS,uBAAuB,CAAC,SAAiB;IAC9C,IAAI,sBAAsB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;QAClD,MAAM,IAAI,KAAK,CAAC,8BAA6B,SAAS,mBAAiB,CAAC,CAAC;KAC5E;AACL,CAAC"}
|
||||
72
node_modules/csv-writer/dist/lib/file-writer.js
generated
vendored
Normal file
72
node_modules/csv-writer/dist/lib/file-writer.js
generated
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var promise_1 = require("./lang/promise");
|
||||
var fs_1 = require("fs");
|
||||
var writeFilePromise = promise_1.promisify(fs_1.writeFile);
|
||||
var DEFAULT_ENCODING = 'utf8';
|
||||
var FileWriter = /** @class */ (function () {
|
||||
function FileWriter(path, append, encoding) {
|
||||
if (encoding === void 0) { encoding = DEFAULT_ENCODING; }
|
||||
this.path = path;
|
||||
this.append = append;
|
||||
this.encoding = encoding;
|
||||
}
|
||||
FileWriter.prototype.write = function (string) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writeFilePromise(this.path, string, this.getWriteOption())];
|
||||
case 1:
|
||||
_a.sent();
|
||||
this.append = true;
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
FileWriter.prototype.getWriteOption = function () {
|
||||
return {
|
||||
encoding: this.encoding,
|
||||
flag: this.append ? 'a' : 'w'
|
||||
};
|
||||
};
|
||||
return FileWriter;
|
||||
}());
|
||||
exports.FileWriter = FileWriter;
|
||||
//# sourceMappingURL=file-writer.js.map
|
||||
1
node_modules/csv-writer/dist/lib/file-writer.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/file-writer.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"file-writer.js","sourceRoot":"","sources":["../../src/lib/file-writer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,0CAAyC;AACzC,yBAA6B;AAE7B,IAAM,gBAAgB,GAAG,mBAAS,CAAC,cAAS,CAAC,CAAC;AAE9C,IAAM,gBAAgB,GAAG,MAAM,CAAC;AAEhC;IAEI,oBAA6B,IAAY,EACrB,MAAe,EACN,QAA2B;QAA3B,yBAAA,EAAA,2BAA2B;QAF3B,SAAI,GAAJ,IAAI,CAAQ;QACrB,WAAM,GAAN,MAAM,CAAS;QACN,aAAQ,GAAR,QAAQ,CAAmB;IACxD,CAAC;IAEK,0BAAK,GAAX,UAAY,MAAc;;;;4BACtB,qBAAM,gBAAgB,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,cAAc,EAAE,CAAC,EAAA;;wBAAhE,SAAgE,CAAC;wBACjE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;;;;;KACtB;IAEO,mCAAc,GAAtB;QACI,OAAO;YACH,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;SAChC,CAAC;IACN,CAAC;IACL,iBAAC;AAAD,CAAC,AAlBD,IAkBC;AAlBY,gCAAU"}
|
||||
6
node_modules/csv-writer/dist/lib/lang/object.js
generated
vendored
Normal file
6
node_modules/csv-writer/dist/lib/lang/object.js
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isObject = function (value) {
|
||||
return Object.prototype.toString.call(value) === '[object Object]';
|
||||
};
|
||||
//# sourceMappingURL=object.js.map
|
||||
1
node_modules/csv-writer/dist/lib/lang/object.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/lang/object.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"object.js","sourceRoot":"","sources":["../../../src/lib/lang/object.ts"],"names":[],"mappings":";;AAAa,QAAA,QAAQ,GAAG,UAAC,KAAU;IAC/B,OAAA,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,iBAAiB;AAA3D,CAA2D,CAAC"}
|
||||
28
node_modules/csv-writer/dist/lib/lang/promise.js
generated
vendored
Normal file
28
node_modules/csv-writer/dist/lib/lang/promise.js
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
"use strict";
|
||||
var __spreadArrays = (this && this.__spreadArrays) || function () {
|
||||
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
|
||||
for (var r = Array(s), k = 0, i = 0; i < il; i++)
|
||||
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
|
||||
r[k] = a[j];
|
||||
return r;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
function promisify(fn) {
|
||||
return function () {
|
||||
var args = [];
|
||||
for (var _i = 0; _i < arguments.length; _i++) {
|
||||
args[_i] = arguments[_i];
|
||||
}
|
||||
return new Promise(function (resolve, reject) {
|
||||
var nodeCallback = function (err, result) {
|
||||
if (err)
|
||||
reject(err);
|
||||
else
|
||||
resolve(result);
|
||||
};
|
||||
fn.apply(null, __spreadArrays(args, [nodeCallback]));
|
||||
});
|
||||
};
|
||||
}
|
||||
exports.promisify = promisify;
|
||||
//# sourceMappingURL=promise.js.map
|
||||
1
node_modules/csv-writer/dist/lib/lang/promise.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/lang/promise.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"promise.js","sourceRoot":"","sources":["../../../src/lib/lang/promise.ts"],"names":[],"mappings":";;;;;;;;;AAGA,SAAgB,SAAS,CAAC,EAA4B;IAClD,OAAO;QAAC,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAClB,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;YAC/B,IAAM,YAAY,GAAG,UAAC,GAAkB,EAAE,MAAW;gBACjD,IAAI,GAAG;oBAAE,MAAM,CAAC,GAAG,CAAC,CAAC;;oBAChB,OAAO,CAAC,MAAM,CAAC,CAAC;YACzB,CAAC,CAAC;YACF,EAAE,CAAC,KAAK,CAAC,IAAI,iBAAM,IAAI,GAAE,YAAY,GAAE,CAAC;QAC5C,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACN,CAAC;AAVD,8BAUC"}
|
||||
3
node_modules/csv-writer/dist/lib/record.js
generated
vendored
Normal file
3
node_modules/csv-writer/dist/lib/record.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=record.js.map
|
||||
1
node_modules/csv-writer/dist/lib/record.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/lib/record.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"record.js","sourceRoot":"","sources":["../../src/lib/record.ts"],"names":[],"mappings":""}
|
||||
114
node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js
generated
vendored
Normal file
114
node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js
generated
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
"use strict";
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var delimiter_1 = require("../helper/delimiter");
|
||||
var index_1 = require("../../index");
|
||||
var assert_1 = require("assert");
|
||||
describe('ArrayCsvStringifier', function () {
|
||||
var records = [
|
||||
['FIELD_A1', 'FIELD_B1'],
|
||||
['FIELD_A2', 'FIELD_B2']
|
||||
];
|
||||
describe('When field delimiter is comma', generateTestCases());
|
||||
describe('When field delimiter is semicolon', generateTestCases(';'));
|
||||
describe('When field delimiter is neither comma nor semicolon', function () {
|
||||
it('throws an exception', function () {
|
||||
assert_1.throws(function () {
|
||||
index_1.createArrayCsvStringifier({ fieldDelimiter: '/' });
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('When record delimiter is neither LF nor CR+LF', function () {
|
||||
it('throws an exception', function () {
|
||||
assert_1.throws(function () {
|
||||
index_1.createArrayCsvStringifier({ recordDelimiter: '\r' });
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('When records input is an iterable other than an array', function () {
|
||||
var stringifier = index_1.createArrayCsvStringifier({
|
||||
header: ['TITLE_A', 'TITLE_B']
|
||||
});
|
||||
function recordGenerator() {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, records[0]];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [4 /*yield*/, records[1]];
|
||||
case 2:
|
||||
_a.sent();
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}
|
||||
it('converts the records into CSV', function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(recordGenerator()), 'FIELD_A1,FIELD_B1\nFIELD_A2,FIELD_B2\n');
|
||||
});
|
||||
});
|
||||
describe('When `alwaysQuote` flag is set', function () {
|
||||
var stringifier = index_1.createArrayCsvStringifier({
|
||||
header: ['TITLE_A', 'TITLE_B'],
|
||||
alwaysQuote: true
|
||||
});
|
||||
it('quotes all header fields', function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n');
|
||||
});
|
||||
it('quotes all data fields', function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), '"FIELD_A1","FIELD_B1"\n"FIELD_A2","FIELD_B2"\n');
|
||||
});
|
||||
});
|
||||
function generateTestCases(fieldDelimiter) {
|
||||
var delim = delimiter_1.resolveDelimiterChar(fieldDelimiter);
|
||||
return function () {
|
||||
describe('header is specified as a list of column titles', function () {
|
||||
var stringifier = index_1.createArrayCsvStringifier({
|
||||
header: ['TITLE_A', 'TITLE_B'],
|
||||
fieldDelimiter: fieldDelimiter
|
||||
});
|
||||
it("returns a header line with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), "TITLE_A" + delim + "TITLE_B\n");
|
||||
});
|
||||
it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), "FIELD_A1" + delim + "FIELD_B1\nFIELD_A2" + delim + "FIELD_B2\n");
|
||||
});
|
||||
});
|
||||
describe('header is not specified', function () {
|
||||
var stringifier = index_1.createArrayCsvStringifier({ fieldDelimiter: fieldDelimiter });
|
||||
it('returns null for header line', function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), null);
|
||||
});
|
||||
it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), "FIELD_A1" + delim + "FIELD_B1\nFIELD_A2" + delim + "FIELD_B2\n");
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
//# sourceMappingURL=array.test.js.map
|
||||
1
node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"array.test.js","sourceRoot":"","sources":["../../../src/test/csv-stringifiers/array.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,iDAAyD;AACzD,qCAAsD;AACtD,iCAA2C;AAE3C,QAAQ,CAAC,qBAAqB,EAAE;IAC5B,IAAM,OAAO,GAAG;QACZ,CAAC,UAAU,EAAE,UAAU,CAAC;QACxB,CAAC,UAAU,EAAE,UAAU,CAAC;KAC3B,CAAC;IAEF,QAAQ,CAAC,+BAA+B,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAE/D,QAAQ,CAAC,mCAAmC,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAEtE,QAAQ,CAAC,qDAAqD,EAAE;QAC5D,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,iCAAyB,CAAC,EAAC,cAAc,EAAE,GAAG,EAAC,CAAC,CAAC;YACrD,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,+CAA+C,EAAE;QACtD,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,iCAAyB,CAAC,EAAC,eAAe,EAAE,IAAI,EAAC,CAAC,CAAC;YACvD,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,uDAAuD,EAAE;QAC9D,IAAM,WAAW,GAAG,iCAAyB,CAAC;YAC1C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;SACjC,CAAC,CAAC;QACH,SAAW,eAAe;;;4BACtB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;wBACjB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;;;;SACpB;QAED,EAAE,CAAC,+BAA+B,EAAE;YAChC,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,eAAe,EAAE,CAAC,EAC/C,wCAAwC,CAC3C,CAAC;QACN,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,WAAW,GAAG,iCAAyB,CAAC;YAC1C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;YAC9B,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,0BAA0B,EAAE;YAC3B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,uBAAuB,CAAC,CAAC;QACxE,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,wBAAwB,EAAE;YACzB,oBAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gDAAgD,CAAC,CAAC;QACzG,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,SAAS,iBAAiB,CAAC,cAAuB;QAC9C,IAAM,KAAK,GAAG,gCAAoB,CAAC,cAAc,CAAC,CAAC;QACnD,OAAO;YACH,QAAQ,CAAC,gDAAgD,EAAE;gBACvD,IAAM,WAAW,GAAG,iCAAyB,CAAC;oBAC1C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,qDAAkD,KAAK,OAAG,EAAE;oBAC3D,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,YAAU,KAAK,cAAW,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP
,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;YAEH,QAAQ,CAAC,yBAAyB,EAAE;gBAChC,IAAM,WAAW,GAAG,iCAAyB,CAAC,EAAC,cAAc,gBAAA,EAAC,CAAC,CAAC;gBAEhE,EAAE,CAAC,8BAA8B,EAAE;oBAC/B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,IAAI,CAAC,CAAC;gBACrD,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;IACN,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
156
node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js
generated
vendored
Normal file
156
node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js
generated
vendored
Normal file
@ -0,0 +1,156 @@
|
||||
"use strict";
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var delimiter_1 = require("../helper/delimiter");
|
||||
var index_1 = require("../../index");
|
||||
var assert_1 = require("assert");
|
||||
describe('ObjectCsvStringifier', function () {
|
||||
var records = [
|
||||
{ FIELD_A: 'VALUE_A1', FIELD_B: 'VALUE_B1' },
|
||||
{ FIELD_A: 'VALUE_A2', FIELD_B: 'VALUE_B2', OTHERS: { FIELD_C: 'VALUE_C2' } }
|
||||
];
|
||||
describe('When field delimiter is comma', generateTestCases());
|
||||
describe('When field delimiter is semicolon', generateTestCases(';'));
|
||||
describe('When field delimiter is neither comma nor semicolon', function () {
|
||||
it('throws an exception', function () {
|
||||
assert_1.throws(function () {
|
||||
index_1.createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B'],
|
||||
fieldDelimiter: '/'
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('When record delimiter is neither LF nor CR+LF', function () {
|
||||
it('throws an exception', function () {
|
||||
assert_1.throws(function () {
|
||||
index_1.createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B'],
|
||||
recordDelimiter: '\r'
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('When records input is an iterable other than an array', function () {
|
||||
var stringifier = index_1.createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B']
|
||||
});
|
||||
function recordGenerator() {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, records[0]];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [4 /*yield*/, records[1]];
|
||||
case 2:
|
||||
_a.sent();
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}
|
||||
it('converts the records into CSV', function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(recordGenerator()), 'VALUE_A1,VALUE_B1\nVALUE_A2,VALUE_B2\n');
|
||||
});
|
||||
});
|
||||
describe('When `alwaysQuote` flag is set', function () {
|
||||
var stringifier = index_1.createObjectCsvStringifier({
|
||||
header: [
|
||||
{ id: 'FIELD_A', title: 'TITLE_A' },
|
||||
{ id: 'FIELD_B', title: 'TITLE_B' }
|
||||
],
|
||||
alwaysQuote: true
|
||||
});
|
||||
it('quotes all header fields', function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n');
|
||||
});
|
||||
it('quotes all data fields', function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), '"VALUE_A1","VALUE_B1"\n"VALUE_A2","VALUE_B2"\n');
|
||||
});
|
||||
});
|
||||
describe('When `headerIdDelimiter` is set', function () {
|
||||
var stringifier = index_1.createObjectCsvStringifier({
|
||||
header: [
|
||||
{ id: 'FIELD_A', title: 'TITLE_A' },
|
||||
{ id: 'OTHERS/FIELD_C', title: 'TITLE_C' }
|
||||
],
|
||||
headerIdDelimiter: '/'
|
||||
});
|
||||
it('uses the title as is', function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), 'TITLE_A,TITLE_C\n');
|
||||
});
|
||||
it('picks up a value in nested objects', function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), 'VALUE_A1,\nVALUE_A2,VALUE_C2\n');
|
||||
});
|
||||
});
|
||||
function generateTestCases(fieldDelimiter) {
|
||||
var delim = delimiter_1.resolveDelimiterChar(fieldDelimiter);
|
||||
return function () {
|
||||
describe('header is specified with title', function () {
|
||||
var stringifier = index_1.createObjectCsvStringifier({
|
||||
header: [
|
||||
{ id: 'FIELD_A', title: 'TITLE_A' },
|
||||
{ id: 'FIELD_B', title: 'TITLE_B' }
|
||||
],
|
||||
fieldDelimiter: fieldDelimiter
|
||||
});
|
||||
it("returns a header line with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), "TITLE_A" + delim + "TITLE_B\n");
|
||||
});
|
||||
it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), "VALUE_A1" + delim + "VALUE_B1\nVALUE_A2" + delim + "VALUE_B2\n");
|
||||
});
|
||||
});
|
||||
describe('header is specified without title', function () {
|
||||
var stringifier = index_1.createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B'],
|
||||
fieldDelimiter: fieldDelimiter
|
||||
});
|
||||
it('returns null for header line', function () {
|
||||
assert_1.strictEqual(stringifier.getHeaderString(), null);
|
||||
});
|
||||
it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), "VALUE_A1" + delim + "VALUE_B1\nVALUE_A2" + delim + "VALUE_B2\n");
|
||||
});
|
||||
});
|
||||
describe('header columns are given with reverse order', function () {
|
||||
var stringifier = index_1.createObjectCsvStringifier({
|
||||
header: [
|
||||
{ id: 'FIELD_B', title: 'TITLE_B' },
|
||||
{ id: 'FIELD_A', title: 'TITLE_A' }
|
||||
],
|
||||
fieldDelimiter: fieldDelimiter
|
||||
});
|
||||
it("layouts fields with the order of headers given with field separated by \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.stringifyRecords(records), "VALUE_B1" + delim + "VALUE_A1\nVALUE_B2" + delim + "VALUE_A2\n");
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
//# sourceMappingURL=object.test.js.map
|
||||
1
node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"object.test.js","sourceRoot":"","sources":["../../../src/test/csv-stringifiers/object.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,iDAAyD;AACzD,qCAAuD;AACvD,iCAA2C;AAE3C,QAAQ,CAAC,sBAAsB,EAAE;IAC7B,IAAM,OAAO,GAAG;QACZ,EAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU,EAAC;QAC1C,EAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,EAAC,OAAO,EAAE,UAAU,EAAC,EAAC;KAC5E,CAAC;IAEF,QAAQ,CAAC,+BAA+B,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAE/D,QAAQ,CAAC,mCAAmC,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAEtE,QAAQ,CAAC,qDAAqD,EAAE;QAC5D,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,kCAA0B,CAAC;oBACvB,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,cAAc,EAAE,GAAG;iBACtB,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,+CAA+C,EAAE;QACtD,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,kCAA0B,CAAC;oBACvB,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,eAAe,EAAE,IAAI;iBACxB,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,uDAAuD,EAAE;QAC9D,IAAM,WAAW,GAAG,kCAA0B,CAAC;YAC3C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;SACjC,CAAC,CAAC;QACH,SAAW,eAAe;;;4BACtB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;wBACjB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;;;;SACpB;QAED,EAAE,CAAC,+BAA+B,EAAE;YAChC,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,eAAe,EAAE,CAAC,EAC/C,wCAAwC,CAC3C,CAAC;QACN,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,WAAW,GAAG,kCAA0B,CAAC;YAC3C,MAAM,EAAE;gBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;gBACjC,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;aACpC;YACD,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,0BAA0B,EAAE;YAC3B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,uBAAuB,CAAC,CAAC;QACxE,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,wBAAwB,EAAE;YACzB,oBAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gDAAgD,CAAC,CAAC;QACzG,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,iCAAiC,EAAE;QACxC,IAAM,WAAW,GAAG,kCAA0B,CAAC;YAC3C,MAAM,EAAE;gBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;gBACjC,EAAC,EAAE,EAAE,gBAA
gB,EAAE,KAAK,EAAE,SAAS,EAAC;aAC3C;YACD,iBAAiB,EAAE,GAAG;SACzB,CAAC,CAAC;QAEH,EAAE,CAAC,sBAAsB,EAAE;YACvB,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,mBAAmB,CAAC,CAAC;QACpE,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,oCAAoC,EAAE;YACrC,oBAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gCAAgC,CAAC,CAAC;QACzF,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,SAAS,iBAAiB,CAAC,cAAuB;QAC9C,IAAM,KAAK,GAAG,gCAAoB,CAAC,cAAc,CAAC,CAAC;QACnD,OAAO;YACH,QAAQ,CAAC,gCAAgC,EAAE;gBACvC,IAAM,WAAW,GAAG,kCAA0B,CAAC;oBAC3C,MAAM,EAAE;wBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;wBACjC,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;qBACpC;oBACD,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,qDAAkD,KAAK,OAAG,EAAE;oBAC3D,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,YAAU,KAAK,cAAW,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;YAEH,QAAQ,CAAC,mCAAmC,EAAE;gBAC1C,IAAM,WAAW,GAAG,kCAA0B,CAAC;oBAC3C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,8BAA8B,EAAE;oBAC/B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,IAAI,CAAC,CAAC;gBACrD,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;YAEH,QAAQ,CAAC,6CAA6C,EAAE;gBACpD,IAAM,WAAW,GAAG,kCAA0B,CAAC;oBAC3C,MAAM,EAAE;wBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;wBACjC,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;qBACpC;oBACD,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,8EAA2E,KAAK,OAAG,EAAE;oBACpF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;IACN,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
79
node_modules/csv-writer/dist/test/field-stringifier.test.js
generated
vendored
Normal file
79
node_modules/csv-writer/dist/test/field-stringifier.test.js
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var delimiter_1 = require("./helper/delimiter");
|
||||
var field_stringifier_1 = require("../lib/field-stringifier");
|
||||
var assert_1 = require("assert");
|
||||
describe('DefaultFieldStringifier', function () {
|
||||
describe('When field delimiter is comma', generateTestCases(','));
|
||||
describe('When field delimiter is semicolon', generateTestCases(';'));
|
||||
describe('When all fields needs to be quoted', function () {
|
||||
var stringifier = field_stringifier_1.createFieldStringifier(',', true);
|
||||
it('quotes a field', function () {
|
||||
assert_1.strictEqual(stringifier.stringify('VALUE'), '"VALUE"');
|
||||
});
|
||||
it('does not quote a field of value undefined', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(), '');
|
||||
});
|
||||
it('does not quote a field of value null', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(null), '');
|
||||
});
|
||||
it('does not quote a field of value empty string', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(''), '');
|
||||
});
|
||||
});
|
||||
function generateTestCases(fieldDelimiter) {
|
||||
var delim = delimiter_1.resolveDelimiterChar(fieldDelimiter);
|
||||
return function () {
|
||||
var stringifier = field_stringifier_1.createFieldStringifier(fieldDelimiter);
|
||||
it('returns the same string', function () {
|
||||
assert_1.strictEqual(stringifier.stringify('VALUE'), 'VALUE');
|
||||
});
|
||||
it('preserves the whitespace characters', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(' VALUE\tA '), ' VALUE\tA ');
|
||||
});
|
||||
it("wraps a field value with double quotes if the field contains \"" + delim + "\"", function () {
|
||||
assert_1.strictEqual(stringifier.stringify("VALUE" + delim + "A"), "\"VALUE" + delim + "A\"");
|
||||
});
|
||||
it('wraps a field value with double quotes if the field contains newline', function () {
|
||||
assert_1.strictEqual(stringifier.stringify('VALUE\nA'), '"VALUE\nA"');
|
||||
});
|
||||
it('wraps a field value with double quotes and escape the double quotes if they are used in the field', function () {
|
||||
assert_1.strictEqual(stringifier.stringify('VALUE"A'), '"VALUE""A"');
|
||||
});
|
||||
it('escapes double quotes even if double quotes are only on the both edges of the field', function () {
|
||||
assert_1.strictEqual(stringifier.stringify('"VALUE"'), '"""VALUE"""');
|
||||
});
|
||||
it('converts a number into a string', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(1), '1');
|
||||
});
|
||||
it('converts undefined into an empty string', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(), '');
|
||||
});
|
||||
it('converts null into an empty string', function () {
|
||||
assert_1.strictEqual(stringifier.stringify(null), '');
|
||||
});
|
||||
it('converts an object into toString-ed value', function () {
|
||||
var obj = {
|
||||
name: 'OBJECT_NAME',
|
||||
toString: function () { return "Name: " + this.name; }
|
||||
};
|
||||
assert_1.strictEqual(stringifier.stringify(obj), 'Name: OBJECT_NAME');
|
||||
});
|
||||
it("wraps a toString-ed field value with double quote if the value contains \"" + delim + "\"", function () {
|
||||
var obj = {
|
||||
name: "OBJECT" + delim + "NAME",
|
||||
toString: function () { return "Name: " + this.name; }
|
||||
};
|
||||
assert_1.strictEqual(stringifier.stringify(obj), "\"Name: OBJECT" + delim + "NAME\"");
|
||||
});
|
||||
it('escapes double quotes in a toString-ed field value if the value has double quotes', function () {
|
||||
var obj = {
|
||||
name: 'OBJECT_NAME"',
|
||||
toString: function () { return "Name: " + this.name; }
|
||||
};
|
||||
assert_1.strictEqual(stringifier.stringify(obj), '"Name: OBJECT_NAME"""');
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
//# sourceMappingURL=field-stringifier.test.js.map
|
||||
1
node_modules/csv-writer/dist/test/field-stringifier.test.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/field-stringifier.test.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"field-stringifier.test.js","sourceRoot":"","sources":["../../src/test/field-stringifier.test.ts"],"names":[],"mappings":";;AAAA,gDAAwD;AACxD,8DAAgE;AAChE,iCAAmC;AAEnC,QAAQ,CAAC,yBAAyB,EAAE;IAEhC,QAAQ,CAAC,+BAA+B,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAElE,QAAQ,CAAC,mCAAmC,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAEtE,QAAQ,CAAC,oCAAoC,EAAE;QAC3C,IAAM,WAAW,GAAG,0CAAsB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAEtD,EAAE,CAAC,gBAAgB,EAAE;YACjB,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC;QAC3D,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,2CAA2C,EAAE;YAC5C,oBAAW,CAAC,WAAW,CAAC,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7C,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,sCAAsC,EAAE;YACvC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,8CAA8C,EAAE;YAC/C,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;QAC/C,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,SAAS,iBAAiB,CAAC,cAAsB;QAC7C,IAAM,KAAK,GAAG,gCAAoB,CAAC,cAAc,CAAC,CAAC;QACnD,OAAO;YACH,IAAM,WAAW,GAAG,0CAAsB,CAAC,cAAc,CAAC,CAAC;YAE3D,EAAE,CAAC,yBAAyB,EAAE;gBAC1B,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;YACzD,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,qCAAqC,EAAE;gBACtC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,CAAC;YACrE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,oEAAiE,KAAK,OAAG,EAAE;gBAC1E,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,UAAQ,KAAK,MAAG,CAAC,EAAE,YAAS,KAAK,QAAI,CAAC,CAAC;YAC7E,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,sEAAsE,EAAE;gBACvE,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,YAAY,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,mGAAmG,EAAE;gBACpG,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE,YAAY,CAAC,CAAC;YAChE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,qFAAqF,EAAE;gBACtF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE,aAAa,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,iCAAiC,EAAE;gBAClC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;YAC/C,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,yCAAyC,EAAE;gBAC1C,oBAAW,CAAC,WAAW,CAAC,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;YAC7C,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,oCAAoC,EAAE;gBA
CrC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;YACjD,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,2CAA2C,EAAE;gBAC5C,IAAM,GAAG,GAAG;oBACR,IAAI,EAAE,aAAa;oBACnB,QAAQ,EAAE,cAAc,OAAO,WAAS,IAAI,CAAC,IAAM,CAAC,CAAC,CAAC;iBACzD,CAAC;gBACF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,mBAAmB,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,+EAA4E,KAAK,OAAG,EAAE;gBACrF,IAAM,GAAG,GAAG;oBACR,IAAI,EAAE,WAAS,KAAK,SAAM;oBAC1B,QAAQ,EAAE,cAAc,OAAO,WAAS,IAAI,CAAC,IAAM,CAAC,CAAC,CAAC;iBACzD,CAAC;gBACF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,mBAAgB,KAAK,WAAO,CAAC,CAAC;YAC1E,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,mFAAmF,EAAE;gBACpF,IAAM,GAAG,GAAG;oBACR,IAAI,EAAE,cAAc;oBACpB,QAAQ,EAAE,cAAc,OAAO,WAAS,IAAI,CAAC,IAAM,CAAC,CAAC,CAAC;iBACzD,CAAC;gBACF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,uBAAuB,CAAC,CAAC;YACrE,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;IACN,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
13
node_modules/csv-writer/dist/test/helper.js
generated
vendored
Normal file
13
node_modules/csv-writer/dist/test/helper.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var assert_1 = require("assert");
|
||||
var fs_1 = require("fs");
|
||||
exports.testFilePath = function (id) { return "./test-tmp/" + id + ".csv"; };
|
||||
exports.assertFile = function (path, expectedContents, encoding) {
|
||||
var actualContents = fs_1.readFileSync(path, encoding || 'utf8');
|
||||
assert_1.strictEqual(actualContents, expectedContents);
|
||||
};
|
||||
exports.assertRejected = function (p, message) {
|
||||
return p.then(function () { return new Error('Should not have been called'); }, function (e) { assert_1.strictEqual(e.message, message); });
|
||||
};
|
||||
//# sourceMappingURL=helper.js.map
|
||||
1
node_modules/csv-writer/dist/test/helper.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/helper.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"helper.js","sourceRoot":"","sources":["../../src/test/helper.ts"],"names":[],"mappings":";;AAAA,iCAAmC;AACnC,yBAAgC;AAEnB,QAAA,YAAY,GAAG,UAAC,EAAU,IAAK,OAAA,gBAAc,EAAE,SAAM,EAAtB,CAAsB,CAAC;AAEtD,QAAA,UAAU,GAAG,UAAC,IAAY,EAAE,gBAAwB,EAAE,QAAiB;IAChF,IAAM,cAAc,GAAG,iBAAY,CAAC,IAAI,EAAE,QAAQ,IAAI,MAAM,CAAC,CAAC;IAC9D,oBAAW,CAAC,cAAc,EAAE,gBAAgB,CAAC,CAAC;AAClD,CAAC,CAAC;AAEW,QAAA,cAAc,GAAG,UAAC,CAAe,EAAE,OAAe;IAC3D,OAAO,CAAC,CAAC,IAAI,CACT,cAAM,OAAA,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAxC,CAAwC,EAC9C,UAAC,CAAQ,IAAO,oBAAW,CAAC,CAAC,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,CACrD,CAAC;AACN,CAAC,CAAC"}
|
||||
10
node_modules/csv-writer/dist/test/helper/delimiter.js
generated
vendored
Normal file
10
node_modules/csv-writer/dist/test/helper/delimiter.js
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.resolveDelimiterChar = function (char) {
|
||||
if (char === ',' || char === ';')
|
||||
return char;
|
||||
if (typeof char === 'undefined')
|
||||
return ',';
|
||||
throw new Error('Invalid field delimiter');
|
||||
};
|
||||
//# sourceMappingURL=delimiter.js.map
|
||||
1
node_modules/csv-writer/dist/test/helper/delimiter.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/helper/delimiter.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"delimiter.js","sourceRoot":"","sources":["../../../src/test/helper/delimiter.ts"],"names":[],"mappings":";;AACa,QAAA,oBAAoB,GAAG,UAAC,IAAa;IAC9C,IAAI,IAAI,KAAK,GAAG,IAAI,IAAI,KAAK,GAAG;QAAE,OAAO,IAAI,CAAC;IAC9C,IAAI,OAAO,IAAI,KAAK,WAAW;QAAE,OAAO,GAAG,CAAC;IAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;AAC/C,CAAC,CAAC"}
|
||||
76
node_modules/csv-writer/dist/test/lang/promise.test.js
generated
vendored
Normal file
76
node_modules/csv-writer/dist/test/lang/promise.test.js
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var promise_1 = require("../../lib/lang/promise");
|
||||
var assert_1 = require("assert");
|
||||
var helper_1 = require("../helper");
|
||||
describe('Promise', function () {
|
||||
var greetAsync = function (name, callback) {
|
||||
setTimeout(function () {
|
||||
if (name === 'foo')
|
||||
callback(null, "Hello, " + name + "!");
|
||||
else
|
||||
callback(new Error("We don't know " + name));
|
||||
}, 0);
|
||||
};
|
||||
var promisifiedFn = promise_1.promisify(greetAsync);
|
||||
it('promisify node style callback', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
var _a;
|
||||
return __generator(this, function (_b) {
|
||||
switch (_b.label) {
|
||||
case 0:
|
||||
_a = assert_1.strictEqual;
|
||||
return [4 /*yield*/, promisifiedFn('foo')];
|
||||
case 1:
|
||||
_a.apply(void 0, [_b.sent(), 'Hello, foo!']);
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
it('raise an error for error', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, helper_1.assertRejected(promisifiedFn('bar'), "We don't know bar")];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
//# sourceMappingURL=promise.test.js.map
|
||||
1
node_modules/csv-writer/dist/test/lang/promise.test.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/lang/promise.test.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"promise.test.js","sourceRoot":"","sources":["../../../src/test/lang/promise.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,kDAAiD;AACjD,iCAAmC;AACnC,oCAAyC;AAEzC,QAAQ,CAAC,SAAS,EAAE;IAChB,IAAM,UAAU,GAAG,UAAC,IAAY,EAAE,QAAsD;QACpF,UAAU,CAAC;YACP,IAAI,IAAI,KAAK,KAAK;gBAAE,QAAQ,CAAC,IAAI,EAAE,YAAU,IAAI,MAAG,CAAC,CAAC;;gBACjD,QAAQ,CAAC,IAAI,KAAK,CAAC,mBAAiB,IAAM,CAAC,CAAC,CAAC;QACtD,CAAC,EAAE,CAAC,CAAC,CAAC;IACV,CAAC,CAAC;IACF,IAAM,aAAa,GAAG,mBAAS,CAAC,UAAU,CAAC,CAAC;IAE5C,EAAE,CAAC,+BAA+B,EAAE;;;;;oBAChC,KAAA,oBAAW,CAAA;oBAAC,qBAAM,aAAa,CAAC,KAAK,CAAC,EAAA;;oBAAtC,kBAAY,SAA0B,EAAE,aAAa,EAAC,CAAC;;;;SAC1D,CAAC,CAAC;IAEH,EAAE,CAAC,0BAA0B,EAAE;;;wBAC3B,qBAAM,uBAAc,CAAC,aAAa,CAAC,KAAK,CAAC,EAAE,mBAAmB,CAAC,EAAA;;oBAA/D,SAA+D,CAAC;;;;SACnE,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
|
||||
209
node_modules/csv-writer/dist/test/write-array-records.test.js
generated
vendored
Normal file
209
node_modules/csv-writer/dist/test/write-array-records.test.js
generated
vendored
Normal file
@ -0,0 +1,209 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var helper_1 = require("./helper");
|
||||
var fs_1 = require("fs");
|
||||
var index_1 = require("../index");
|
||||
describe('Write array records into CSV', function () {
|
||||
var makeFilePath = function (id) { return helper_1.testFilePath("array-" + id); };
|
||||
var records = [
|
||||
['Bob', 'French'],
|
||||
['Mary', 'English']
|
||||
];
|
||||
describe('When only path is specified', function () {
|
||||
var filePath = makeFilePath('minimum');
|
||||
var writer;
|
||||
beforeEach(function () {
|
||||
writer = index_1.createArrayCsvWriter({ path: filePath });
|
||||
});
|
||||
it('writes records to a new file', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
it('appends records when requested to write to the same file', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords([records[0]])];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [4 /*yield*/, writer.writeRecords([records[1]])];
|
||||
case 2:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When field header is given', function () {
|
||||
var filePath = makeFilePath('header');
|
||||
var writer;
|
||||
beforeEach(function () {
|
||||
writer = index_1.createArrayCsvWriter({
|
||||
path: filePath,
|
||||
header: ['NAME', 'LANGUAGE']
|
||||
});
|
||||
});
|
||||
it('writes a header', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
it('appends records without headers', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords([records[0]])];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [4 /*yield*/, writer.writeRecords([records[1]])];
|
||||
case 2:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When `append` flag is specified', function () {
|
||||
var filePath = makeFilePath('append');
|
||||
fs_1.writeFileSync(filePath, 'Mike,German\n', 'utf8');
|
||||
var writer = index_1.createArrayCsvWriter({
|
||||
path: filePath,
|
||||
append: true
|
||||
});
|
||||
it('do not overwrite the existing contents and appends records to them', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords([records[1]])];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Mike,German\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When encoding is specified', function () {
|
||||
var filePath = makeFilePath('encoding');
|
||||
var writer = index_1.createArrayCsvWriter({
|
||||
path: filePath,
|
||||
encoding: 'utf16le'
|
||||
});
|
||||
it('writes to a file with the specified encoding', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When semicolon is specified as a field delimiter', function () {
|
||||
var filePath = makeFilePath('field-delimiter');
|
||||
var writer = index_1.createArrayCsvWriter({
|
||||
path: filePath,
|
||||
header: ['NAME', 'LANGUAGE'],
|
||||
fieldDelimiter: ';'
|
||||
});
|
||||
it('uses semicolon instead of comma to separate fields', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When newline is specified', function () {
|
||||
var filePath = makeFilePath('newline');
|
||||
var writer = index_1.createArrayCsvWriter({
|
||||
path: filePath,
|
||||
recordDelimiter: '\r\n'
|
||||
});
|
||||
it('writes to a file with the specified newline character', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\r\nMary,English\r\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When `alwaysQuote` flag is set', function () {
|
||||
var filePath = makeFilePath('always-quote');
|
||||
var writer = index_1.createArrayCsvWriter({
|
||||
path: filePath,
|
||||
header: ['NAME', 'LANGUAGE'],
|
||||
alwaysQuote: true
|
||||
});
|
||||
it('quotes all fields', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=write-array-records.test.js.map
|
||||
1
node_modules/csv-writer/dist/test/write-array-records.test.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/write-array-records.test.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"write-array-records.test.js","sourceRoot":"","sources":["../../src/test/write-array-records.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,mCAAkD;AAElD,yBAAiC;AACjC,kCAA8C;AAE9C,QAAQ,CAAC,8BAA8B,EAAE;IAErC,IAAM,YAAY,GAAG,UAAC,EAAU,IAAK,OAAA,qBAAY,CAAC,WAAS,EAAI,CAAC,EAA3B,CAA2B,CAAC;IACjE,IAAM,OAAO,GAAG;QACZ,CAAC,KAAK,EAAE,QAAQ,CAAC;QACjB,CAAC,MAAM,EAAE,SAAS,CAAC;KACtB,CAAC;IAEF,QAAQ,CAAC,6BAA6B,EAAE;QACpC,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAI,MAA2B,CAAC;QAEhC,UAAU,CAAC;YACP,MAAM,GAAG,4BAAoB,CAAC,EAAC,IAAI,EAAE,QAAQ,EAAC,CAAC,CAAC;QACpD,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,8BAA8B,EAAE;;;4BAC/B,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;QAEH,EAAE,CAAC,0DAA0D,EAAE;;;4BAC3D,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,4BAA4B,EAAE;QACnC,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,IAAI,MAA2B,CAAC;QAEhC,UAAU,CAAC;YACP,MAAM,GAAG,4BAAoB,CAAC;gBAC1B,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;aAC/B,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,iBAAiB,EAAE;;;4BAClB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;QAEH,EAAE,CAAC,iCAAiC,EAAE;;;4BAClC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,iCAAiC,EAAE;QACxC,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,kBAAa,CAAC,QAAQ,EAAE,eAAe,EAAE,MAAM,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,IAAI;SACf,CAAC,CAAC;QAEH,EAAE,CAAC
,oEAAoE,EAAE;;;4BACrE,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,6BAA6B,CAAC,CAAC;;;;aACvD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,4BAA4B,EAAE;QACnC,IAAM,QAAQ,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;QAC1C,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,QAAQ,EAAE,SAAS;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,8CAA8C,EAAE;;;4BAC/C,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,EAAE,SAAS,CAAC,CAAC;;;;aACjE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,kDAAkD,EAAE;QACzD,IAAM,QAAQ,GAAG,YAAY,CAAC,iBAAiB,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;YAC5B,cAAc,EAAE,GAAG;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,oDAAoD,EAAE;;;4BACrD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,2BAA2B,EAAE;QAClC,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,eAAe,EAAE,MAAM;SAC1B,CAAC,CAAC;QAEH,EAAE,CAAC,uDAAuD,EAAE;;;4BACxD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,gCAAgC,CAAC,CAAC;;;;aAC1D,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,QAAQ,GAAG,YAAY,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;YAC5B,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,mBAAmB,EAAE;;;4BACpB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,uDAAuD,CAAC,CAAC;;;;aACjF,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
|
||||
252
node_modules/csv-writer/dist/test/write-object-records.test.js
generated
vendored
Normal file
252
node_modules/csv-writer/dist/test/write-object-records.test.js
generated
vendored
Normal file
@ -0,0 +1,252 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var helper_1 = require("./helper");
|
||||
var fs_1 = require("fs");
|
||||
var index_1 = require("../index");
|
||||
describe('Write object records into CSV', function () {
|
||||
var makeFilePath = function (id) { return helper_1.testFilePath("object-" + id); };
|
||||
var records = [
|
||||
{ name: 'Bob', lang: 'French', address: { country: 'France' } },
|
||||
{ name: 'Mary', lang: 'English' }
|
||||
];
|
||||
describe('When only path and header ids are given', function () {
|
||||
var filePath = makeFilePath('minimum');
|
||||
var writer;
|
||||
beforeEach(function () {
|
||||
writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang']
|
||||
});
|
||||
});
|
||||
it('writes records to a new file', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
it('appends records when requested to write to the same file', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords([records[0]])];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [4 /*yield*/, writer.writeRecords([records[1]])];
|
||||
case 2:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When header ids are given with reverse order', function () {
|
||||
var filePath = makeFilePath('column-order');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['lang', 'name']
|
||||
});
|
||||
it('also writes columns with reverse order', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'French,Bob\nEnglish,Mary\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When field header is given with titles', function () {
|
||||
var filePath = makeFilePath('header');
|
||||
var writer;
|
||||
beforeEach(function () {
|
||||
writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{ id: 'name', title: 'NAME' }, { id: 'lang', title: 'LANGUAGE' }]
|
||||
});
|
||||
});
|
||||
it('writes a header', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
it('appends records without headers', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords([records[0]])];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [4 /*yield*/, writer.writeRecords([records[1]])];
|
||||
case 2:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When `append` flag is specified', function () {
|
||||
var filePath = makeFilePath('append');
|
||||
fs_1.writeFileSync(filePath, 'Mike,German\n', 'utf8');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang'],
|
||||
append: true
|
||||
});
|
||||
it('do not overwrite the existing contents and appends records to them', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords([records[1]])];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Mike,German\nMary,English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When encoding is specified', function () {
|
||||
var filePath = makeFilePath('encoding');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang'],
|
||||
encoding: 'utf16le'
|
||||
});
|
||||
it('writes to a file with the specified encoding', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When semicolon is specified as a field delimiter', function () {
|
||||
var filePath = makeFilePath('field-delimiter');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{ id: 'name', title: 'NAME' }, { id: 'lang', title: 'LANGUAGE' }],
|
||||
fieldDelimiter: ';'
|
||||
});
|
||||
it('uses semicolon instead of comma to separate fields', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When newline is specified', function () {
|
||||
var filePath = makeFilePath('newline');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang'],
|
||||
recordDelimiter: '\r\n'
|
||||
});
|
||||
it('writes to a file with the specified newline character', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'Bob,French\r\nMary,English\r\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When `alwaysQuote` flag is set', function () {
|
||||
var filePath = makeFilePath('always-quote');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{ id: 'name', title: 'NAME' }, { id: 'lang', title: 'LANGUAGE' }],
|
||||
alwaysQuote: true
|
||||
});
|
||||
it('quotes all fields', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
describe('When `headerIdDelimiter` flag is set', function () {
|
||||
var filePath = makeFilePath('nested');
|
||||
var writer = index_1.createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{ id: 'name', title: 'NAME' }, { id: 'address.country', title: 'COUNTRY' }],
|
||||
headerIdDelimiter: '.'
|
||||
});
|
||||
it('breaks keys into key paths', function () { return __awaiter(void 0, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, writer.writeRecords(records)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
helper_1.assertFile(filePath, 'NAME,COUNTRY\nBob,France\nMary,\n');
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=write-object-records.test.js.map
|
||||
1
node_modules/csv-writer/dist/test/write-object-records.test.js.map
generated
vendored
Normal file
1
node_modules/csv-writer/dist/test/write-object-records.test.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"write-object-records.test.js","sourceRoot":"","sources":["../../src/test/write-object-records.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,mCAAkD;AAElD,yBAAiC;AACjC,kCAA+C;AAG/C,QAAQ,CAAC,+BAA+B,EAAE;IAEtC,IAAM,YAAY,GAAG,UAAC,EAAU,IAAK,OAAA,qBAAY,CAAC,YAAU,EAAI,CAAC,EAA5B,CAA4B,CAAC;IAClE,IAAM,OAAO,GAAG;QACZ,EAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAC,OAAO,EAAE,QAAQ,EAAC,EAAC;QAC3D,EAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,SAAS,EAAC;KAClC,CAAC;IAEF,QAAQ,CAAC,yCAAyC,EAAE;QAChD,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAI,MAAiC,CAAC;QAEtC,UAAU,CAAC;YACP,MAAM,GAAG,6BAAqB,CAAC;gBAC3B,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;aAC3B,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,8BAA8B,EAAE;;;4BAC/B,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;QAEH,EAAE,CAAC,0DAA0D,EAAE;;;4BAC3D,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,8CAA8C,EAAE;QACrD,IAAM,QAAQ,GAAG,YAAY,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;SAC3B,CAAC,CAAC;QAEH,EAAE,CAAC,wCAAwC,EAAE;;;4BACzC,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,wCAAwC,EAAE;QAC/C,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,IAAI,MAAiC,CAAC;QAEtC,UAAU,CAAC;YACP,MAAM,GAAG,6BAAqB,CAAC;gBAC3B,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAC,CAAC;aACzE,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,iBAAiB,EAAE;;;4BAClB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAA
Q,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;QAEH,EAAE,CAAC,iCAAiC,EAAE;;;4BAClC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,iCAAiC,EAAE;QACxC,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,kBAAa,CAAC,QAAQ,EAAE,eAAe,EAAE,MAAM,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACxB,MAAM,EAAE,IAAI;SACf,CAAC,CAAC;QAEH,EAAE,CAAC,oEAAoE,EAAE;;;4BACrE,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,6BAA6B,CAAC,CAAC;;;;aACvD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,4BAA4B,EAAE;QACnC,IAAM,QAAQ,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;QAC1C,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACxB,QAAQ,EAAE,SAAS;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,8CAA8C,EAAE;;;4BAC/C,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,EAAE,SAAS,CAAC,CAAC;;;;aACjE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,kDAAkD,EAAE;QACzD,IAAM,QAAQ,GAAG,YAAY,CAAC,iBAAiB,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAC,CAAC;YACtE,cAAc,EAAE,GAAG;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,oDAAoD,EAAE;;;4BACrD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,2BAA2B,EAAE;QAClC,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACxB,eAAe,EAAE,MAAM;SAC1B,CAAC,CAAC;QAEH,EAAE,CAAC,uDAAuD,EAAE;;;4BACxD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,gCAAgC,CAAC,
CAAC;;;;aAC1D,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,QAAQ,GAAG,YAAY,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAC,CAAC;YACtE,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,mBAAmB,EAAE;;;4BACpB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,uDAAuD,CAAC,CAAC;;;;aACjF,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,sCAAsC,EAAE;QAC7C,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,iBAAiB,EAAE,KAAK,EAAE,SAAS,EAAC,CAAC;YAChF,iBAAiB,EAAE,GAAG;SACzB,CAAC,CAAC;QAEH,EAAE,CAAC,4BAA4B,EAAE;;;4BAC7B,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,mCAAmC,CAAC,CAAC;;;;aAC7D,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}
|
||||
43
node_modules/csv-writer/package.json
generated
vendored
Normal file
43
node_modules/csv-writer/package.json
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "csv-writer",
|
||||
"version": "1.6.0",
|
||||
"description": "Convert objects/arrays into a CSV string or write them into a CSV file",
|
||||
"main": "dist/index.js",
|
||||
"types": "src/index.ts",
|
||||
"scripts": {
|
||||
"compile": "tsc -p ./",
|
||||
"test": "npm run test:unit && npm run test:it",
|
||||
"pretest:unit": "rm -rf test-tmp && mkdir test-tmp",
|
||||
"test:unit": "mocha --require ts-node/register --recursive 'src/test/**/*.ts'",
|
||||
"test:it": "test-integration/test.sh",
|
||||
"coverage": "nyc npm run test:unit",
|
||||
"lint": "tslint -p .",
|
||||
"prepare": "npm run compile"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ryu1kn/csv-writer.git"
|
||||
},
|
||||
"keywords": [
|
||||
"csv",
|
||||
"writer",
|
||||
"stringify"
|
||||
],
|
||||
"author": "Ryuichi Inagaki",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/ryu1kn/csv-writer/issues"
|
||||
},
|
||||
"homepage": "https://github.com/ryu1kn/csv-writer#readme",
|
||||
"devDependencies": {
|
||||
"@types/mocha": "^5.2.7",
|
||||
"@types/node": "^12.12.25",
|
||||
"codeclimate-test-reporter": "^0.5.1",
|
||||
"coveralls": "^3.0.9",
|
||||
"mocha": "^7.0.0",
|
||||
"nyc": "^15.0.0",
|
||||
"ts-node": "^8.6.2",
|
||||
"tslint": "^5.20.1",
|
||||
"typescript": "^3.7.5"
|
||||
}
|
||||
}
|
||||
21
node_modules/csv-writer/src/index.ts
generated
vendored
Normal file
21
node_modules/csv-writer/src/index.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
import {
|
||||
ArrayCsvStringifierParams,
|
||||
CsvStringifierFactory,
|
||||
ObjectCsvStringifierParams
|
||||
} from './lib/csv-stringifier-factory';
|
||||
import {ArrayCsvWriterParams, CsvWriterFactory, ObjectCsvWriterParams} from './lib/csv-writer-factory';
|
||||
|
||||
const csvStringifierFactory = new CsvStringifierFactory();
|
||||
const csvWriterFactory = new CsvWriterFactory(csvStringifierFactory);
|
||||
|
||||
export const createArrayCsvStringifier = (params: ArrayCsvStringifierParams) =>
|
||||
csvStringifierFactory.createArrayCsvStringifier(params);
|
||||
|
||||
export const createObjectCsvStringifier = (params: ObjectCsvStringifierParams) =>
|
||||
csvStringifierFactory.createObjectCsvStringifier(params);
|
||||
|
||||
export const createArrayCsvWriter = (params: ArrayCsvWriterParams) =>
|
||||
csvWriterFactory.createArrayCsvWriter(params);
|
||||
|
||||
export const createObjectCsvWriter = (params: ObjectCsvWriterParams) =>
|
||||
csvWriterFactory.createObjectCsvWriter(params);
|
||||
33
node_modules/csv-writer/src/lib/csv-stringifier-factory.ts
generated
vendored
Normal file
33
node_modules/csv-writer/src/lib/csv-stringifier-factory.ts
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
import {ArrayCsvStringifier} from './csv-stringifiers/array';
|
||||
import {createFieldStringifier} from './field-stringifier';
|
||||
import {ObjectCsvStringifier} from './csv-stringifiers/object';
|
||||
import {ObjectStringifierHeader} from './record';
|
||||
|
||||
export interface ArrayCsvStringifierParams {
|
||||
header?: string[];
|
||||
fieldDelimiter?: string;
|
||||
recordDelimiter?: string;
|
||||
alwaysQuote?: boolean;
|
||||
}
|
||||
|
||||
export interface ObjectCsvStringifierParams {
|
||||
header: ObjectStringifierHeader;
|
||||
fieldDelimiter?: string;
|
||||
recordDelimiter?: string;
|
||||
headerIdDelimiter?: string;
|
||||
alwaysQuote?: boolean;
|
||||
}
|
||||
|
||||
export class CsvStringifierFactory {
|
||||
|
||||
createArrayCsvStringifier(params: ArrayCsvStringifierParams) {
|
||||
const fieldStringifier = createFieldStringifier(params.fieldDelimiter, params.alwaysQuote);
|
||||
return new ArrayCsvStringifier(fieldStringifier, params.recordDelimiter, params.header);
|
||||
}
|
||||
|
||||
createObjectCsvStringifier(params: ObjectCsvStringifierParams) {
|
||||
const fieldStringifier = createFieldStringifier(params.fieldDelimiter, params.alwaysQuote);
|
||||
return new ObjectCsvStringifier(fieldStringifier, params.header, params.recordDelimiter, params.headerIdDelimiter);
|
||||
}
|
||||
|
||||
}
|
||||
43
node_modules/csv-writer/src/lib/csv-stringifiers/abstract.ts
generated
vendored
Normal file
43
node_modules/csv-writer/src/lib/csv-stringifiers/abstract.ts
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
import {FieldStringifier} from '../field-stringifier';
|
||||
import {Field} from '../record';
|
||||
|
||||
const DEFAULT_RECORD_DELIMITER = '\n';
|
||||
const VALID_RECORD_DELIMITERS = [DEFAULT_RECORD_DELIMITER, '\r\n'];
|
||||
|
||||
export abstract class CsvStringifier<T> {
|
||||
|
||||
constructor(private readonly fieldStringifier: FieldStringifier,
|
||||
private readonly recordDelimiter = DEFAULT_RECORD_DELIMITER) {
|
||||
_validateRecordDelimiter(recordDelimiter);
|
||||
}
|
||||
|
||||
getHeaderString(): string | null {
|
||||
const headerRecord = this.getHeaderRecord();
|
||||
return headerRecord ? this.joinRecords([this.getCsvLine(headerRecord)]) : null;
|
||||
}
|
||||
|
||||
stringifyRecords(records: IterableIterator<T> | T[]): string {
|
||||
const csvLines = Array.from(records, record => this.getCsvLine(this.getRecordAsArray(record)));
|
||||
return this.joinRecords(csvLines);
|
||||
}
|
||||
|
||||
protected abstract getRecordAsArray(_record: T): Field[];
|
||||
|
||||
protected abstract getHeaderRecord(): string[] | null | undefined;
|
||||
|
||||
private getCsvLine(record: Field[]): string {
|
||||
return record
|
||||
.map(fieldValue => this.fieldStringifier.stringify(fieldValue))
|
||||
.join(this.fieldStringifier.fieldDelimiter);
|
||||
}
|
||||
|
||||
private joinRecords(records: string[]) {
|
||||
return records.join(this.recordDelimiter) + this.recordDelimiter;
|
||||
}
|
||||
}
|
||||
|
||||
function _validateRecordDelimiter(delimiter: string): void {
|
||||
if (VALID_RECORD_DELIMITERS.indexOf(delimiter) === -1) {
|
||||
throw new Error(`Invalid record delimiter \`${delimiter}\` is specified`);
|
||||
}
|
||||
}
|
||||
20
node_modules/csv-writer/src/lib/csv-stringifiers/array.ts
generated
vendored
Normal file
20
node_modules/csv-writer/src/lib/csv-stringifiers/array.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
import {CsvStringifier} from './abstract';
|
||||
import {FieldStringifier} from '../field-stringifier';
|
||||
import {Field} from '../record';
|
||||
|
||||
export class ArrayCsvStringifier extends CsvStringifier<Field[]> {
|
||||
|
||||
constructor(fieldStringifier: FieldStringifier,
|
||||
recordDelimiter?: string,
|
||||
private readonly header?: string[]) {
|
||||
super(fieldStringifier, recordDelimiter);
|
||||
}
|
||||
|
||||
protected getHeaderRecord() {
|
||||
return this.header;
|
||||
}
|
||||
|
||||
protected getRecordAsArray(record: Field[]): Field[] {
|
||||
return record;
|
||||
}
|
||||
}
|
||||
36
node_modules/csv-writer/src/lib/csv-stringifiers/object.ts
generated
vendored
Normal file
36
node_modules/csv-writer/src/lib/csv-stringifiers/object.ts
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
import {CsvStringifier} from './abstract';
|
||||
import {FieldStringifier} from '../field-stringifier';
|
||||
import {Field, ObjectHeaderItem, ObjectStringifierHeader} from '../record';
|
||||
import {isObject, ObjectMap} from '../lang/object';
|
||||
|
||||
export class ObjectCsvStringifier extends CsvStringifier<ObjectMap<Field>> {
|
||||
|
||||
constructor(fieldStringifier: FieldStringifier,
|
||||
private readonly header: ObjectStringifierHeader,
|
||||
recordDelimiter?: string,
|
||||
private readonly headerIdDelimiter?: string) {
|
||||
super(fieldStringifier, recordDelimiter);
|
||||
}
|
||||
|
||||
protected getHeaderRecord(): string[] | null {
|
||||
if (!this.isObjectHeader) return null;
|
||||
return (this.header as ObjectHeaderItem[]).map(field => field.title);
|
||||
}
|
||||
|
||||
protected getRecordAsArray(record: ObjectMap<Field>): Field[] {
|
||||
return this.fieldIds.map(fieldId => this.getNestedValue(record, fieldId));
|
||||
}
|
||||
|
||||
private getNestedValue(obj: ObjectMap<Field>, key: string) {
|
||||
if (!this.headerIdDelimiter) return obj[key];
|
||||
return key.split(this.headerIdDelimiter).reduce((subObj, keyPart) => (subObj || {})[keyPart], obj);
|
||||
}
|
||||
|
||||
private get fieldIds(): string[] {
|
||||
return this.isObjectHeader ? (this.header as ObjectHeaderItem[]).map(column => column.id) : (this.header as string[]);
|
||||
}
|
||||
|
||||
private get isObjectHeader(): boolean {
|
||||
return isObject(this.header && this.header[0]);
|
||||
}
|
||||
}
|
||||
49
node_modules/csv-writer/src/lib/csv-writer-factory.ts
generated
vendored
Normal file
49
node_modules/csv-writer/src/lib/csv-writer-factory.ts
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
import {CsvWriter} from './csv-writer';
|
||||
import {CsvStringifierFactory} from './csv-stringifier-factory';
|
||||
import {ObjectStringifierHeader} from './record';
|
||||
|
||||
export interface ArrayCsvWriterParams {
|
||||
path: string;
|
||||
header?: string[];
|
||||
fieldDelimiter?: string;
|
||||
recordDelimiter?: string;
|
||||
alwaysQuote?: boolean;
|
||||
encoding?: string;
|
||||
append?: boolean;
|
||||
}
|
||||
|
||||
export interface ObjectCsvWriterParams {
|
||||
path: string;
|
||||
header: ObjectStringifierHeader;
|
||||
fieldDelimiter?: string;
|
||||
recordDelimiter?: string;
|
||||
headerIdDelimiter?: string;
|
||||
alwaysQuote?: boolean;
|
||||
encoding?: string;
|
||||
append?: boolean;
|
||||
}
|
||||
|
||||
export class CsvWriterFactory {
|
||||
constructor(private readonly csvStringifierFactory: CsvStringifierFactory) {}
|
||||
|
||||
createArrayCsvWriter(params: ArrayCsvWriterParams) {
|
||||
const csvStringifier = this.csvStringifierFactory.createArrayCsvStringifier({
|
||||
header: params.header,
|
||||
fieldDelimiter: params.fieldDelimiter,
|
||||
recordDelimiter: params.recordDelimiter,
|
||||
alwaysQuote: params.alwaysQuote
|
||||
});
|
||||
return new CsvWriter(csvStringifier, params.path, params.encoding, params.append);
|
||||
}
|
||||
|
||||
createObjectCsvWriter(params: ObjectCsvWriterParams) {
|
||||
const csvStringifier = this.csvStringifierFactory.createObjectCsvStringifier({
|
||||
header: params.header,
|
||||
fieldDelimiter: params.fieldDelimiter,
|
||||
recordDelimiter: params.recordDelimiter,
|
||||
headerIdDelimiter: params.headerIdDelimiter,
|
||||
alwaysQuote: params.alwaysQuote
|
||||
});
|
||||
return new CsvWriter(csvStringifier, params.path, params.encoding, params.append);
|
||||
}
|
||||
}
|
||||
27
node_modules/csv-writer/src/lib/csv-writer.ts
generated
vendored
Normal file
27
node_modules/csv-writer/src/lib/csv-writer.ts
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
import {CsvStringifier} from './csv-stringifiers/abstract';
|
||||
import {FileWriter} from './file-writer';
|
||||
|
||||
const DEFAULT_INITIAL_APPEND_FLAG = false;
|
||||
|
||||
export class CsvWriter<T> {
|
||||
private readonly fileWriter: FileWriter;
|
||||
|
||||
constructor(private readonly csvStringifier: CsvStringifier<T>,
|
||||
path: string,
|
||||
encoding?: string,
|
||||
private append = DEFAULT_INITIAL_APPEND_FLAG) {
|
||||
this.fileWriter = new FileWriter(path, this.append, encoding);
|
||||
}
|
||||
|
||||
async writeRecords(records: T[]): Promise<void> {
|
||||
const recordsString = this.csvStringifier.stringifyRecords(records);
|
||||
const writeString = this.headerString + recordsString;
|
||||
await this.fileWriter.write(writeString);
|
||||
this.append = true;
|
||||
}
|
||||
|
||||
private get headerString(): string {
|
||||
const headerString = !this.append && this.csvStringifier.getHeaderString();
|
||||
return headerString || '';
|
||||
}
|
||||
}
|
||||
47
node_modules/csv-writer/src/lib/field-stringifier.ts
generated
vendored
Normal file
47
node_modules/csv-writer/src/lib/field-stringifier.ts
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
import {Field} from './record';
|
||||
|
||||
const DEFAULT_FIELD_DELIMITER = ',';
|
||||
const VALID_FIELD_DELIMITERS = [DEFAULT_FIELD_DELIMITER, ';'];
|
||||
|
||||
export abstract class FieldStringifier {
|
||||
constructor(public readonly fieldDelimiter: string) {}
|
||||
|
||||
abstract stringify(value?: Field): string;
|
||||
|
||||
protected isEmpty(value?: Field): boolean {
|
||||
return typeof value === 'undefined' || value === null || value === '';
|
||||
}
|
||||
|
||||
protected quoteField(field: string): string {
|
||||
return `"${field.replace(/"/g, '""')}"`;
|
||||
}
|
||||
}
|
||||
|
||||
class DefaultFieldStringifier extends FieldStringifier {
|
||||
stringify(value?: Field): string {
|
||||
if (this.isEmpty(value)) return '';
|
||||
const str = String(value);
|
||||
return this.needsQuote(str) ? this.quoteField(str) : str;
|
||||
}
|
||||
|
||||
private needsQuote(str: string): boolean {
|
||||
return str.includes(this.fieldDelimiter) || str.includes('\n') || str.includes('"');
|
||||
}
|
||||
}
|
||||
|
||||
class ForceQuoteFieldStringifier extends FieldStringifier {
|
||||
stringify(value?: Field): string {
|
||||
return this.isEmpty(value) ? '' : this.quoteField(String(value));
|
||||
}
|
||||
}
|
||||
|
||||
export function createFieldStringifier(fieldDelimiter: string = DEFAULT_FIELD_DELIMITER, alwaysQuote = false) {
|
||||
_validateFieldDelimiter(fieldDelimiter);
|
||||
return alwaysQuote ? new ForceQuoteFieldStringifier(fieldDelimiter) : new DefaultFieldStringifier(fieldDelimiter);
|
||||
}
|
||||
|
||||
function _validateFieldDelimiter(delimiter: string): void {
|
||||
if (VALID_FIELD_DELIMITERS.indexOf(delimiter) === -1) {
|
||||
throw new Error(`Invalid field delimiter \`${delimiter}\` is specified`);
|
||||
}
|
||||
}
|
||||
26
node_modules/csv-writer/src/lib/file-writer.ts
generated
vendored
Normal file
26
node_modules/csv-writer/src/lib/file-writer.ts
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
import {promisify} from './lang/promise';
|
||||
import {writeFile} from 'fs';
|
||||
|
||||
const writeFilePromise = promisify(writeFile);
|
||||
|
||||
const DEFAULT_ENCODING = 'utf8';
|
||||
|
||||
export class FileWriter {
|
||||
|
||||
constructor(private readonly path: string,
|
||||
private append: boolean,
|
||||
private readonly encoding = DEFAULT_ENCODING) {
|
||||
}
|
||||
|
||||
async write(string: string): Promise<void> {
|
||||
await writeFilePromise(this.path, string, this.getWriteOption());
|
||||
this.append = true;
|
||||
}
|
||||
|
||||
private getWriteOption() {
|
||||
return {
|
||||
encoding: this.encoding,
|
||||
flag: this.append ? 'a' : 'w'
|
||||
};
|
||||
}
|
||||
}
|
||||
6
node_modules/csv-writer/src/lib/lang/object.ts
generated
vendored
Normal file
6
node_modules/csv-writer/src/lib/lang/object.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
export const isObject = (value: any) =>
|
||||
Object.prototype.toString.call(value) === '[object Object]';
|
||||
|
||||
export interface ObjectMap<T> {
|
||||
[k: string]: T;
|
||||
}
|
||||
14
node_modules/csv-writer/src/lib/lang/promise.ts
generated
vendored
Normal file
14
node_modules/csv-writer/src/lib/lang/promise.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
|
||||
type NullableError = Error | null;
|
||||
|
||||
export function promisify(fn: (...args: any[]) => void): (...args: any[]) => any {
|
||||
return (...args: any[]) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const nodeCallback = (err: NullableError, result: any) => {
|
||||
if (err) reject(err);
|
||||
else resolve(result);
|
||||
};
|
||||
fn.apply(null, [...args, nodeCallback]);
|
||||
});
|
||||
};
|
||||
}
|
||||
5
node_modules/csv-writer/src/lib/record.ts
generated
vendored
Normal file
5
node_modules/csv-writer/src/lib/record.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
|
||||
export type Field = any;
|
||||
|
||||
export type ObjectHeaderItem = { id: string; title: string };
|
||||
export type ObjectStringifierHeader = ObjectHeaderItem[] | string[];
|
||||
100
node_modules/csv-writer/src/test/csv-stringifiers/array.test.ts
generated
vendored
Normal file
100
node_modules/csv-writer/src/test/csv-stringifiers/array.test.ts
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
||||
import {resolveDelimiterChar} from '../helper/delimiter';
|
||||
import {createArrayCsvStringifier} from '../../index';
|
||||
import {strictEqual, throws} from 'assert';
|
||||
|
||||
describe('ArrayCsvStringifier', () => {
|
||||
const records = [
|
||||
['FIELD_A1', 'FIELD_B1'],
|
||||
['FIELD_A2', 'FIELD_B2']
|
||||
];
|
||||
|
||||
describe('When field delimiter is comma', generateTestCases());
|
||||
|
||||
describe('When field delimiter is semicolon', generateTestCases(';'));
|
||||
|
||||
describe('When field delimiter is neither comma nor semicolon', () => {
|
||||
it('throws an exception', () => {
|
||||
throws(() => {
|
||||
createArrayCsvStringifier({fieldDelimiter: '/'});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When record delimiter is neither LF nor CR+LF', () => {
|
||||
it('throws an exception', () => {
|
||||
throws(() => {
|
||||
createArrayCsvStringifier({recordDelimiter: '\r'});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When records input is an iterable other than an array', () => {
|
||||
const stringifier = createArrayCsvStringifier({
|
||||
header: ['TITLE_A', 'TITLE_B']
|
||||
});
|
||||
function * recordGenerator() {
|
||||
yield records[0];
|
||||
yield records[1];
|
||||
}
|
||||
|
||||
it('converts the records into CSV', () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(recordGenerator()),
|
||||
'FIELD_A1,FIELD_B1\nFIELD_A2,FIELD_B2\n'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `alwaysQuote` flag is set', () => {
|
||||
const stringifier = createArrayCsvStringifier({
|
||||
header: ['TITLE_A', 'TITLE_B'],
|
||||
alwaysQuote: true
|
||||
});
|
||||
|
||||
it('quotes all header fields', () => {
|
||||
strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n');
|
||||
});
|
||||
|
||||
it('quotes all data fields', () => {
|
||||
strictEqual(stringifier.stringifyRecords(records), '"FIELD_A1","FIELD_B1"\n"FIELD_A2","FIELD_B2"\n');
|
||||
});
|
||||
});
|
||||
|
||||
function generateTestCases(fieldDelimiter?: string) {
|
||||
const delim = resolveDelimiterChar(fieldDelimiter);
|
||||
return () => {
|
||||
describe('header is specified as a list of column titles', () => {
|
||||
const stringifier = createArrayCsvStringifier({
|
||||
header: ['TITLE_A', 'TITLE_B'],
|
||||
fieldDelimiter
|
||||
});
|
||||
|
||||
it(`returns a header line with field separated by "${delim}"`, () => {
|
||||
strictEqual(stringifier.getHeaderString(), `TITLE_A${delim}TITLE_B\n`);
|
||||
});
|
||||
|
||||
it(`converts given data records into CSV lines with field separated by "${delim}"`, () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(records),
|
||||
`FIELD_A1${delim}FIELD_B1\nFIELD_A2${delim}FIELD_B2\n`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('header is not specified', () => {
|
||||
const stringifier = createArrayCsvStringifier({fieldDelimiter});
|
||||
|
||||
it('returns null for header line', () => {
|
||||
strictEqual(stringifier.getHeaderString(), null);
|
||||
});
|
||||
|
||||
it(`converts given data records into CSV lines with field separated by "${delim}"`, () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(records),
|
||||
`FIELD_A1${delim}FIELD_B1\nFIELD_A2${delim}FIELD_B2\n`
|
||||
);
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
150
node_modules/csv-writer/src/test/csv-stringifiers/object.test.ts
generated
vendored
Normal file
150
node_modules/csv-writer/src/test/csv-stringifiers/object.test.ts
generated
vendored
Normal file
@ -0,0 +1,150 @@
|
||||
import {resolveDelimiterChar} from '../helper/delimiter';
|
||||
import {createObjectCsvStringifier} from '../../index';
|
||||
import {strictEqual, throws} from 'assert';
|
||||
|
||||
describe('ObjectCsvStringifier', () => {
|
||||
const records = [
|
||||
{FIELD_A: 'VALUE_A1', FIELD_B: 'VALUE_B1'},
|
||||
{FIELD_A: 'VALUE_A2', FIELD_B: 'VALUE_B2', OTHERS: {FIELD_C: 'VALUE_C2'}}
|
||||
];
|
||||
|
||||
describe('When field delimiter is comma', generateTestCases());
|
||||
|
||||
describe('When field delimiter is semicolon', generateTestCases(';'));
|
||||
|
||||
describe('When field delimiter is neither comma nor semicolon', () => {
|
||||
it('throws an exception', () => {
|
||||
throws(() => {
|
||||
createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B'],
|
||||
fieldDelimiter: '/'
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When record delimiter is neither LF nor CR+LF', () => {
|
||||
it('throws an exception', () => {
|
||||
throws(() => {
|
||||
createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B'],
|
||||
recordDelimiter: '\r'
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When records input is an iterable other than an array', () => {
|
||||
const stringifier = createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B']
|
||||
});
|
||||
function * recordGenerator() {
|
||||
yield records[0];
|
||||
yield records[1];
|
||||
}
|
||||
|
||||
it('converts the records into CSV', () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(recordGenerator()),
|
||||
'VALUE_A1,VALUE_B1\nVALUE_A2,VALUE_B2\n'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `alwaysQuote` flag is set', () => {
|
||||
const stringifier = createObjectCsvStringifier({
|
||||
header: [
|
||||
{id: 'FIELD_A', title: 'TITLE_A'},
|
||||
{id: 'FIELD_B', title: 'TITLE_B'}
|
||||
],
|
||||
alwaysQuote: true
|
||||
});
|
||||
|
||||
it('quotes all header fields', () => {
|
||||
strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n');
|
||||
});
|
||||
|
||||
it('quotes all data fields', () => {
|
||||
strictEqual(stringifier.stringifyRecords(records), '"VALUE_A1","VALUE_B1"\n"VALUE_A2","VALUE_B2"\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `headerIdDelimiter` is set', () => {
|
||||
const stringifier = createObjectCsvStringifier({
|
||||
header: [
|
||||
{id: 'FIELD_A', title: 'TITLE_A'},
|
||||
{id: 'OTHERS/FIELD_C', title: 'TITLE_C'}
|
||||
],
|
||||
headerIdDelimiter: '/'
|
||||
});
|
||||
|
||||
it('uses the title as is', () => {
|
||||
strictEqual(stringifier.getHeaderString(), 'TITLE_A,TITLE_C\n');
|
||||
});
|
||||
|
||||
it('picks up a value in nested objects', () => {
|
||||
strictEqual(stringifier.stringifyRecords(records), 'VALUE_A1,\nVALUE_A2,VALUE_C2\n');
|
||||
});
|
||||
});
|
||||
|
||||
function generateTestCases(fieldDelimiter?: string) {
|
||||
const delim = resolveDelimiterChar(fieldDelimiter);
|
||||
return () => {
|
||||
describe('header is specified with title', () => {
|
||||
const stringifier = createObjectCsvStringifier({
|
||||
header: [
|
||||
{id: 'FIELD_A', title: 'TITLE_A'},
|
||||
{id: 'FIELD_B', title: 'TITLE_B'}
|
||||
],
|
||||
fieldDelimiter
|
||||
});
|
||||
|
||||
it(`returns a header line with field separated by "${delim}"`, () => {
|
||||
strictEqual(stringifier.getHeaderString(), `TITLE_A${delim}TITLE_B\n`);
|
||||
});
|
||||
|
||||
it(`converts given data records into CSV lines with field separated by "${delim}"`, () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(records),
|
||||
`VALUE_A1${delim}VALUE_B1\nVALUE_A2${delim}VALUE_B2\n`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('header is specified without title', () => {
|
||||
const stringifier = createObjectCsvStringifier({
|
||||
header: ['FIELD_A', 'FIELD_B'],
|
||||
fieldDelimiter
|
||||
});
|
||||
|
||||
it('returns null for header line', () => {
|
||||
strictEqual(stringifier.getHeaderString(), null);
|
||||
});
|
||||
|
||||
it(`converts given data records into CSV lines with field separated by "${delim}"`, () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(records),
|
||||
`VALUE_A1${delim}VALUE_B1\nVALUE_A2${delim}VALUE_B2\n`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('header columns are given with reverse order', () => {
|
||||
const stringifier = createObjectCsvStringifier({
|
||||
header: [
|
||||
{id: 'FIELD_B', title: 'TITLE_B'},
|
||||
{id: 'FIELD_A', title: 'TITLE_A'}
|
||||
],
|
||||
fieldDelimiter
|
||||
});
|
||||
|
||||
it(`layouts fields with the order of headers given with field separated by "${delim}"`, () => {
|
||||
strictEqual(
|
||||
stringifier.stringifyRecords(records),
|
||||
`VALUE_B1${delim}VALUE_A1\nVALUE_B2${delim}VALUE_A2\n`
|
||||
);
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
97
node_modules/csv-writer/src/test/field-stringifier.test.ts
generated
vendored
Normal file
97
node_modules/csv-writer/src/test/field-stringifier.test.ts
generated
vendored
Normal file
@ -0,0 +1,97 @@
|
||||
import {resolveDelimiterChar} from './helper/delimiter';
|
||||
import {createFieldStringifier} from '../lib/field-stringifier';
|
||||
import {strictEqual} from 'assert';
|
||||
|
||||
describe('DefaultFieldStringifier', () => {
|
||||
|
||||
describe('When field delimiter is comma', generateTestCases(','));
|
||||
|
||||
describe('When field delimiter is semicolon', generateTestCases(';'));
|
||||
|
||||
describe('When all fields needs to be quoted', () => {
|
||||
const stringifier = createFieldStringifier(',', true);
|
||||
|
||||
it('quotes a field', () => {
|
||||
strictEqual(stringifier.stringify('VALUE'), '"VALUE"');
|
||||
});
|
||||
|
||||
it('does not quote a field of value undefined', () => {
|
||||
strictEqual(stringifier.stringify(), '');
|
||||
});
|
||||
|
||||
it('does not quote a field of value null', () => {
|
||||
strictEqual(stringifier.stringify(null), '');
|
||||
});
|
||||
|
||||
it('does not quote a field of value empty string', () => {
|
||||
strictEqual(stringifier.stringify(''), '');
|
||||
});
|
||||
});
|
||||
|
||||
function generateTestCases(fieldDelimiter: string) {
|
||||
const delim = resolveDelimiterChar(fieldDelimiter);
|
||||
return () => {
|
||||
const stringifier = createFieldStringifier(fieldDelimiter);
|
||||
|
||||
it('returns the same string', () => {
|
||||
strictEqual(stringifier.stringify('VALUE'), 'VALUE');
|
||||
});
|
||||
|
||||
it('preserves the whitespace characters', () => {
|
||||
strictEqual(stringifier.stringify(' VALUE\tA '), ' VALUE\tA ');
|
||||
});
|
||||
|
||||
it(`wraps a field value with double quotes if the field contains "${delim}"`, () => {
|
||||
strictEqual(stringifier.stringify(`VALUE${delim}A`), `"VALUE${delim}A"`);
|
||||
});
|
||||
|
||||
it('wraps a field value with double quotes if the field contains newline', () => {
|
||||
strictEqual(stringifier.stringify('VALUE\nA'), '"VALUE\nA"');
|
||||
});
|
||||
|
||||
it('wraps a field value with double quotes and escape the double quotes if they are used in the field', () => {
|
||||
strictEqual(stringifier.stringify('VALUE"A'), '"VALUE""A"');
|
||||
});
|
||||
|
||||
it('escapes double quotes even if double quotes are only on the both edges of the field', () => {
|
||||
strictEqual(stringifier.stringify('"VALUE"'), '"""VALUE"""');
|
||||
});
|
||||
|
||||
it('converts a number into a string', () => {
|
||||
strictEqual(stringifier.stringify(1), '1');
|
||||
});
|
||||
|
||||
it('converts undefined into an empty string', () => {
|
||||
strictEqual(stringifier.stringify(), '');
|
||||
});
|
||||
|
||||
it('converts null into an empty string', () => {
|
||||
strictEqual(stringifier.stringify(null), '');
|
||||
});
|
||||
|
||||
it('converts an object into toString-ed value', () => {
|
||||
const obj = {
|
||||
name: 'OBJECT_NAME',
|
||||
toString: function () { return `Name: ${this.name}`; }
|
||||
};
|
||||
strictEqual(stringifier.stringify(obj), 'Name: OBJECT_NAME');
|
||||
});
|
||||
|
||||
it(`wraps a toString-ed field value with double quote if the value contains "${delim}"`, () => {
|
||||
const obj = {
|
||||
name: `OBJECT${delim}NAME`,
|
||||
toString: function () { return `Name: ${this.name}`; }
|
||||
};
|
||||
strictEqual(stringifier.stringify(obj), `"Name: OBJECT${delim}NAME"`);
|
||||
});
|
||||
|
||||
it('escapes double quotes in a toString-ed field value if the value has double quotes', () => {
|
||||
const obj = {
|
||||
name: 'OBJECT_NAME"',
|
||||
toString: function () { return `Name: ${this.name}`; }
|
||||
};
|
||||
strictEqual(stringifier.stringify(obj), '"Name: OBJECT_NAME"""');
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
16
node_modules/csv-writer/src/test/helper.ts
generated
vendored
Normal file
16
node_modules/csv-writer/src/test/helper.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
import {strictEqual} from 'assert';
|
||||
import {readFileSync} from 'fs';
|
||||
|
||||
export const testFilePath = (id: string) => `./test-tmp/${id}.csv`;
|
||||
|
||||
export const assertFile = (path: string, expectedContents: string, encoding?: string) => {
|
||||
const actualContents = readFileSync(path, encoding || 'utf8');
|
||||
strictEqual(actualContents, expectedContents);
|
||||
};
|
||||
|
||||
export const assertRejected = (p: Promise<any>, message: string) => {
|
||||
return p.then(
|
||||
() => new Error('Should not have been called'),
|
||||
(e: Error) => { strictEqual(e.message, message); }
|
||||
);
|
||||
};
|
||||
6
node_modules/csv-writer/src/test/helper/delimiter.ts
generated
vendored
Normal file
6
node_modules/csv-writer/src/test/helper/delimiter.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
|
||||
export const resolveDelimiterChar = (char?: string) => {
|
||||
if (char === ',' || char === ';') return char;
|
||||
if (typeof char === 'undefined') return ',';
|
||||
throw new Error('Invalid field delimiter');
|
||||
};
|
||||
21
node_modules/csv-writer/src/test/lang/promise.test.ts
generated
vendored
Normal file
21
node_modules/csv-writer/src/test/lang/promise.test.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
import {promisify} from '../../lib/lang/promise';
|
||||
import {strictEqual} from 'assert';
|
||||
import {assertRejected} from '../helper';
|
||||
|
||||
describe('Promise', () => {
|
||||
const greetAsync = (name: string, callback: (err: Error | null, result?: string) => void) => {
|
||||
setTimeout(() => {
|
||||
if (name === 'foo') callback(null, `Hello, ${name}!`);
|
||||
else callback(new Error(`We don't know ${name}`));
|
||||
}, 0);
|
||||
};
|
||||
const promisifiedFn = promisify(greetAsync);
|
||||
|
||||
it('promisify node style callback', async () => {
|
||||
strictEqual(await promisifiedFn('foo'), 'Hello, foo!');
|
||||
});
|
||||
|
||||
it('raise an error for error', async () => {
|
||||
await assertRejected(promisifiedFn('bar'), "We don't know bar");
|
||||
});
|
||||
});
|
||||
124
node_modules/csv-writer/src/test/write-array-records.test.ts
generated
vendored
Normal file
124
node_modules/csv-writer/src/test/write-array-records.test.ts
generated
vendored
Normal file
@ -0,0 +1,124 @@
|
||||
import {assertFile, testFilePath} from './helper';
|
||||
import {CsvWriter} from '../lib/csv-writer';
|
||||
import {writeFileSync} from 'fs';
|
||||
import {createArrayCsvWriter} from '../index';
|
||||
|
||||
describe('Write array records into CSV', () => {
|
||||
|
||||
const makeFilePath = (id: string) => testFilePath(`array-${id}`);
|
||||
const records = [
|
||||
['Bob', 'French'],
|
||||
['Mary', 'English']
|
||||
];
|
||||
|
||||
describe('When only path is specified', () => {
|
||||
const filePath = makeFilePath('minimum');
|
||||
let writer: CsvWriter<string[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
writer = createArrayCsvWriter({path: filePath});
|
||||
});
|
||||
|
||||
it('writes records to a new file', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
});
|
||||
|
||||
it('appends records when requested to write to the same file', async () => {
|
||||
await writer.writeRecords([records[0]]);
|
||||
await writer.writeRecords([records[1]]);
|
||||
assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When field header is given', () => {
|
||||
const filePath = makeFilePath('header');
|
||||
let writer: CsvWriter<string[]>;
|
||||
|
||||
beforeEach(() => {
|
||||
writer = createArrayCsvWriter({
|
||||
path: filePath,
|
||||
header: ['NAME', 'LANGUAGE']
|
||||
});
|
||||
});
|
||||
|
||||
it('writes a header', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
});
|
||||
|
||||
it('appends records without headers', async () => {
|
||||
await writer.writeRecords([records[0]]);
|
||||
await writer.writeRecords([records[1]]);
|
||||
assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `append` flag is specified', () => {
|
||||
const filePath = makeFilePath('append');
|
||||
writeFileSync(filePath, 'Mike,German\n', 'utf8');
|
||||
const writer = createArrayCsvWriter({
|
||||
path: filePath,
|
||||
append: true
|
||||
});
|
||||
|
||||
it('do not overwrite the existing contents and appends records to them', async () => {
|
||||
await writer.writeRecords([records[1]]);
|
||||
assertFile(filePath, 'Mike,German\nMary,English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When encoding is specified', () => {
|
||||
const filePath = makeFilePath('encoding');
|
||||
const writer = createArrayCsvWriter({
|
||||
path: filePath,
|
||||
encoding: 'utf16le'
|
||||
});
|
||||
|
||||
it('writes to a file with the specified encoding', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When semicolon is specified as a field delimiter', () => {
|
||||
const filePath = makeFilePath('field-delimiter');
|
||||
const writer = createArrayCsvWriter({
|
||||
path: filePath,
|
||||
header: ['NAME', 'LANGUAGE'],
|
||||
fieldDelimiter: ';'
|
||||
});
|
||||
|
||||
it('uses semicolon instead of comma to separate fields', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When newline is specified', () => {
|
||||
const filePath = makeFilePath('newline');
|
||||
const writer = createArrayCsvWriter({
|
||||
path: filePath,
|
||||
recordDelimiter: '\r\n'
|
||||
});
|
||||
|
||||
it('writes to a file with the specified newline character', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'Bob,French\r\nMary,English\r\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `alwaysQuote` flag is set', () => {
|
||||
const filePath = makeFilePath('always-quote');
|
||||
const writer = createArrayCsvWriter({
|
||||
path: filePath,
|
||||
header: ['NAME', 'LANGUAGE'],
|
||||
alwaysQuote: true
|
||||
});
|
||||
|
||||
it('quotes all fields', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n');
|
||||
});
|
||||
});
|
||||
});
|
||||
158
node_modules/csv-writer/src/test/write-object-records.test.ts
generated
vendored
Normal file
158
node_modules/csv-writer/src/test/write-object-records.test.ts
generated
vendored
Normal file
@ -0,0 +1,158 @@
|
||||
import {assertFile, testFilePath} from './helper';
|
||||
import {CsvWriter} from '../lib/csv-writer';
|
||||
import {writeFileSync} from 'fs';
|
||||
import {createObjectCsvWriter} from '../index';
|
||||
import {ObjectMap} from '../lib/lang/object';
|
||||
|
||||
describe('Write object records into CSV', () => {
|
||||
|
||||
const makeFilePath = (id: string) => testFilePath(`object-${id}`);
|
||||
const records = [
|
||||
{name: 'Bob', lang: 'French', address: {country: 'France'}},
|
||||
{name: 'Mary', lang: 'English'}
|
||||
];
|
||||
|
||||
describe('When only path and header ids are given', () => {
|
||||
const filePath = makeFilePath('minimum');
|
||||
let writer: CsvWriter<ObjectMap<any>>;
|
||||
|
||||
beforeEach(() => {
|
||||
writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang']
|
||||
});
|
||||
});
|
||||
|
||||
it('writes records to a new file', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
});
|
||||
|
||||
it('appends records when requested to write to the same file', async () => {
|
||||
await writer.writeRecords([records[0]]);
|
||||
await writer.writeRecords([records[1]]);
|
||||
assertFile(filePath, 'Bob,French\nMary,English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When header ids are given with reverse order', () => {
|
||||
const filePath = makeFilePath('column-order');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['lang', 'name']
|
||||
});
|
||||
|
||||
it('also writes columns with reverse order', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'French,Bob\nEnglish,Mary\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When field header is given with titles', () => {
|
||||
const filePath = makeFilePath('header');
|
||||
let writer: CsvWriter<ObjectMap<any>>;
|
||||
|
||||
beforeEach(() => {
|
||||
writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{id: 'name', title: 'NAME'}, {id: 'lang', title: 'LANGUAGE'}]
|
||||
});
|
||||
});
|
||||
|
||||
it('writes a header', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
});
|
||||
|
||||
it('appends records without headers', async () => {
|
||||
await writer.writeRecords([records[0]]);
|
||||
await writer.writeRecords([records[1]]);
|
||||
assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `append` flag is specified', () => {
|
||||
const filePath = makeFilePath('append');
|
||||
writeFileSync(filePath, 'Mike,German\n', 'utf8');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang'],
|
||||
append: true
|
||||
});
|
||||
|
||||
it('do not overwrite the existing contents and appends records to them', async () => {
|
||||
await writer.writeRecords([records[1]]);
|
||||
assertFile(filePath, 'Mike,German\nMary,English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When encoding is specified', () => {
|
||||
const filePath = makeFilePath('encoding');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang'],
|
||||
encoding: 'utf16le'
|
||||
});
|
||||
|
||||
it('writes to a file with the specified encoding', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When semicolon is specified as a field delimiter', () => {
|
||||
const filePath = makeFilePath('field-delimiter');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{id: 'name', title: 'NAME'}, {id: 'lang', title: 'LANGUAGE'}],
|
||||
fieldDelimiter: ';'
|
||||
});
|
||||
|
||||
it('uses semicolon instead of comma to separate fields', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When newline is specified', () => {
|
||||
const filePath = makeFilePath('newline');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: ['name', 'lang'],
|
||||
recordDelimiter: '\r\n'
|
||||
});
|
||||
|
||||
it('writes to a file with the specified newline character', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'Bob,French\r\nMary,English\r\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `alwaysQuote` flag is set', () => {
|
||||
const filePath = makeFilePath('always-quote');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{id: 'name', title: 'NAME'}, {id: 'lang', title: 'LANGUAGE'}],
|
||||
alwaysQuote: true
|
||||
});
|
||||
|
||||
it('quotes all fields', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When `headerIdDelimiter` flag is set', () => {
|
||||
const filePath = makeFilePath('nested');
|
||||
const writer = createObjectCsvWriter({
|
||||
path: filePath,
|
||||
header: [{id: 'name', title: 'NAME'}, {id: 'address.country', title: 'COUNTRY'}],
|
||||
headerIdDelimiter: '.'
|
||||
});
|
||||
|
||||
it('breaks keys into key paths', async () => {
|
||||
await writer.writeRecords(records);
|
||||
assertFile(filePath, 'NAME,COUNTRY\nBob,France\nMary,\n');
|
||||
});
|
||||
});
|
||||
});
|
||||
14
node_modules/line-reader/.eslintrc
generated
vendored
Normal file
14
node_modules/line-reader/.eslintrc
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"env": {
|
||||
"node": true,
|
||||
"mocha": true
|
||||
},
|
||||
"extends": "airbnb/base",
|
||||
"rules": {
|
||||
"no-var": 0,
|
||||
"indent": [2, 2, {"VariableDeclarator": { "var": 2 }}],
|
||||
"func-names": 0,
|
||||
"strict": [2, "global"],
|
||||
}
|
||||
}
|
||||
|
||||
35
node_modules/line-reader/.npmignore
generated
vendored
Normal file
35
node_modules/line-reader/.npmignore
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
|
||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (http://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules
|
||||
jspm_packages
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
20
node_modules/line-reader/LICENSE
generated
vendored
Normal file
20
node_modules/line-reader/LICENSE
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
Copyright (C) 2012 by Nick Ewing
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
166
node_modules/line-reader/README.md
generated
vendored
Normal file
166
node_modules/line-reader/README.md
generated
vendored
Normal file
@ -0,0 +1,166 @@
|
||||
Line Reader
|
||||
===========
|
||||
|
||||
Asynchronous, buffered, line-by-line file/stream reader with support for
|
||||
user-defined line separators.
|
||||
|
||||
Install
|
||||
-------
|
||||
|
||||
`npm install line-reader`
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
The `eachLine` function reads each line of the given file. Upon each new line,
|
||||
the given callback function is called with two parameters: the line read and a
|
||||
boolean value specifying whether the line read was the last line of the file.
|
||||
If the callback returns `false`, reading will stop and the file will be closed.
|
||||
|
||||
var lineReader = require('line-reader');
|
||||
|
||||
lineReader.eachLine('file.txt', function(line, last) {
|
||||
console.log(line);
|
||||
|
||||
if (/* done */) {
|
||||
return false; // stop reading
|
||||
}
|
||||
});
|
||||
|
||||
`eachLine` can also be used in an asynchronous manner by providing a third
|
||||
callback parameter like so:
|
||||
|
||||
var lineReader = require('line-reader');
|
||||
|
||||
lineReader.eachLine('file.txt', function(line, last, cb) {
|
||||
console.log(line);
|
||||
|
||||
if (/* done */) {
|
||||
cb(false); // stop reading
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
});
|
||||
|
||||
You can provide an optional second node-style callback that will be called with
|
||||
`(err)` on failure or `()` when finished (even if you manually terminate iteration
|
||||
by returning `false` from the iteratee):
|
||||
|
||||
var lineReader = require('line-reader');
|
||||
|
||||
// read all lines:
|
||||
lineReader.eachLine('file.txt', function(line) {
|
||||
console.log(line);
|
||||
}).then(function (err) {
|
||||
if (err) throw err;
|
||||
console.log("I'm done!!");
|
||||
});
|
||||
|
||||
For more granular control, `open`, `hasNextLine`, and `nextLine` maybe be used
|
||||
to iterate a file (but you must `close` it yourself):
|
||||
|
||||
// or read line by line:
|
||||
lineReader.open('file.txt', function(err, reader) {
|
||||
if (err) throw err;
|
||||
if (reader.hasNextLine()) {
|
||||
reader.nextLine(function(err, line) {
|
||||
try {
|
||||
if (err) throw err;
|
||||
console.log(line);
|
||||
} finally {
|
||||
reader.close(function(err) {
|
||||
if (err) throw err;
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
reader.close(function(err) {
|
||||
if (err) throw err;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
You may provide additional options in a hash before the callbacks to `eachLine` or `open`:
|
||||
* `separator` - a `string` or `RegExp` separator (defaults to `/\r\n?|\n/`)
|
||||
* `encoding` - file encoding (defaults to `'utf8'`)
|
||||
* `bufferSize` - amount of bytes to buffer (defaults to 1024)
|
||||
|
||||
For example:
|
||||
|
||||
lineReader.eachLine('file.txt', {separator: ';', encoding: 'utf8'}, function(line, last, cb) {
|
||||
console.log(line);
|
||||
});
|
||||
lineReader.open('file.txt', {bufferSize: 1024}, function(err, reader) {
|
||||
...
|
||||
});
|
||||
|
||||
Streams
|
||||
-------
|
||||
|
||||
Both `eachLine` and `open` support passing either a file name or a read stream:
|
||||
|
||||
// reading from stdin
|
||||
lineReader.eachLine(process.stdin, function(line) {});
|
||||
|
||||
// reading with file position boundaries
|
||||
var readStream = fs.createReadStream('test.log', { start: 0, end: 10000 });
|
||||
lineReader.eachLine(readStream, function(line) {});
|
||||
|
||||
Note however that if you're reading user input from stdin then the
|
||||
[readline module](https://nodejs.org/api/readline.html) is probably a better choice.
|
||||
|
||||
Promises
|
||||
--------
|
||||
|
||||
`eachLine` and `open` are compatible with `promisify` from [bluebird](https://github.com/petkaantonov/bluebird/blob/master/API.md#promisepromisifyfunction-nodefunction--dynamic-receiver---function):
|
||||
|
||||
var lineReader = require('line-reader'),
|
||||
Promise = require('bluebird');
|
||||
|
||||
var eachLine = Promise.promisify(lineReader.eachLine);
|
||||
eachLine('file.txt', function(line) {
|
||||
console.log(line);
|
||||
}).then(function() {
|
||||
console.log('done');
|
||||
}).catch(function(err) {
|
||||
console.error(err);
|
||||
});
|
||||
|
||||
If you're using a promise library that doesn't have a promisify function, here's how you can do it:
|
||||
|
||||
var lineReader = require('line-reader'),
|
||||
Promise = require(...);
|
||||
|
||||
var eachLine = function(filename, options, iteratee) {
|
||||
return new Promise(function(resolve, reject) {
|
||||
lineReader.eachLine(filename, options, iteratee, function(err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
eachLine('file.txt', function(line) {
|
||||
console.log(line);
|
||||
}).then(function() {
|
||||
console.log('done');
|
||||
}).catch(function(err) {
|
||||
console.error(err);
|
||||
});
|
||||
|
||||
Contributors
|
||||
------------
|
||||
|
||||
* Nick Ewing
|
||||
* Andy Edwards (jedwards1211)
|
||||
* Jameson Little (beatgammit)
|
||||
* Masum (masumsoft)
|
||||
* Matthew Caruana Galizia (mattcg)
|
||||
* Ricardo Bin (ricardohbin)
|
||||
|
||||
Paul Em has also written a reverse-version of this gem to read files from bottom to top: [reverse-line-reader](https://github.com/paul-em/reverse-line-reader).
|
||||
|
||||
Copyright 2011 Nick Ewing.
|
||||
306
node_modules/line-reader/lib/line_reader.js
generated
vendored
Normal file
306
node_modules/line-reader/lib/line_reader.js
generated
vendored
Normal file
@ -0,0 +1,306 @@
|
||||
'use strict';
|
||||
|
||||
var fs = require('fs'),
|
||||
StringDecoder = require('string_decoder').StringDecoder;
|
||||
|
||||
function createLineReader(readStream, options, creationCb) {
|
||||
if (options instanceof Function) {
|
||||
creationCb = options;
|
||||
options = undefined;
|
||||
}
|
||||
if (!options) options = {};
|
||||
|
||||
var encoding = options.encoding || 'utf8',
|
||||
separator = options.separator || /\r\n?|\n/,
|
||||
bufferSize = options.bufferSize || 1024,
|
||||
bufferStr = '',
|
||||
decoder = new StringDecoder(encoding),
|
||||
closed = false,
|
||||
eof = false,
|
||||
separatorIndex = -1,
|
||||
separatorLen,
|
||||
readDefer,
|
||||
moreToRead = false,
|
||||
findSeparator;
|
||||
|
||||
if (separator instanceof RegExp) {
|
||||
findSeparator = function() {
|
||||
var result = separator.exec(bufferStr);
|
||||
if (result && (result.index + result[0].length < bufferStr.length || eof)) {
|
||||
separatorIndex = result.index;
|
||||
separatorLen = result[0].length;
|
||||
} else {
|
||||
separatorIndex = -1;
|
||||
separatorLen = 0;
|
||||
}
|
||||
};
|
||||
} else {
|
||||
separatorLen = separator.length;
|
||||
findSeparator = function() {
|
||||
separatorIndex = bufferStr.indexOf(separator);
|
||||
};
|
||||
}
|
||||
|
||||
function getReadStream() {
|
||||
return readStream;
|
||||
}
|
||||
|
||||
function close(cb) {
|
||||
if (!closed) {
|
||||
closed = true;
|
||||
if (typeof readStream.close == 'function') {
|
||||
readStream.close();
|
||||
}
|
||||
setImmediate(cb);
|
||||
}
|
||||
}
|
||||
|
||||
function onFailure(err) {
|
||||
close(function(err2) {
|
||||
return creationCb(err || err2);
|
||||
});
|
||||
}
|
||||
|
||||
function isOpen() {
|
||||
return !closed;
|
||||
}
|
||||
|
||||
function isClosed() {
|
||||
return closed;
|
||||
}
|
||||
|
||||
function waitForMoreToRead(cb) {
|
||||
if (moreToRead) {
|
||||
cb();
|
||||
} else {
|
||||
readDefer = cb;
|
||||
}
|
||||
}
|
||||
|
||||
function resumeDeferredRead() {
|
||||
if (readDefer) {
|
||||
readDefer();
|
||||
readDefer = null;
|
||||
}
|
||||
}
|
||||
|
||||
function read(cb) {
|
||||
waitForMoreToRead(function() {
|
||||
var chunk;
|
||||
|
||||
try {
|
||||
chunk = readStream.read(bufferSize);
|
||||
} catch (err) {
|
||||
cb(err);
|
||||
}
|
||||
|
||||
if (chunk) {
|
||||
bufferStr += decoder.write(chunk.slice(0, chunk.length));
|
||||
} else {
|
||||
moreToRead = false;
|
||||
}
|
||||
|
||||
cb();
|
||||
});
|
||||
}
|
||||
|
||||
function onStreamReadable() {
|
||||
moreToRead = true;
|
||||
resumeDeferredRead();
|
||||
}
|
||||
|
||||
function onStreamEnd() {
|
||||
eof = true;
|
||||
resumeDeferredRead();
|
||||
}
|
||||
|
||||
readStream.on('readable', onStreamReadable);
|
||||
readStream.on('end', onStreamEnd);
|
||||
readStream.on('error', onFailure);
|
||||
|
||||
function shouldReadMore() {
|
||||
findSeparator();
|
||||
|
||||
return separatorIndex < 0 && !eof;
|
||||
}
|
||||
|
||||
function callWhile(conditionFn, bodyFn, doneCallback) {
|
||||
if (conditionFn()) {
|
||||
bodyFn(function (err) {
|
||||
if (err) {
|
||||
doneCallback(err);
|
||||
} else {
|
||||
setImmediate(callWhile, conditionFn, bodyFn, doneCallback);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
doneCallback();
|
||||
}
|
||||
}
|
||||
|
||||
function readToSeparator(cb) {
|
||||
callWhile(shouldReadMore, read, cb);
|
||||
}
|
||||
|
||||
function hasNextLine() {
|
||||
return bufferStr.length > 0 || !eof;
|
||||
}
|
||||
|
||||
function nextLine(cb) {
|
||||
if (closed) {
|
||||
return cb(new Error('LineReader has been closed'));
|
||||
}
|
||||
|
||||
function getLine(err) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
if (separatorIndex < 0 && eof) {
|
||||
separatorIndex = bufferStr.length;
|
||||
}
|
||||
var ret = bufferStr.substring(0, separatorIndex);
|
||||
|
||||
bufferStr = bufferStr.substring(separatorIndex + separatorLen);
|
||||
separatorIndex = -1;
|
||||
cb(undefined, ret);
|
||||
}
|
||||
|
||||
findSeparator();
|
||||
|
||||
if (separatorIndex < 0) {
|
||||
if (eof) {
|
||||
if (hasNextLine()) {
|
||||
separatorIndex = bufferStr.length;
|
||||
getLine();
|
||||
} else {
|
||||
return cb(new Error('No more lines to read.'));
|
||||
}
|
||||
} else {
|
||||
readToSeparator(getLine);
|
||||
}
|
||||
} else {
|
||||
getLine();
|
||||
}
|
||||
}
|
||||
|
||||
readToSeparator(function(err) {
|
||||
if (err) {
|
||||
onFailure(err);
|
||||
} else {
|
||||
return creationCb(undefined, {
|
||||
hasNextLine: hasNextLine,
|
||||
nextLine: nextLine,
|
||||
close: close,
|
||||
isOpen: isOpen,
|
||||
isClosed: isClosed,
|
||||
getReadStream: getReadStream
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function open(filenameOrStream, options, cb) {
|
||||
if (options instanceof Function) {
|
||||
cb = options;
|
||||
options = undefined;
|
||||
}
|
||||
|
||||
var readStream;
|
||||
|
||||
if (typeof filenameOrStream.read == 'function') {
|
||||
readStream = filenameOrStream;
|
||||
} else if (typeof filenameOrStream === 'string' || filenameOrStream instanceof String) {
|
||||
readStream = fs.createReadStream(filenameOrStream);
|
||||
} else {
|
||||
cb(new Error('Invalid file argument for LineReader.open. Must be filename or stream.'));
|
||||
return;
|
||||
}
|
||||
|
||||
readStream.pause();
|
||||
createLineReader(readStream, options, cb);
|
||||
}
|
||||
|
||||
function eachLine(filename, options, iteratee, cb) {
|
||||
if (options instanceof Function) {
|
||||
cb = iteratee;
|
||||
iteratee = options;
|
||||
options = undefined;
|
||||
}
|
||||
var asyncIteratee = iteratee.length === 3;
|
||||
|
||||
var theReader;
|
||||
var getReaderCb;
|
||||
|
||||
open(filename, options, function(err, reader) {
|
||||
theReader = reader;
|
||||
if (getReaderCb) {
|
||||
getReaderCb(reader);
|
||||
}
|
||||
|
||||
if (err) {
|
||||
if (cb) cb(err);
|
||||
return;
|
||||
}
|
||||
|
||||
function finish(err) {
|
||||
reader.close(function(err2) {
|
||||
if (cb) cb(err || err2);
|
||||
});
|
||||
}
|
||||
|
||||
function newRead() {
|
||||
if (reader.hasNextLine()) {
|
||||
setImmediate(readNext);
|
||||
} else {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
function continueCb(continueReading) {
|
||||
if (continueReading !== false) {
|
||||
newRead();
|
||||
} else {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
function readNext() {
|
||||
reader.nextLine(function(err, line) {
|
||||
if (err) {
|
||||
finish(err);
|
||||
}
|
||||
|
||||
var last = !reader.hasNextLine();
|
||||
|
||||
if (asyncIteratee) {
|
||||
iteratee(line, last, continueCb);
|
||||
} else {
|
||||
if (iteratee(line, last) !== false) {
|
||||
newRead();
|
||||
} else {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
newRead();
|
||||
});
|
||||
|
||||
// this hook is only for the sake of testing; if you choose to use it,
|
||||
// please don't file any issues (unless you can also reproduce them without
|
||||
// using this).
|
||||
return {
|
||||
getReader: function(cb) {
|
||||
if (theReader) {
|
||||
cb(theReader);
|
||||
} else {
|
||||
getReaderCb = cb;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
module.exports.open = open;
|
||||
module.exports.eachLine = eachLine;
|
||||
31
node_modules/line-reader/package.json
generated
vendored
Normal file
31
node_modules/line-reader/package.json
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "line-reader",
|
||||
"version": "0.4.0",
|
||||
"description": "Asynchronous, buffered, line-by-line file/stream reader",
|
||||
"url": "https://github.com/nickewing/line-reader",
|
||||
"keywords": [
|
||||
"file",
|
||||
"line",
|
||||
"reader",
|
||||
"scanner"
|
||||
],
|
||||
"author": "Nick Ewing <nick@nickewing.net>",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/nickewing/line-reader/issues"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "./lib"
|
||||
},
|
||||
"main": "./lib/line_reader",
|
||||
"scripts": {
|
||||
"test": "node_modules/.bin/mocha test/line_reader.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://nickewing@github.com/nickewing/line-reader.git"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "^2.4.5"
|
||||
}
|
||||
}
|
||||
89
node_modules/line-reader/test.js
generated
vendored
Normal file
89
node_modules/line-reader/test.js
generated
vendored
Normal file
@ -0,0 +1,89 @@
|
||||
// var file = './test/data/multibyte_file.txt';
|
||||
var file = './test/data/three_line_file.txt';
|
||||
// var file = './test/data/mac_os_9_file.txt';
|
||||
// var file = './test/data/separator_file.txt';
|
||||
|
||||
var util = require('util');
|
||||
|
||||
// var lineReader = require('readline').createInterface({
|
||||
// input: require('fs').createReadStream(file)
|
||||
// });
|
||||
|
||||
// lineReader.on('line', function (line) {
|
||||
// console.log('Line from file:', util.inspect(line));
|
||||
// });
|
||||
|
||||
|
||||
|
||||
var fs = require('fs');
|
||||
|
||||
// var readStream = fs.createReadStream(file);
|
||||
// // var hash = crypto.createHash('sha1');
|
||||
|
||||
// readStream
|
||||
// .on('readable', function () {
|
||||
// var chunk;
|
||||
// while (null !== (chunk = readStream.read())) {
|
||||
// console.log(chunk.length);
|
||||
// }
|
||||
// })
|
||||
// .on('end', function () {
|
||||
// console.log('done!');
|
||||
// });
|
||||
|
||||
|
||||
|
||||
// var readable = process.stdin;
|
||||
//
|
||||
|
||||
// var readable = fs.createReadStream(file);
|
||||
// readable.pause();
|
||||
|
||||
// console.log(readable.isPaused());
|
||||
|
||||
// readable.on('readable', () => {
|
||||
// var chunk;
|
||||
// console.log('called');
|
||||
// while (null !== (chunk = readable.read(4))) {
|
||||
// console.log('got %d bytes of data: %s', chunk.length, util.inspect(chunk.toString()));
|
||||
// }
|
||||
// });
|
||||
|
||||
// readable.on('end', () => {
|
||||
// console.log('done!');
|
||||
// });
|
||||
//
|
||||
//
|
||||
|
||||
var lineReader = require('./lib/line_reader');
|
||||
var readStream = fs.createReadStream('development.log', { start: 0, end: 10000 });
|
||||
lineReader.eachLine(readStream, (line) => console.log(line));
|
||||
|
||||
|
||||
|
||||
// var lineReader = require('./lib/line_reader'),
|
||||
// Promise = require('bluebird');
|
||||
|
||||
// var eachLine = Promise.promisify(lineReader.eachLine);
|
||||
// eachLine(process.stdin, function(line) {
|
||||
// console.log(line);
|
||||
// }).then(function() {
|
||||
// console.log('DONE');
|
||||
// }).catch(function(err) {
|
||||
// console.error(err);
|
||||
// });
|
||||
|
||||
|
||||
// const readline = require('readline');
|
||||
|
||||
// const rl = readline.createInterface({
|
||||
// input: process.stdin,
|
||||
// output: process.stdout
|
||||
// });
|
||||
|
||||
// rl.question('What do you think of Node.js? ', (answer) => {
|
||||
// TODO: Log the answer in a database
|
||||
// console.log('Thank you for your valuable feedback:', answer);
|
||||
|
||||
// rl.close();
|
||||
// });
|
||||
0
node_modules/line-reader/test/data/empty_file.txt
generated
vendored
Normal file
0
node_modules/line-reader/test/data/empty_file.txt
generated
vendored
Normal file
1
node_modules/line-reader/test/data/mac_os_9_file.txt
generated
vendored
Normal file
1
node_modules/line-reader/test/data/mac_os_9_file.txt
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
Jabberwocky
’Twas brillig, and the slithy toves
Did gyre and gimble in the wabe;
|
||||
2
node_modules/line-reader/test/data/multi_separator_file.txt
generated
vendored
Normal file
2
node_modules/line-reader/test/data/multi_separator_file.txt
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
foo||bar
|
||||
||baz
|
||||
2
node_modules/line-reader/test/data/multibyte_file.txt
generated
vendored
Normal file
2
node_modules/line-reader/test/data/multibyte_file.txt
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
ふうりうの初やおくの田植うた
|
||||
|
||||
6
node_modules/line-reader/test/data/normal_file.txt
generated
vendored
Normal file
6
node_modules/line-reader/test/data/normal_file.txt
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
Jabberwocky
|
||||
|
||||
’Twas brillig, and the slithy toves
|
||||
Did gyre and gimble in the wabe;
|
||||
|
||||
|
||||
1
node_modules/line-reader/test/data/one_line_file.txt
generated
vendored
Normal file
1
node_modules/line-reader/test/data/one_line_file.txt
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
one line file
|
||||
1
node_modules/line-reader/test/data/one_line_file_no_endline.txt
generated
vendored
Normal file
1
node_modules/line-reader/test/data/one_line_file_no_endline.txt
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
one line file no endline
|
||||
2
node_modules/line-reader/test/data/separator_file.txt
generated
vendored
Normal file
2
node_modules/line-reader/test/data/separator_file.txt
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
foo;bar
|
||||
;baz
|
||||
3
node_modules/line-reader/test/data/three_line_file.txt
generated
vendored
Normal file
3
node_modules/line-reader/test/data/three_line_file.txt
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
This is line one.
|
||||
This is line two.
|
||||
This is line three.
|
||||
6
node_modules/line-reader/test/data/unix_file.txt
generated
vendored
Normal file
6
node_modules/line-reader/test/data/unix_file.txt
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
Jabberwocky
|
||||
|
||||
’Twas brillig, and the slithy toves
|
||||
Did gyre and gimble in the wabe;
|
||||
|
||||
|
||||
2
node_modules/line-reader/test/data/windows_buffer_overlap_file.txt
generated
vendored
Normal file
2
node_modules/line-reader/test/data/windows_buffer_overlap_file.txt
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
test
|
||||
file
|
||||
6
node_modules/line-reader/test/data/windows_file.txt
generated
vendored
Normal file
6
node_modules/line-reader/test/data/windows_file.txt
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
Jabberwocky
|
||||
|
||||
’Twas brillig, and the slithy toves
|
||||
Did gyre and gimble in the wabe;
|
||||
|
||||
|
||||
412
node_modules/line-reader/test/line_reader.js
generated
vendored
Normal file
412
node_modules/line-reader/test/line_reader.js
generated
vendored
Normal file
@ -0,0 +1,412 @@
|
||||
var lineReader = require('../lib/line_reader'),
|
||||
assert = require('assert'),
|
||||
fs = require('fs'),
|
||||
testFilePath = __dirname + '/data/normal_file.txt',
|
||||
windowsFilePath = __dirname + '/data/windows_file.txt',
|
||||
windowsBufferOverlapFilePath = __dirname + '/data/windows_buffer_overlap_file.txt',
|
||||
unixFilePath = __dirname + '/data/unix_file.txt',
|
||||
macOs9FilePath = __dirname + '/data/mac_os_9_file.txt',
|
||||
separatorFilePath = __dirname + '/data/separator_file.txt',
|
||||
multiSeparatorFilePath = __dirname + '/data/multi_separator_file.txt',
|
||||
multibyteFilePath = __dirname + '/data/multibyte_file.txt',
|
||||
emptyFilePath = __dirname + '/data/empty_file.txt',
|
||||
oneLineFilePath = __dirname + '/data/one_line_file.txt',
|
||||
oneLineFileNoEndlinePath = __dirname + '/data/one_line_file_no_endline.txt',
|
||||
threeLineFilePath = __dirname + '/data/three_line_file.txt',
|
||||
testSeparatorFile = ['foo', 'bar\n', 'baz\n'],
|
||||
testFile = [
|
||||
'Jabberwocky',
|
||||
'',
|
||||
'’Twas brillig, and the slithy toves',
|
||||
'Did gyre and gimble in the wabe;',
|
||||
'',
|
||||
''
|
||||
],
|
||||
testBufferOverlapFile = [
|
||||
'test',
|
||||
'file'
|
||||
];
|
||||
|
||||
describe("lineReader", function() {
|
||||
describe("eachLine", function() {
|
||||
it("should read lines using the default separator", function(done) {
|
||||
var i = 0;
|
||||
|
||||
lineReader.eachLine(testFilePath, function(line, last) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 6) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(6, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should read windows files by default", function(done) {
|
||||
var i = 0;
|
||||
|
||||
lineReader.eachLine(windowsFilePath, function(line, last) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 6) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(6, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should handle \\r\\n overlapping buffer window correctly", function(done) {
|
||||
var i = 0;
|
||||
var bufferSize = 5;
|
||||
|
||||
lineReader.eachLine(windowsBufferOverlapFilePath, {bufferSize: bufferSize}, function(line, last) {
|
||||
assert.equal(testBufferOverlapFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 2) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(2, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should read unix files by default", function(done) {
|
||||
var i = 0;
|
||||
|
||||
lineReader.eachLine(unixFilePath, function(line, last) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 6) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(6, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should read mac os 9 files by default", function(done) {
|
||||
var i = 0;
|
||||
|
||||
lineReader.eachLine(macOs9FilePath, function(line, last) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 6) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(6, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should allow continuation of line reading via a callback", function(done) {
|
||||
var i = 0;
|
||||
|
||||
lineReader.eachLine(testFilePath, function(line, last, cb) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 6) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
|
||||
process.nextTick(cb);
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(6, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should separate files using given separator", function(done) {
|
||||
var i = 0;
|
||||
lineReader.eachLine(separatorFilePath, {separator: ';'}, function(line, last) {
|
||||
assert.equal(testSeparatorFile[i], line);
|
||||
i += 1;
|
||||
|
||||
if (i === 3) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(3, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should separate files using given separator with more than one character", function(done) {
|
||||
var i = 0;
|
||||
lineReader.eachLine(multiSeparatorFilePath, {separator: '||'}, function(line, last) {
|
||||
assert.equal(testSeparatorFile[i], line);
|
||||
i += 1;
|
||||
|
||||
if (i === 3) {
|
||||
assert.ok(last);
|
||||
} else {
|
||||
assert.ok(!last);
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(3, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should allow early termination of line reading", function(done) {
|
||||
var i = 0;
|
||||
lineReader.eachLine(testFilePath, function(line, last) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 2) {
|
||||
return false;
|
||||
}
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(2, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should allow early termination of line reading via a callback", function(done) {
|
||||
var i = 0;
|
||||
lineReader.eachLine(testFilePath, function(line, last, cb) {
|
||||
assert.equal(testFile[i], line, 'Each line should be what we expect');
|
||||
i += 1;
|
||||
|
||||
if (i === 2) {
|
||||
cb(false);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.equal(2, i);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should not call callback on empty file", function(done) {
|
||||
lineReader.eachLine(emptyFilePath, function(line) {
|
||||
assert.ok(false, "Empty file should not cause any callbacks");
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
done()
|
||||
});
|
||||
});
|
||||
|
||||
it("should error when the user tries calls nextLine on a closed LineReader", function(done) {
|
||||
lineReader.open(oneLineFilePath, function(err, reader) {
|
||||
assert.ok(!err);
|
||||
reader.close(function(err) {
|
||||
assert.ok(!err);
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(err, "nextLine should have errored because the reader is closed");
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("should work with a file containing only one line", function(done) {
|
||||
lineReader.eachLine(oneLineFilePath, function(line, last) {
|
||||
return true;
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should work with a file containing only one line and no endline character.", function(done) {
|
||||
var count = 0; var isDone = false;
|
||||
lineReader.eachLine(oneLineFileNoEndlinePath, function(line, last) {
|
||||
assert.equal(last, true, 'last should be true');
|
||||
return true;
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should close the file when eachLine finishes", function(done) {
|
||||
var reader;
|
||||
lineReader.eachLine(oneLineFilePath, function(line, last) {
|
||||
return false;
|
||||
}, function(err) {
|
||||
assert.ok(!err);
|
||||
assert.ok(reader.isClosed());
|
||||
done();
|
||||
}).getReader(function(_reader) {
|
||||
reader = _reader;
|
||||
});
|
||||
});
|
||||
|
||||
it("should close the file if there is an error during eachLine", function(done) {
|
||||
lineReader.eachLine(testFilePath, {bufferSize: 10}, function(line, last) {
|
||||
}, function(err) {
|
||||
assert.equal('a test error', err.message);
|
||||
assert.ok(reader.isClosed());
|
||||
done();
|
||||
}).getReader(function(_reader) {
|
||||
reader = _reader;
|
||||
|
||||
reader.getReadStream().read = function() {
|
||||
throw new Error('a test error');
|
||||
};
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("open", function() {
|
||||
it("should return a reader object and allow calls to nextLine", function(done) {
|
||||
lineReader.open(testFilePath, function(err, reader) {
|
||||
assert.ok(!err);
|
||||
assert.ok(reader.hasNextLine());
|
||||
|
||||
assert.ok(reader.hasNextLine(), 'Calling hasNextLine multiple times should be ok');
|
||||
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('Jabberwocky', line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('', line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('’Twas brillig, and the slithy toves', line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('Did gyre and gimble in the wabe;', line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('', line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('', line);
|
||||
assert.ok(!reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(err);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("should work with a file containing only one line", function(done) {
|
||||
lineReader.open(oneLineFilePath, function(err, reader) {
|
||||
assert.ok(!err);
|
||||
reader.close(function(err) {
|
||||
assert.ok(!err);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("should read multibyte characters on the buffer boundary", function(done) {
|
||||
lineReader.open(multibyteFilePath, {separator: '\n', encoding: 'utf8', bufferSize: 2}, function(err, reader) {
|
||||
assert.ok(!err);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('ふうりうの初やおくの田植うた', line,
|
||||
"Should read multibyte characters on buffer boundary");
|
||||
reader.close(function(err) {
|
||||
assert.ok(!err);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("should support opened streams", function() {
|
||||
var readStream = fs.createReadStream(testFilePath);
|
||||
|
||||
lineReader.open(readStream, function(err, reader) {
|
||||
assert.ok(!err);
|
||||
assert.ok(reader.hasNextLine());
|
||||
|
||||
assert.ok(reader.hasNextLine(), 'Calling hasNextLine multiple times should be ok');
|
||||
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal('Jabberwocky', line);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("should handle error while opening read stream", function() {
|
||||
lineReader.open('a file that does not exist', function(err, reader) {
|
||||
assert.ok(err);
|
||||
assert.ok(reader.isClosed());
|
||||
});
|
||||
});
|
||||
|
||||
describe("hasNextLine", function() {
|
||||
it("should return true when buffer is empty but not at EOF", function(done) {
|
||||
lineReader.open(threeLineFilePath, {separator: '\n', encoding: 'utf8', bufferSize: 36}, function(err, reader) {
|
||||
assert.ok(!err);
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal("This is line one.", line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal("This is line two.", line);
|
||||
assert.ok(reader.hasNextLine());
|
||||
reader.nextLine(function(err, line) {
|
||||
assert.ok(!err);
|
||||
assert.equal("This is line three.", line);
|
||||
assert.ok(!reader.hasNextLine());
|
||||
reader.close(function(err) {
|
||||
assert.ok(!err);
|
||||
done();
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
141
node_modules/puppeteer-cluster/CHANGELOG.md
generated
vendored
Normal file
141
node_modules/puppeteer-cluster/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
||||
# Changelog
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
## [0.24.0] - 2024-03-17
|
||||
Note: Use a version older than `0.24.0` if you want to use a puppeteer version older than 22.0.0.
|
||||
### Changed
|
||||
- Updated all dependencies to their latest versions
|
||||
- Added a missing timeout case when closing the browser
|
||||
- Removed `package-lock.json` as otherwise yarn complains (`warning package-lock.json found. Your project contains lock files generated by tools other than Yarn. It is advised not to mix package managers in order to avoid resolution inconsistencies caused by unsynchronized lock files. To clear this warning, remove package-lock.json.`).
|
||||
- Dropped support (in tests) for Node.js <=16 as puppeteer also support for these versions
|
||||
### Fixed
|
||||
- Fixed breaking puppeteer changes introduced in version 22 (renamed `createIncognitoBrowserContext` to `createBrowserContext`)
|
||||
|
||||
## [0.23.0] - 2022-01-23
|
||||
### Changed
|
||||
- Updated dependencies which haven't been updated for more than one year
|
||||
- Moved from Travis to Github Actions
|
||||
- Remove the linter for now, but will probably add it back later after having a look at the pull requests.
|
||||
### Fixed
|
||||
- Fixed bug #395 not using `perBrowserOptions`
|
||||
|
||||
## [0.22.0] - 2020-08-06
|
||||
### Changed
|
||||
- Updated dependencies to their latest versions
|
||||
- In particular the devDependencies `puppeteer` and `puppeteer-core` received a major upgrade (to version `5`)
|
||||
- `peerDependencies` was changed accordingly.
|
||||
|
||||
## [0.21.0] - 2020-05-24
|
||||
### Changed
|
||||
- Updated dependencies to their latest versions
|
||||
- In particular the devDependencies `puppeteer` and `puppeteer-core` received a major upgrade (to version `3`)
|
||||
- `peerDependencies` was changed to `^1.5.0 || ^2.0.0 || ^3.0.0` as there were no changes regarding the API that this library uses.
|
||||
- Removed support for Node.js version 8 as puppeteer [dropped support](https://github.com/puppeteer/puppeteer/releases/tag/v3.0.0) for it.
|
||||
- Added check to ensure `maxConcurrency` is set (#243)
|
||||
|
||||
## [0.20.0] - 2020-03-26
|
||||
### Added
|
||||
- Add a new option `perBrowserOptions` for using different args for each puppeteer instance
|
||||
- Allowing to use a different proxy for each worker
|
||||
### Changed
|
||||
- Updated dev dependencies to their latest versions
|
||||
|
||||
## [0.19.0] - 2020-02-16
|
||||
### Changed
|
||||
- Added the parameter `jobWillRetry` to the `'taskerror'` Event
|
||||
|
||||
## [0.18.0] - 2019-12-05
|
||||
### Changed
|
||||
- Updated dependencies to their latest versions
|
||||
- In particular the devDependencies `puppeteer` and `puppeteer-core` received a major upgrade (to version `2`)
|
||||
- `peerDependencies` was changed to `^1.5.0 || ^2.0.0` as there were no changes regarding the API that this library uses.
|
||||
- Removed support for Node.js version 6 as puppeteer [dropped support](https://github.com/puppeteer/puppeteer/pull/5045) for it.
|
||||
|
||||
## [0.17.0] - 2019-08-03
|
||||
### Changed
|
||||
- Updated dependencies to their latest versions
|
||||
### Fixed
|
||||
- Fixed bug making it impossible to queue `null` (issue #178)
|
||||
|
||||
## [0.16.0] - 2019-05-11
|
||||
### Changed
|
||||
- License changed to MIT
|
||||
- Updated dependencies to their latest versions
|
||||
|
||||
## [0.15.2] - 2019-03-09
|
||||
### Fixed
|
||||
- Fixed bug, which crashed the cluster in some environments (#113)
|
||||
|
||||
## [0.15.1] - 2019-03-06
|
||||
### Changed
|
||||
- Changed when the `queue` event is emitted (after the job is queued into the internal queue).
|
||||
|
||||
## [0.15.0] - 2019-03-06
|
||||
### Added
|
||||
- Event `queue` added
|
||||
|
||||
## [0.14.0] - 2019-02-28
|
||||
### Added
|
||||
- Support for generics via `Cluster<InType, OutType>`
|
||||
|
||||
## [0.13.2] - 2019-02-26
|
||||
### Fixed
|
||||
- `package-lock.json` file was not updated
|
||||
|
||||
## [0.13.1] - 2019-02-26
|
||||
### Fixed
|
||||
- Internal helper function was accidentally set to public
|
||||
|
||||
## [0.13.0] - 2019-02-25
|
||||
### Added
|
||||
- `Cluster.execute` function added
|
||||
- Examples for `Cluster.execute` added
|
||||
|
||||
## [0.12.1] - 2018-11-08
|
||||
### Fixed
|
||||
- Fixed support for custom concurrency implementations
|
||||
|
||||
## [0.12.0] - 2018-11-07
|
||||
### Added
|
||||
- Support for custom puppeteer libraries added
|
||||
- Support for custom concurrency implementations added
|
||||
### Changed
|
||||
- Updated dependencies to their latest versions
|
||||
|
||||
## [0.11.2] - 2018-09-07
|
||||
### Fixed
|
||||
- Fixed another sameDomainDelay bug leading to high CPU usage
|
||||
|
||||
## [0.11.1] - 2018-09-06
|
||||
### Fixed
|
||||
- Fixed `sameDomainDelay` bug (issue #11)
|
||||
|
||||
## [0.11.0] - 2018-09-05
|
||||
### Fixed
|
||||
- Fixed rarely happening bug (issue #3), which made browser not able to restart
|
||||
|
||||
## [0.10.0] - 2018-08-21
|
||||
### Added
|
||||
- Node.js support for version 6 and 7 added
|
||||
|
||||
## [0.9.1] - 2018-07-18
|
||||
### Fixed
|
||||
- Circular structures led to crashs in case of crawling errors.
|
||||
|
||||
## [0.9.0] - 2018-07-13
|
||||
### Changed
|
||||
- Cluster.task function signature changed from `Cluster.task(page, url)` to `Cluster.task({ page, data })`. `Cluster.queue` can be passed any data instead of a string or object.
|
||||
|
||||
## 0.8.1 - 2018-07-08
|
||||
### Fixed
|
||||
- The timeout-promise for a task was not canceled when a task threw an error.
|
||||
|
||||
## 0.8.0 - 2018-07-04
|
||||
### Added
|
||||
- Cluster can be used without defining a Cluster.task function by queuing only functions.
|
||||
|
||||
### Fixed
|
||||
- Page errors ("Page crashed!") were not caught so far
|
||||
|
||||
### Removed
|
||||
- maxCPU and maxMemory options removed as they made no sense (better to check how much chromium your machine can handle.)
|
||||
76
node_modules/puppeteer-cluster/CONTRIBUTING.md
generated
vendored
Normal file
76
node_modules/puppeteer-cluster/CONTRIBUTING.md
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
||||
# Contributing
|
||||
|
||||
## Pull requests are welcome
|
||||
|
||||
Feel free to open a pull request. Just be aware that it might take some time for me to review your request.
|
||||
|
||||
#### How to make a good pull request
|
||||
- Make sure you do not break the API.
|
||||
- Make sure the tests succeed.
|
||||
- If you added functionality, add a test case if possible.
|
||||
- If you added a function or modified the API, change the documentation accordingly.
|
||||
- Don't change the `package.json` or `package-lock.json` files to bump the version number of this library.
|
||||
- Feel free to change the `CHANGELOG.md` to include your change. You don't need to add a date though. I'll do it, when I publish the next version.
|
||||
|
||||
Thank you very much for contributing :)
|
||||
|
||||
## Release Process
|
||||
|
||||
**There is no need to read the following guide if you want to make a pull request. This is basically my TODO list for publishing.**
|
||||
|
||||
Releases are automatically published to npm by Travis CI. To successfully create a release the following preconditions have to be met:
|
||||
- The commit has to be on the master.
|
||||
- The commit has to be tagged.
|
||||
- The build has to pass the tests.
|
||||
|
||||
To create a new release, follow these steps (my publishing TODO list).
|
||||
|
||||
#### Prepare Release
|
||||
1. Make sure all tests pass: `npm run test`
|
||||
2. Make sure [CHANGELOG.md](./CHANGELOG.md) contains the changes and the current date next to the version.
|
||||
|
||||
#### Release
|
||||
1. `npm version [patch|minor|major]`
|
||||
2. `git push --follow-tags`
|
||||
|
||||
After 2-30min, a new version should be published on npm. To check which files are being published, check Travis log or [unpkg.com](https://unpkg.com/puppeteer-cluster/).
|
||||
|
||||
#### Failed release
|
||||
Sometimes a test fails on Travis and a new version is not published. In that case do the following:
|
||||
|
||||
1. Delete the tag from the local repository
|
||||
- `git tag -d v0.X.X`
|
||||
2. Delete the tag from the remote repository
|
||||
- `git push --delete origin v0.X.X`
|
||||
3. Fix the problem
|
||||
4. Commit and push the (untagged) changes and make sure the tests succeed.
|
||||
5. Manually tag the commit which fixes the problem
|
||||
- `git tag v0.X.X`
|
||||
6. Push the tag
|
||||
- `git push --tags`
|
||||
|
||||
Now Travis will run again, hopefully succeeding this time.
|
||||
|
||||
## Dependency Upgrades
|
||||
|
||||
The repository has [dependabot](https://github.com/dependabot) enabled. At the start of each month, the bot will open one pull request for each dependency which needs to be upgraded. Most dependencies are uncritical and used for testing. If the tests succeed they can be merged.
|
||||
|
||||
### Manually upgrading dependencies
|
||||
|
||||
If there are a lot of dependencies with updates available it might be easier to upgrade them all at once manually:
|
||||
|
||||
1. Check which dependencies are outdated (this will return nothing if all dependencies are up-to-date)
|
||||
- `npm outdated`
|
||||
2. Update all dependencies to their latest version (according to `package.json`). This should do all minor updates.
|
||||
- `npm update`
|
||||
3. In case there are any vulnerabilities (`found ... high severity vulnerabilities`), fix them so that people installing this library, don't get warnings.
|
||||
- `npm audit fix`
|
||||
4. Test if everything went okay, all tests should pass.
|
||||
- `npm run test`
|
||||
5. Check again if there are any outdated dependencies. This might be major upgrades which are not performed automatically.
|
||||
- `npm outdated`
|
||||
6. Manually decide for each package if it should be upgraded and upgrade them:
|
||||
- `npm install PACKAGE_NAME@latest --save`
|
||||
7. Rerun tests.
|
||||
- `npm run test`
|
||||
8. If everything worked, the changes can be pushed, probably followed by a new release.
|
||||
21
node_modules/puppeteer-cluster/LICENSE
generated
vendored
Normal file
21
node_modules/puppeteer-cluster/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 Thomas Dondorf
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
252
node_modules/puppeteer-cluster/README.md
generated
vendored
Normal file
252
node_modules/puppeteer-cluster/README.md
generated
vendored
Normal file
@ -0,0 +1,252 @@
|
||||
# Puppeteer Cluster
|
||||
|
||||
[](https://github.com/thomasdondorf/puppeteer-cluster/actions/workflows/actions.yml)
|
||||
[](https://www.npmjs.com/package/puppeteer-cluster)
|
||||
[](https://www.npmjs.com/package/puppeteer-cluster)
|
||||
[](https://coveralls.io/github/thomasdondorf/puppeteer-cluster?branch=master)
|
||||
[](https://snyk.io/test/github/thomasdondorf/puppeteer-cluster)
|
||||
[](#license)
|
||||
|
||||
Create a cluster of puppeteer workers. This library spawns a pool of Chromium instances via [Puppeteer] and helps to keep track of jobs and errors. This is helpful if you want to crawl multiple pages or run tests in parallel. Puppeteer Cluster takes care of reusing Chromium and restarting the browser in case of errors.
|
||||
|
||||
- [Installation](#installation)
|
||||
- [Usage](#usage)
|
||||
- [Examples](#examples)
|
||||
- [Concurrency implementations](#concurrency-implementations)
|
||||
- [Typings for input/output (via TypeScript Generics)](#typings-for-inputoutput-via-typescript-generics)
|
||||
- [Debugging](#debugging)
|
||||
- [API](#api)
|
||||
- [License](#license)
|
||||
|
||||
###### What does this library do?
|
||||
|
||||
* Handling of crawling errors
|
||||
* Auto restarts the browser in case of a crash
|
||||
* Can automatically retry if a job fails
|
||||
* Different concurrency models to choose from (pages, contexts, browsers)
|
||||
* Simple to use, small boilerplate
|
||||
* Progress view and monitoring statistics (see below)
|
||||
|
||||
<p align="center">
|
||||
<img src="https://i.imgur.com/koGNkBN.gif" height="250">
|
||||
</p>
|
||||
|
||||
## Installation
|
||||
|
||||
Install using your favorite package manager:
|
||||
|
||||
```sh
|
||||
npm install --save puppeteer # in case you don't already have it installed
|
||||
npm install --save puppeteer-cluster
|
||||
```
|
||||
|
||||
Alternatively, use `yarn`:
|
||||
```sh
|
||||
yarn add puppeteer puppeteer-cluster
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
The following is a typical example of using puppeteer-cluster. A cluster is created with 2 concurrent workers. Then a task is defined which includes going to the URL and taking a screenshot. We then queue two jobs and wait for the cluster to finish.
|
||||
|
||||
```js
|
||||
const { Cluster } = require('puppeteer-cluster');
|
||||
|
||||
(async () => {
|
||||
const cluster = await Cluster.launch({
|
||||
concurrency: Cluster.CONCURRENCY_CONTEXT,
|
||||
maxConcurrency: 2,
|
||||
});
|
||||
|
||||
await cluster.task(async ({ page, data: url }) => {
|
||||
await page.goto(url);
|
||||
const screen = await page.screenshot();
|
||||
// Store screenshot, do something else
|
||||
});
|
||||
|
||||
cluster.queue('http://www.google.com/');
|
||||
cluster.queue('http://www.wikipedia.org/');
|
||||
// many more pages
|
||||
|
||||
await cluster.idle();
|
||||
await cluster.close();
|
||||
})();
|
||||
```
|
||||
|
||||
## Examples
|
||||
* [Simple example](examples/minimal.js)
|
||||
* [Wait for a task to be executed](examples/execute.js)
|
||||
* [Minimal screenshot server with express](examples/express-screenshot.js)
|
||||
* [Deep crawling the Google search results](examples/deep-google-crawler.js)
|
||||
* [Crawling the Alexa Top 1 Million](examples/alexa-1m.js)
|
||||
* [Queuing functions (simple)](examples/function-queuing-simple.js)
|
||||
* [Queuing functions (complex)](examples/function-queuing-complex.js)
|
||||
* [Error handling](examples/error-handling.js)
|
||||
* [Using a different puppeteer library (like puppeteer-core or puppeteer-firefox)](examples/different-puppeteer-library.js)
|
||||
* [Provide types for input/output with TypeScript generics](examples/typings.ts)
|
||||
|
||||
## Concurrency implementations
|
||||
|
||||
There are different concurrency models, which define how isolated each job is run. You can set it in the `options` when calling [Cluster.launch](#Clusterlaunchoptions). The default option is `Cluster.CONCURRENCY_CONTEXT`, but it is recommended to always specify which one you want to use.
|
||||
|
||||
| Concurrency | Description | Shared data |
|
||||
| --- | --- | --- |
|
||||
| `CONCURRENCY_PAGE` | One [Page] for each URL | Shares everything (cookies, localStorage, etc.) between jobs. |
|
||||
| `CONCURRENCY_CONTEXT` | Incognito page (see [BrowserContext](https://github.com/puppeteer/puppeteer/blob/main/docs/api/puppeteer.browser.createbrowsercontext.md#browsercreatebrowsercontext-method)) for each URL | No shared data. |
|
||||
| `CONCURRENCY_BROWSER` | One browser (using an incognito page) per URL. If one browser instance crashes for any reason, this will not affect other jobs. | No shared data. |
|
||||
| Custom concurrency (**experimental**) | You can create your own concurrency implementation. Copy one of the files of the `concurrency/built-in` directory and implement `ConcurrencyImplementation`. Then provide the class to the option `concurrency`. **This part of the library is currently experimental and might break in the future, even in a minor version upgrade while the version has not reached 1.0.** | Depends on your implementation |
|
||||
|
||||
## Typings for input/output (via TypeScript Generics)
|
||||
|
||||
To allow proper type checks with TypeScript you can provide generics. In case no types are provided, `any` is assumed for input and output. See the following minimal example or check out the more complex [typings example](examples/typings.ts) for more information.
|
||||
|
||||
```ts
|
||||
const cluster: Cluster<string, number> = await Cluster.launch(/* ... */);
|
||||
|
||||
await cluster.task(async ({ page, data }) => {
|
||||
// TypeScript knows that data is a string and expects this function to return a number
|
||||
return 123;
|
||||
});
|
||||
|
||||
// Typescript expects a string as argument ...
|
||||
cluster.queue('http://...');
|
||||
|
||||
// ... and will return a number when execute is called.
|
||||
const result = await cluster.execute('https://www.google.com');
|
||||
```
|
||||
|
||||
|
||||
## Debugging
|
||||
|
||||
Try to checkout the [puppeteer debugging tips](https://github.com/GoogleChrome/puppeteer#debugging-tips) first. Your problem might not be related to `puppeteer-cluster`, but `puppteer` itself. Additionally, you can enable verbose logging to see which data is consumed by which worker and some other cluster information. Set the DEBUG environment variable to `puppeteer-cluster:*`. See an example below or checkout the [debug docs](https://github.com/visionmedia/debug#windows-command-prompt-notes) for more information.
|
||||
|
||||
```bash
|
||||
# Linux
|
||||
DEBUG='puppeteer-cluster:*' node examples/minimal
|
||||
# Windows Powershell
|
||||
$env:DEBUG='puppeteer-cluster:*';node examples/minimal
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
- [class: Cluster](#class-cluster)
|
||||
* [Cluster.launch(options)](#clusterlaunchoptions)
|
||||
* [cluster.task(taskFunction)](#clustertasktaskfunction)
|
||||
* [cluster.queue([data] [, taskFunction])](#clusterqueuedata--taskfunction)
|
||||
* [cluster.execute([data] [, taskFunction])](#clusterexecutedata--taskfunction)
|
||||
* [cluster.idle()](#clusteridle)
|
||||
* [cluster.close()](#clusterclose)
|
||||
|
||||
### class: Cluster
|
||||
|
||||
Cluster module provides a method to launch a cluster of Chromium instances.
|
||||
|
||||
#### event: 'taskerror'
|
||||
- <[Error]>
|
||||
- <[string]|[Object]>
|
||||
- <[boolean]>
|
||||
|
||||
Emitted when a queued task ends in an error for some reason. Reasons might be a network error, your code throwing an error, timeout hit, etc. The first argument will the error itself. The second argument is the URL or data of the job (as given to [Cluster.queue]). If retryLimit is set to a value greater than `0`, the cluster will automatically requeue the job and retry it again later. The third argument is a boolean which indicates whether this task will be retried.
|
||||
In case the task was queued via [Cluster.execute] there will be no event fired.
|
||||
|
||||
```js
|
||||
cluster.on('taskerror', (err, data, willRetry) => {
|
||||
if (willRetry) {
|
||||
console.warn(`Encountered an error while crawling ${data}. ${err.message}\nThis job will be retried`);
|
||||
} else {
|
||||
console.error(`Failed to crawl ${data}: ${err.message}`);
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
#### event: 'queue'
|
||||
- <\?[Object]>
|
||||
- <\?[function]>
|
||||
|
||||
Emitted when a task is queued via [Cluster.queue] or [Cluster.execute]. The first argument is the object containing the data (if any data is provided). The second argument is the queued function (if any). In case only a function is provided via [Cluster.queue] or [Cluster.execute], the first argument will be undefined. If only data is provided, the second argument will be undefined.
|
||||
|
||||
#### Cluster.launch(options)
|
||||
- `options` <[Object]> Set of configurable options for the cluster. Can have the following fields:
|
||||
- `concurrency` <*Cluster.CONCURRENCY_PAGE*|*Cluster.CONCURRENCY_CONTEXT*|*Cluster.CONCURRENCY_BROWSER*|ConcurrencyImplementation> The chosen concurrency model. See [Concurreny models](#concurreny-models) for more information. Defaults to `Cluster.CONCURRENCY_CONTEXT`. Alternatively you can provide a class implementing `ConcurrencyImplementation`.
|
||||
- `maxConcurrency` <[number]> Maximal number of parallel workers. Defaults to `1`.
|
||||
- `puppeteerOptions` <[Object]> Object passed to [puppeteer.launch]. See puppeteer documentation for more information. Defaults to `{}`.
|
||||
- `perBrowserOptions` <[Array]<[Object]>> Object passed to [puppeteer.launch] for each individual browser. If set, `puppeteerOptions` will be ignored. Defaults to `undefined` (meaning that `puppeteerOptions` will be used).
|
||||
- `retryLimit` <[number]> How often do you want to retry a job before marking it as failed. Ignored by tasks queued via [Cluster.execute]. Defaults to `0`.
|
||||
- `retryDelay` <[number]> How much time should pass at minimum between the job execution and its retry. Ignored by tasks queued via [Cluster.execute]. Defaults to `0`.
|
||||
- `sameDomainDelay` <[number]> How much time should pass at minimum between two requests to the same domain. If you use this field, the queued `data` must be your URL or `data` must be an object containing a field called `url`.
|
||||
- `skipDuplicateUrls` <[boolean]> If set to `true`, will skip URLs which were already crawled by the cluster. Defaults to `false`. If you use this field, the queued `data` must be your URL or `data` must be an object containing a field called `url`.
|
||||
- `timeout` <[number]> Specify a timeout for all tasks. Defaults to `30000` (30 seconds).
|
||||
- `monitor` <[boolean]> If set to `true`, will provide a small command line output to provide information about the crawling process. Defaults to `false`.
|
||||
- `workerCreationDelay` <[number]> Time between creation of two workers. Set this to a value like `100` (0.1 seconds) in case you want some time to pass before another worker is created. You can use this to prevent a network peak right at the start. Defaults to `0` (no delay).
|
||||
- `puppeteer` <[Object]> In case you want to use a different puppeteer library (like [puppeteer-core](https://github.com/GoogleChrome/puppeteer/blob/master/docs/api.md#puppeteer-vs-puppeteer-core) or [puppeteer-extra](https://github.com/berstend/puppeteer-extra)), pass the object here. If not set, will default to using puppeteer. When using `puppeteer-core`, make sure to also provide `puppeteerOptions.executablePath`.
|
||||
- returns: <[Promise]<[Cluster]>>
|
||||
|
||||
The method launches a cluster instance.
|
||||
|
||||
#### cluster.task(taskFunction)
|
||||
- `taskFunction` <[function]([string]|[Object], [Page], [Object])> Sets the function, which will be called for each job. The function will be called with an object having the following fields:
|
||||
- `page` <[Page]> The page given by puppeteer, which provides methods to interact with a single tab in Chromium.
|
||||
- `data` <any> The data of the job you provided to [Cluster.queue].
|
||||
- `worker` <[Object]> An object containing information about the worker executing the current job.
|
||||
- `id` <[number]> ID of the worker. Worker IDs start at 0.
|
||||
- returns: <[Promise]>
|
||||
|
||||
Specifies a task for the cluster. A task is called for each job you queue via [Cluster.queue]. Alternatively you can directly queue the function that you want to be executed. See [Cluster.queue] for an example.
|
||||
|
||||
#### cluster.queue([data] [, taskFunction])
|
||||
- `data` <any> Data to be queued. This might be your URL (a string) or a more complex object containing data. The data given will be provided to your task function(s). See [examples] for a more complex usage of this argument.
|
||||
- `taskFunction` <[function]> Function like the one given to [Cluster.task]. If a function is provided, this function will be called (only for this job) instead of the function provided to [Cluster.task]. The function will be called with an object having the following fields:
|
||||
- `page` <[Page]> The page given by puppeteer, which provides methods to interact with a single tab in Chromium.
|
||||
- `data` <any> The data of the job you provided as first argument to [Cluster.queue]. This might be `undefined` in case you only specified a function.
|
||||
- `worker` <[Object]> An object containing information about the worker executing the current job.
|
||||
- `id` <[number]> ID of the worker. Worker IDs start at 0.
|
||||
- returns: <[Promise]>
|
||||
|
||||
Puts a URL or data into the queue. Alternatively (or even additionally) you can queue functions. See the examples about function queuing for more information: ([Simple function queuing](examples/function-queuing-simple.js), [complex function queuing](examples/function-queuing-complex.js)).
|
||||
|
||||
Be aware that this function only returns a Promise for backward compatibility reasons. This function does not run asynchronously and will immediately return.
|
||||
|
||||
#### cluster.execute([data] [, taskFunction])
|
||||
- `data` <any> Data to be queued. This might be your URL (a string) or a more complex object containing data. The data given will be provided to your task function(s). See [examples] for a more complex usage of this argument.
|
||||
- `taskFunction` <[function]> Function like the one given to [Cluster.task]. If a function is provided, this function will be called (only for this job) instead of the function provided to [Cluster.task]. The function will be called with an object having the following fields:
|
||||
- `page` <[Page]> The page given by puppeteer, which provides methods to interact with a single tab in Chromium.
|
||||
- `data` <any> The data of the job you provided as first argument to [Cluster.queue]. This might be `undefined` in case you only specified a function.
|
||||
- `worker` <[Object]> An object containing information about the worker executing the current job.
|
||||
- `id` <[number]> ID of the worker. Worker IDs start at 0.
|
||||
- returns: <[Promise]>
|
||||
|
||||
Works like [Cluster.queue], but this function returns a Promise which will be resolved after the task is executed. That means, that the job is still queued, but the script will wait for it to be finished. In case an error happens during the execution, this function will reject the Promise with the thrown error. There will be no "taskerror" event fired. In addition, tasks queued via execute will ignore "retryLimit" and "retryDelay". For an example see the [Execute example](examples/execute.js).
|
||||
|
||||
#### cluster.idle()
|
||||
- returns: <[Promise]>
|
||||
|
||||
Promise is resolved when the queue becomes empty.
|
||||
|
||||
#### cluster.close()
|
||||
- returns: <[Promise]>
|
||||
|
||||
Closes the cluster and all opened Chromium instances including all open pages (if any were opened). It is recommended to run [Cluster.idle](#clusteridle) before calling this function. The [Cluster] object itself is considered to be disposed and cannot be used anymore.
|
||||
|
||||
## License
|
||||
|
||||
[MIT license](./LICENSE).
|
||||
|
||||
|
||||
|
||||
[Cluster.queue]: #clusterqueuedata--taskfunction "Cluster.queue"
|
||||
[Cluster.execute]: #clusterexecutedata--taskfunction "Cluster.execute"
|
||||
[Cluster.task]: #clustertasktaskfunction "Cluster.task"
|
||||
[Cluster]: #class-cluster "Cluster"
|
||||
|
||||
[Puppeteer]: https://github.com/GoogleChrome/puppeteer "Puppeteer"
|
||||
[Page]: https://github.com/GoogleChrome/puppeteer/blob/v1.5.0/docs/api.md#class-page "Page"
|
||||
[puppeteer.launch]: https://github.com/GoogleChrome/puppeteer/blob/v1.5.0/docs/api.md#puppeteerlaunchoptions "puppeteer.launch"
|
||||
|
||||
[function]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function "Function"
|
||||
[string]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type "String"
|
||||
[number]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type "Number"
|
||||
[Promise]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise "Promise"
|
||||
[boolean]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type "Boolean"
|
||||
[Object]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object "Object"
|
||||
[Error]: https://nodejs.org/api/errors.html#errors_class_error "Error"
|
||||
[Array]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array "Array"
|
||||
79
node_modules/puppeteer-cluster/dist/Cluster.d.ts
generated
vendored
Normal file
79
node_modules/puppeteer-cluster/dist/Cluster.d.ts
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
/// <reference types="node" />
|
||||
import type { Page, PuppeteerNodeLaunchOptions } from 'puppeteer';
|
||||
import { EventEmitter } from 'events';
|
||||
import { ConcurrencyImplementationClassType } from './concurrency/ConcurrencyImplementation';
|
||||
interface ClusterOptions {
|
||||
concurrency: number | ConcurrencyImplementationClassType;
|
||||
maxConcurrency: number;
|
||||
workerCreationDelay: number;
|
||||
puppeteerOptions: PuppeteerNodeLaunchOptions;
|
||||
perBrowserOptions: PuppeteerNodeLaunchOptions[] | undefined;
|
||||
monitor: boolean;
|
||||
timeout: number;
|
||||
retryLimit: number;
|
||||
retryDelay: number;
|
||||
skipDuplicateUrls: boolean;
|
||||
sameDomainDelay: number;
|
||||
puppeteer: any;
|
||||
}
|
||||
type Partial<T> = {
|
||||
[P in keyof T]?: T[P];
|
||||
};
|
||||
type ClusterOptionsArgument = Partial<ClusterOptions>;
|
||||
interface TaskFunctionArguments<JobData> {
|
||||
page: Page;
|
||||
data: JobData;
|
||||
worker: {
|
||||
id: number;
|
||||
};
|
||||
}
|
||||
export type TaskFunction<JobData, ReturnData> = (arg: TaskFunctionArguments<JobData>) => Promise<ReturnData>;
|
||||
export default class Cluster<JobData = any, ReturnData = any> extends EventEmitter {
|
||||
static CONCURRENCY_PAGE: number;
|
||||
static CONCURRENCY_CONTEXT: number;
|
||||
static CONCURRENCY_BROWSER: number;
|
||||
private options;
|
||||
private perBrowserOptions;
|
||||
private workers;
|
||||
private workersAvail;
|
||||
private workersBusy;
|
||||
private workersStarting;
|
||||
private allTargetCount;
|
||||
private jobQueue;
|
||||
private errorCount;
|
||||
private taskFunction;
|
||||
private idleResolvers;
|
||||
private waitForOneResolvers;
|
||||
private browser;
|
||||
private isClosed;
|
||||
private startTime;
|
||||
private nextWorkerId;
|
||||
private monitoringInterval;
|
||||
private display;
|
||||
private duplicateCheckUrls;
|
||||
private lastDomainAccesses;
|
||||
private systemMonitor;
|
||||
private checkForWorkInterval;
|
||||
static launch(options: ClusterOptionsArgument): Promise<Cluster<any, any>>;
|
||||
private constructor();
|
||||
private init;
|
||||
private launchWorker;
|
||||
task(taskFunction: TaskFunction<JobData, ReturnData>): Promise<void>;
|
||||
private nextWorkCall;
|
||||
private workCallTimeout;
|
||||
private work;
|
||||
private doWork;
|
||||
private lastLaunchedWorkerTime;
|
||||
private allowedToStartWorker;
|
||||
private isTaskFunction;
|
||||
private queueJob;
|
||||
queue(data: JobData, taskFunction?: TaskFunction<JobData, ReturnData>): Promise<void>;
|
||||
queue(taskFunction: TaskFunction<JobData, ReturnData>): Promise<void>;
|
||||
execute(data: JobData, taskFunction?: TaskFunction<JobData, ReturnData>): Promise<ReturnData>;
|
||||
execute(taskFunction: TaskFunction<JobData, ReturnData>): Promise<ReturnData>;
|
||||
idle(): Promise<void>;
|
||||
waitForOne(): Promise<JobData>;
|
||||
close(): Promise<void>;
|
||||
private monitor;
|
||||
}
|
||||
export {};
|
||||
416
node_modules/puppeteer-cluster/dist/Cluster.js
generated
vendored
Normal file
416
node_modules/puppeteer-cluster/dist/Cluster.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
node_modules/puppeteer-cluster/dist/Display.d.ts
generated
vendored
Normal file
7
node_modules/puppeteer-cluster/dist/Display.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
export default class Display {
|
||||
private lastLinesCount;
|
||||
private linesCount;
|
||||
log(str: string): Promise<void>;
|
||||
resetCursor(): Promise<void>;
|
||||
close(): void;
|
||||
}
|
||||
46
node_modules/puppeteer-cluster/dist/Display.js
generated
vendored
Normal file
46
node_modules/puppeteer-cluster/dist/Display.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const CLEAR_LINE = '\x1B[K';
|
||||
class Display {
|
||||
constructor() {
|
||||
this.lastLinesCount = 0;
|
||||
this.linesCount = 0;
|
||||
}
|
||||
log(str) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// We create an empty line at the start so that any console.log calls
|
||||
// from within the script are above our output.
|
||||
if (this.linesCount === 0) {
|
||||
console.log(CLEAR_LINE); // erases the current line
|
||||
this.linesCount += 1;
|
||||
}
|
||||
// Strip lines that are too long
|
||||
const strToLog = str.substr(0, 78);
|
||||
console.log(`${CLEAR_LINE}${strToLog}`);
|
||||
this.linesCount += 1;
|
||||
});
|
||||
}
|
||||
resetCursor() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// move cursor up to draw over out output
|
||||
process.stdout.write(`\x1B[${this.linesCount}A`);
|
||||
this.lastLinesCount = this.linesCount;
|
||||
this.linesCount = 0;
|
||||
});
|
||||
}
|
||||
close() {
|
||||
// move cursor down so that console output stays
|
||||
process.stdout.write(`\x1B[${this.lastLinesCount}B`);
|
||||
}
|
||||
}
|
||||
exports.default = Display;
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiRGlzcGxheS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9EaXNwbGF5LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7O0FBQ0EsTUFBTSxVQUFVLEdBQUcsUUFBUSxDQUFDO0FBRTVCLE1BQXFCLE9BQU87SUFBNUI7UUFFWSxtQkFBYyxHQUFXLENBQUMsQ0FBQztRQUMzQixlQUFVLEdBQVcsQ0FBQyxDQUFDO0lBNEJuQyxDQUFDO0lBMUJnQixHQUFHLENBQUMsR0FBVzs7WUFDeEIscUVBQXFFO1lBQ3JFLCtDQUErQztZQUMvQyxJQUFJLElBQUksQ0FBQyxVQUFVLEtBQUssQ0FBQyxFQUFFLENBQUM7Z0JBQ3hCLE9BQU8sQ0FBQyxHQUFHLENBQUMsVUFBVSxDQUFDLENBQUMsQ0FBQywwQkFBMEI7Z0JBQ25ELElBQUksQ0FBQyxVQUFVLElBQUksQ0FBQyxDQUFDO1lBQ3pCLENBQUM7WUFFRCxnQ0FBZ0M7WUFDaEMsTUFBTSxRQUFRLEdBQUcsR0FBRyxDQUFDLE1BQU0sQ0FBQyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBQUM7WUFDbkMsT0FBTyxDQUFDLEdBQUcsQ0FBQyxHQUFHLFVBQVUsR0FBRyxRQUFRLEVBQUUsQ0FBQyxDQUFDO1lBQ3hDLElBQUksQ0FBQyxVQUFVLElBQUksQ0FBQyxDQUFDO1FBQ3pCLENBQUM7S0FBQTtJQUVZLFdBQVc7O1lBQ3BCLHlDQUF5QztZQUN6QyxPQUFPLENBQUMsTUFBTSxDQUFDLEtBQUssQ0FBQyxRQUFRLElBQUksQ0FBQyxVQUFVLEdBQUcsQ0FBQyxDQUFDO1lBQ2pELElBQUksQ0FBQyxjQUFjLEdBQUcsSUFBSSxDQUFDLFVBQVUsQ0FBQztZQUN0QyxJQUFJLENBQUMsVUFBVSxHQUFHLENBQUMsQ0FBQztRQUN4QixDQUFDO0tBQUE7SUFFTSxLQUFLO1FBQ1IsZ0RBQWdEO1FBQ2hELE9BQU8sQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLFFBQVEsSUFBSSxDQUFDLGNBQWMsR0FBRyxDQUFDLENBQUM7SUFDekQsQ0FBQztDQUVKO0FBL0JELDBCQStCQyJ9
|
||||
18
node_modules/puppeteer-cluster/dist/Job.d.ts
generated
vendored
Normal file
18
node_modules/puppeteer-cluster/dist/Job.d.ts
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
import { TaskFunction } from './Cluster';
|
||||
export type ExecuteResolve = (value?: any) => void;
|
||||
export type ExecuteReject = (reason?: any) => void;
|
||||
export interface ExecuteCallbacks {
|
||||
resolve: (value?: any) => void;
|
||||
reject: ExecuteReject;
|
||||
}
|
||||
export default class Job<JobData, ReturnData> {
|
||||
data?: JobData;
|
||||
taskFunction: TaskFunction<JobData, ReturnData> | undefined;
|
||||
executeCallbacks: ExecuteCallbacks | undefined;
|
||||
private lastError;
|
||||
tries: number;
|
||||
constructor(data?: JobData, taskFunction?: TaskFunction<JobData, ReturnData>, executeCallbacks?: ExecuteCallbacks);
|
||||
getUrl(): string | undefined;
|
||||
getDomain(): string | undefined;
|
||||
addError(error: Error): void;
|
||||
}
|
||||
46
node_modules/puppeteer-cluster/dist/Job.js
generated
vendored
Normal file
46
node_modules/puppeteer-cluster/dist/Job.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const url_1 = require("url");
|
||||
class Job {
|
||||
constructor(data, taskFunction, executeCallbacks) {
|
||||
this.lastError = null;
|
||||
this.tries = 0;
|
||||
this.data = data;
|
||||
this.taskFunction = taskFunction;
|
||||
this.executeCallbacks = executeCallbacks;
|
||||
}
|
||||
getUrl() {
|
||||
if (!this.data) {
|
||||
return undefined;
|
||||
}
|
||||
if (typeof this.data === 'string') {
|
||||
return this.data;
|
||||
}
|
||||
if (typeof this.data.url === 'string') {
|
||||
return this.data.url;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
getDomain() {
|
||||
// TODO use tld.js to restrict to top-level domain?
|
||||
const urlStr = this.getUrl();
|
||||
if (urlStr) {
|
||||
try {
|
||||
const url = new url_1.URL(urlStr);
|
||||
return url.hostname || undefined;
|
||||
}
|
||||
catch (e) {
|
||||
// if urlStr is not a valid URL this might throw
|
||||
// but we leave this to the user
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
addError(error) {
|
||||
this.tries += 1;
|
||||
this.lastError = error;
|
||||
}
|
||||
}
|
||||
exports.default = Job;
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiSm9iLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL0pvYi50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOztBQUNBLDZCQUEwQjtBQVUxQixNQUFxQixHQUFHO0lBU3BCLFlBQ0ksSUFBYyxFQUNkLFlBQWdELEVBQ2hELGdCQUFtQztRQU4vQixjQUFTLEdBQWlCLElBQUksQ0FBQztRQUNoQyxVQUFLLEdBQVcsQ0FBQyxDQUFDO1FBT3JCLElBQUksQ0FBQyxJQUFJLEdBQUcsSUFBSSxDQUFDO1FBQ2pCLElBQUksQ0FBQyxZQUFZLEdBQUcsWUFBWSxDQUFDO1FBQ2pDLElBQUksQ0FBQyxnQkFBZ0IsR0FBRyxnQkFBZ0IsQ0FBQztJQUM3QyxDQUFDO0lBRU0sTUFBTTtRQUNULElBQUksQ0FBQyxJQUFJLENBQUMsSUFBSSxFQUFFLENBQUM7WUFDYixPQUFPLFNBQVMsQ0FBQztRQUNyQixDQUFDO1FBQ0QsSUFBSSxPQUFPLElBQUksQ0FBQyxJQUFJLEtBQUssUUFBUSxFQUFFLENBQUM7WUFDaEMsT0FBTyxJQUFJLENBQUMsSUFBSSxDQUFDO1FBQ3JCLENBQUM7UUFDRCxJQUFJLE9BQVEsSUFBSSxDQUFDLElBQVksQ0FBQyxHQUFHLEtBQUssUUFBUSxFQUFFLENBQUM7WUFDN0MsT0FBUSxJQUFJLENBQUMsSUFBWSxDQUFDLEdBQUcsQ0FBQztRQUNsQyxDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUM7SUFDckIsQ0FBQztJQUVNLFNBQVM7UUFDWixtREFBbUQ7UUFDbkQsTUFBTSxNQUFNLEdBQUcsSUFBSSxDQUFDLE1BQU0sRUFBRSxDQUFDO1FBQzdCLElBQUksTUFBTSxFQUFFLENBQUM7WUFDVCxJQUFJLENBQUM7Z0JBQ0QsTUFBTSxHQUFHLEdBQUcsSUFBSSxTQUFHLENBQUMsTUFBTSxDQUFDLENBQUM7Z0JBQzVCLE9BQU8sR0FBRyxDQUFDLFFBQVEsSUFBSSxTQUFTLENBQUM7WUFDckMsQ0FBQztZQUFDLE9BQU8sQ0FBTSxFQUFFLENBQUM7Z0JBQ2QsZ0RBQWdEO2dCQUNoRCxnQ0FBZ0M7Z0JBQ2hDLE9BQU8sU0FBUyxDQUFDO1lBQ3JCLENBQUM7UUFDTCxDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUM7SUFDckIsQ0FBQztJQUVNLFFBQVEsQ0FBQyxLQUFZO1FBQ3hCLElBQUksQ0FBQyxLQUFLLElBQUksQ0FBQyxDQUFDO1FBQ2hCLElBQUksQ0FBQyxTQUFTLEdBQUcsS0FBSyxDQUFDO0lBQzNCLENBQUM7Q0FFSjtBQXJERCxzQkFxREMifQ==
|
||||
11
node_modules/puppeteer-cluster/dist/Queue.d.ts
generated
vendored
Normal file
11
node_modules/puppeteer-cluster/dist/Queue.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
interface QueueOptions {
|
||||
delayUntil?: number;
|
||||
}
|
||||
export default class Queue<T> {
|
||||
private list;
|
||||
private delayedItems;
|
||||
size(): number;
|
||||
push(item: T, options?: QueueOptions): void;
|
||||
shift(): T | undefined;
|
||||
}
|
||||
export {};
|
||||
30
node_modules/puppeteer-cluster/dist/Queue.js
generated
vendored
Normal file
30
node_modules/puppeteer-cluster/dist/Queue.js
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
class Queue {
|
||||
constructor() {
|
||||
this.list = [];
|
||||
this.delayedItems = 0;
|
||||
}
|
||||
size() {
|
||||
return this.list.length + this.delayedItems;
|
||||
}
|
||||
push(item, options = {}) {
|
||||
if (options && options.delayUntil && options.delayUntil > Date.now()) {
|
||||
this.delayedItems += 1;
|
||||
setTimeout(() => {
|
||||
this.delayedItems -= 1;
|
||||
this.list.push(item);
|
||||
}, (options.delayUntil - Date.now()));
|
||||
}
|
||||
else {
|
||||
this.list.push(item);
|
||||
}
|
||||
}
|
||||
// Care, this function might actually return undefined even though size() returns a value > 0
|
||||
// Reason is, that there might be delayedItems (checkout QueueOptions.delayUntil)
|
||||
shift() {
|
||||
return this.list.shift();
|
||||
}
|
||||
}
|
||||
exports.default = Queue;
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiUXVldWUuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvUXVldWUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7QUFNQSxNQUFxQixLQUFLO0lBQTFCO1FBRVksU0FBSSxHQUFRLEVBQUUsQ0FBQztRQUNmLGlCQUFZLEdBQVcsQ0FBQyxDQUFDO0lBMkJyQyxDQUFDO0lBekJVLElBQUk7UUFDUCxPQUFPLElBQUksQ0FBQyxJQUFJLENBQUMsTUFBTSxHQUFHLElBQUksQ0FBQyxZQUFZLENBQUM7SUFDaEQsQ0FBQztJQUVNLElBQUksQ0FBQyxJQUFPLEVBQUUsVUFBd0IsRUFBRTtRQUMzQyxJQUFJLE9BQU8sSUFBSSxPQUFPLENBQUMsVUFBVSxJQUFJLE9BQU8sQ0FBQyxVQUFVLEdBQUcsSUFBSSxDQUFDLEdBQUcsRUFBRSxFQUFFLENBQUM7WUFDbkUsSUFBSSxDQUFDLFlBQVksSUFBSSxDQUFDLENBQUM7WUFDdkIsVUFBVSxDQUNOLEdBQUcsRUFBRTtnQkFDRCxJQUFJLENBQUMsWUFBWSxJQUFJLENBQUMsQ0FBQztnQkFDdkIsSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUFDLENBQUM7WUFDekIsQ0FBQyxFQUNELENBQUMsT0FBTyxDQUFDLFVBQVUsR0FBRyxJQUFJLENBQUMsR0FBRyxFQUFFLENBQUMsQ0FDcEMsQ0FBQztRQUNOLENBQUM7YUFBTSxDQUFDO1lBQ0osSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUFDLENBQUM7UUFDekIsQ0FBQztJQUNMLENBQUM7SUFFRCw2RkFBNkY7SUFDN0YsaUZBQWlGO0lBQzFFLEtBQUs7UUFDUixPQUFPLElBQUksQ0FBQyxJQUFJLENBQUMsS0FBSyxFQUFFLENBQUM7SUFDN0IsQ0FBQztDQUVKO0FBOUJELHdCQThCQyJ9
|
||||
11
node_modules/puppeteer-cluster/dist/SystemMonitor.d.ts
generated
vendored
Normal file
11
node_modules/puppeteer-cluster/dist/SystemMonitor.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
export default class SystemMonitor {
|
||||
private cpuUsage;
|
||||
private memoryUsage;
|
||||
private loads;
|
||||
private interval;
|
||||
init(): Promise<void>;
|
||||
close(): void;
|
||||
private calcLoad;
|
||||
getCpuUsage(): number;
|
||||
getMemoryUsage(): number;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user