Best JavaScript code snippet using stryker-parent
test.js
Source:test.js
const fs = require('fs');
const stream = require('stream');
const JSONStream = require('json-stream');
const aw = require('../lib/awaitify-stream.js');
const assert = require('assert');
const mainTestFile = 'test/txt/threeConstantLines.lftxt';
const expectedLines = ['foo\n', 'bar\n', 'baz\n'];
const constantLineLength = 4; // 3 letters and a newline character.
function delay(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  })
}
// Do a second loop over all the tests to confirm that delays from asynchronous work don't affect reading or writing.
for (let readSlowly of [false, true]) {
  describe(`awaitify-stream (readSlowly: ${readSlowly})`, function() {
    describe(`just awaitify-stream`, function() {
      // Read constant-length lines without byline.
      async function readConstantLines(testFile) {
        let readStream = fs.createReadStream(testFile);
        readStream.setEncoding('utf8');
        let reader = aw.createReader(readStream);
        let line, lines = [];
        while (null !== (line = await reader.readAsync(constantLineLength))) {
          lines.push(line);
          if (readSlowly) {
            await delay(100);
          }
        }
        return lines;
      }
      it('should read test.js successfully', async function () {
        const testFile = 'test/test.js';
        let readStream = fs.createReadStream(testFile);
        let reader = aw.createReader(readStream);
        let readableListenerCount = readStream.listenerCount('readable');
        let endListenerCount = readStream.listenerCount('end');
        let errorListenerCount = readStream.listenerCount('error');
        let chunk, chunkCount = 0;
        while (null !== (chunk = await reader.readAsync())) {
          chunkCount++;
          if (readSlowly) {
            await delay(100);
          }
        }
        assert.notEqual(chunkCount, 0, 'test.js should be one chunk or more.');
        assert.equal(readStream.listenerCount('readable'), readableListenerCount);
        assert.equal(readStream.listenerCount('end'), endListenerCount);
        assert.equal(readStream.listenerCount('error'), errorListenerCount);
      });
      it('should propagate errors while reading', async function () {
        let readStream = new stream.Readable();
        readStream.read = function () {
          this.emit('error', new Error('dummy'));
          return null;
        };
        let reader = aw.createReader(readStream);
        try {
          await reader.readAsync();
          assert.fail('reader should throw sync read errors');
        } catch (ex) {
          assert.equal(ex.message, 'dummy');
        }
      });
      it('should propagate errors while waiting for read', async function () {
        let readStream = new stream.Readable();
        readStream.read = function () {
          process.nextTick(() => {
            this.emit('error', new Error('dummy'));
          });
          return null;
        };
        let reader = aw.createReader(readStream);
        try {
          await reader.readAsync();
          assert.fail('reader should throw async read errors');
        } catch (ex) {
          assert.equal(ex.message, 'dummy');
        }
      });
      it('should throw a given error only once', async function () {
        let readStream = JSONStream();
        let chunkCount = 0, errorCount = 0;
        let reader = aw.createReader(readStream);
        async function read() {
          let chunk, done;
          do {
            try {
              chunk = await reader.readAsync();
              if (chunk == null) {
                done = true;
              } else {
                chunkCount++;
              }
            } catch (ex) {
              errorCount++;
              assert.equal(errorCount, 1);
              assert.equal(ex.message, 'dummy');
            }
            if (readSlowly) {
              await delay(100);
            }
          } while (!done);
        }
        const readPromise = read();
        readStream.write('1\n');
        readStream.emit('error', new Error('dummy'));
        readStream.write('2\n');
        readStream.write('3\n');
        readStream.end();
        await readPromise;
        assert.equal(errorCount, 1, "One error should be caught.");
        assert.equal(chunkCount, 3, "Three chunks should be read.");
      });
      it('should read an empty file', async function() {
        const testFile = 'test/txt/empty.txt';
        let readStream = fs.createReadStream(testFile);
        let reader = aw.createReader(readStream);
        let chunk, chunkCount = 0;
        while (null !== (chunk = await reader.readAsync())) {
          chunkCount++;
          if (readSlowly) {
            await delay(100);
          }
        }
        assert.equal(chunkCount, 0, 'Empty file should have zero chunks.');
      });
      it('should read a one character file', async function () {
        const testFile = 'test/txt/oneChar.txt';
        let readStream = fs.createReadStream(testFile);
        let reader = aw.createReader(readStream);
        let chunk, chunkCount = 0;
        while (null !== (chunk = await reader.readAsync())) {
          chunkCount++;
          if (readSlowly) {
            await delay(100);
          }
        }
        assert.equal(chunkCount, 1, 'One character file should have one chunk.');
      });
      it('should read constant length lines', async function () {
        let lines = await readConstantLines(mainTestFile);
        assert.deepEqual(lines, expectedLines);
      });
      it('should write a file', async function () {
        const testFile = 'test/writeTest1.test_output';
        let writeStream = fs.createWriteStream(testFile);
        let writer = aw.createWriter(writeStream);
        let drainListenerCount = writeStream.listenerCount('drain');
        let errorListenerCount = writeStream.listenerCount('error');
        for (let i = 0; i < expectedLines.length; i++) {
          await writer.writeAsync(expectedLines[i]);
          if (readSlowly) {
            await delay(100);
          }
        }
        // Indicate that we're done, and wait for all the data to be flushed and the 'finish' event.
        await writer.endAsync();
        // If we waited for everything to flush, then we won't lose any data by calling close.
        writeStream.close();
        // Check the contents of the file we just wrote.
        let lines = await readConstantLines(testFile);
        assert.deepEqual(lines, expectedLines);
        assert.equal(writeStream.listenerCount('drain'), drainListenerCount);
        assert.equal(writeStream.listenerCount('error'), errorListenerCount);
      });
      it('should propagate errors while writing', async function () {
        let writeStream = new stream.Writable();
        writeStream.write = function () {
          this.emit('error', new Error('dummy'));
          return null;
        };
        let writer = aw.createWriter(writeStream);
        try {
          await writer.writeAsync('foobar');
          assert.fail('writer should throw sync write errors');
        } catch (ex) {
          assert.equal(ex.message, 'dummy');
        }
      });
      it('should propagate errors while waiting for write', async function () {
        let writeStream = new stream.Writable();
        writeStream.write = function () {
          return false;
        };
        let writer = aw.createWriter(writeStream);
        let drainListenerCount = writeStream.listenerCount('drain');
        let errorListenerCount = writeStream.listenerCount('error');
        try {
          await writer.writeAsync('foobar');
          process.nextTick(() => {
            writeStream.emit('error', new Error('dummy'));
          });
          await writer.writeAsync('foobar');
          assert.fail('writer should throw async writer errors');
        } catch (ex) {
          assert.equal(ex.message, 'dummy');
        }
        assert.equal(writeStream.listenerCount('drain'), drainListenerCount);
        assert.equal(writeStream.listenerCount('error'), errorListenerCount);
      });
      it('should propagate errors while ending', async function () {
        let writeStream = new stream.Writable();
        writeStream.end = function () {
          this.emit('error', new Error('dummy'));
        };
        let writer = aw.createWriter(writeStream);
        try {
          await writer.endAsync();
          assert.fail('writer should throw sync end errors');
        } catch (ex) {
          assert.equal(ex.message, 'dummy');
        }
      });
      it('should propagate errors while waiting for end', async function () {
        let writeStream = new stream.Writable();
        writeStream.end = function () {
          process.nextTick(() => {
            this.emit('error', new Error('dummy'));
          });
        };
        let writer = aw.createWriter(writeStream);
        let finishListenerCount = writeStream.listenerCount('finish');
        let errorListenerCount = writeStream.listenerCount('error');
        try {
          await writer.endAsync();
          assert.fail('writer should throw sync end errors');
        } catch (ex) {
          assert.equal(ex.message, 'dummy');
        }
        assert.equal(writeStream.listenerCount('finish'), finishListenerCount);
        assert.equal(writeStream.listenerCount('error'), errorListenerCount);
      });
      it('should augment streams with new functions', async function () {
        const testFile = 'test/writeTest2.test_output';
        let writeStream = aw.addAsyncFunctions(fs.createWriteStream(testFile));
        for (let i = 0; i < expectedLines.length; i++) {
          await writeStream.writeAsync(expectedLines[i]);
          if (readSlowly) {
            await delay(100);
          }
        }
        // Indicate that we're done, and wait for all the data to be flushed and the 'finish' event.
        await writeStream.endAsync();
        // If we waited for everything to flush, then we won't lose any data by calling close.
        writeStream.close();
        // Check the contents of the file we just wrote.
        let readStream = aw.addAsyncFunctions(fs.createReadStream(testFile));
        readStream.setEncoding('utf8');
        let line, lines = [];
        while (null !== (line = await readStream.readAsync(constantLineLength))) {
          lines.push(line);
          if (readSlowly) {
            await delay(100);
          }
        }
        assert.deepEqual(lines, expectedLines);
      });
    });
    describe('awaitify-stream in conjunction with byline', function() {
      const byline = require('byline');
      async function readLines(testFile) {
        let readStream = fs.createReadStream(testFile);
        readStream.setEncoding('utf8');
        let lineStream = byline.createStream(readStream, { keepEmptyLines: true });
        let reader = aw.createReader(lineStream);
        let line, lines = [];
        while (null !== (line = await reader.readAsync())) {
          lines.push(line);
          if (readSlowly) {
            await delay(100);
          }
        }
        return lines;
      }
      it('should read an empty file', async function () {
        let lines = await readLines('test/txt/empty.txt');
        assert.deepEqual(lines, [], 'Empty file should have zero lines.');
      });
      it('should read a one character file', async function () {
        let lines = await readLines('test/txt/oneChar.txt');
        assert.deepEqual(lines, ['a'], 'One character file should have one line with just "a".');
      });
      it('should read a newline file', async function () {
        let lines = await readLines('test/txt/newline.txt');
        assert.deepEqual(lines, ['', '']);
      });
      it('should read a one line file', async function () {
        let lines = await readLines('test/txt/oneChar_and_newline.txt');
        assert.deepEqual(lines, ['a', '']);
      });
      it('should read a four-line file', async function () {
        let lines = await readLines(mainTestFile);
        assert.deepEqual(lines, ['foo', 'bar', 'baz', '']);
      });
      it('should read a four-line file without a trailing EOL', async function() {
        let lines = await readLines('test/txt/threeConstantLines_no_eol.lftxt');
        assert.deepEqual(lines, ['foo', 'bar', 'baz']);
      });
      it('should read blank lines', async function () {
        let lines = await readLines('test/txt/lines_and_blanks.txt');
        assert.deepEqual(lines, ['foo', 'bar', 'baz', '', 'qaz', '', 'fin', '']);
      });
      it('should read test.js successfully', async function () {
        let lines = await readLines('test/test.js');
        let matchingLines = lines.filter((line) => {
          return line.indexOf('should read test.js successfully') > -1;
        })
        assert(lines.length > 100, 'There are at least 100 lines in this test file.');
        assert.equal(matchingLines.length, 4,
          'There should be four lines that say "should read test.js successfully"');
      });
    });
  });
...
readLines.js
Source:readLines.js
import { module, test } from "qunit";
import sinon from "sinon";
import { EventEmitter } from "events";
import readLinesInjector from "inject-loader?fs!utils/node/fs/readLines";
module( "utils/node/fs/readLines", {
  beforeEach() {
    this.fakeTimer = sinon.useFakeTimers({
      toFake: [ "setTimeout", "clearTimeout" ],
      target: window
    });
  },
  afterEach() {
    this.fakeTimer.restore();
  }
});
test( "Invalid files", async function( assert ) {
  const error = new Error( "File not found" );
  const closeSpy = sinon.spy();
  const readStream = new EventEmitter();
  readStream.close = closeSpy;
  const createReadStreamStub = sinon.stub().returns( readStream );
  const { default: readLines } = readLinesInjector({
    fs: {
      createReadStream: createReadStreamStub
    }
  });
  await assert.rejects(
    async () => {
      const promise = readLines( "foo" );
      readStream.emit( "error", error );
      await promise;
    },
    error,
    "Rejects if file can't be read"
  );
  assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
  assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
});
test( "Timeout", async function( assert ) {
  const closeSpy = sinon.spy();
  const readStream = new EventEmitter();
  readStream.close = closeSpy;
  const createReadStreamStub = sinon.stub().returns( readStream );
  const { default: readLines } = readLinesInjector({
    fs: {
      createReadStream: createReadStreamStub
    }
  });
  await assert.rejects(
    async () => {
      const promise = readLines( "foo", null, 1, 1000 );
      this.fakeTimer.tick( 1000 );
      await promise;
    },
    new Error( "Timeout" ),
    "Rejects if read time has expired"
  );
  assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
  assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
});
test( "File validation", async assert => {
  const sandbox = sinon.createSandbox();
  const closeSpy = sandbox.spy();
  const readStream = new EventEmitter();
  readStream.close = closeSpy;
  const createReadStreamStub = sandbox.stub().returns( readStream );
  const { default: readLines } = readLinesInjector({
    fs: {
      createReadStream: createReadStreamStub
    }
  });
  try {
    const promise = readLines( "foo", /foo/ );
    readStream.emit( "data", "bar\n" );
    await promise;
  } catch ([ data, buffer ]) {
    assert.propEqual( data, [ null ], "Doesn't match the line" );
    assert.propEqual( buffer, [ "bar" ], "Returns the whole line buffer" );
  }
  assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
  assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
  sandbox.resetHistory();
  await ( async () => {
    const promise = readLines( "foo", /f(oo)/ );
    readStream.emit( "data", "foo\n" );
    const [ [ data ], buffer ] = await promise;
    assert.propEqual( data, [ "foo", "oo" ], "Matches the line" );
    assert.propEqual( buffer, [ "foo" ], "Returns the whole line buffer" );
    assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
    assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
  })();
  sandbox.resetHistory();
  await ( async () => {
    const promise = readLines( "foo" );
    readStream.emit( "data", "foo\n" );
    const [ [ data ], buffer ] = await promise;
    assert.propEqual( data, [ "foo" ], "Validates the line with default validation" );
    assert.propEqual( buffer, [ "foo" ], "Returns the whole line buffer" );
    assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
    assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
  })();
  sandbox.resetHistory();
  await ( async () => {
    const validation = ( line, index ) => {
      return index === 1
        ? /bar/.exec( line )
        : false;
    };
    const promise = readLines( "foo", validation, 2 );
    readStream.emit( "data", "foo\n" );
    readStream.emit( "data", "bar\n" );
    readStream.emit( "data", "baz\n" );
    readStream.emit( "end" );
    const [ [ dataOne, dataTwo, dataThree ], buffer ] = await promise;
    assert.strictEqual( dataOne, false, "Doesn't match the first line" );
    assert.propEqual( dataTwo, [ "bar" ], "Matches the second line" );
    assert.strictEqual( dataThree, undefined, "Ignores the third line" );
    assert.propEqual( buffer, [ "foo", "bar" ], "Returns the whole line buffer" );
    assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
    assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
  })();
  sandbox.resetHistory();
  await ( async () => {
    const validation = ( line, index ) => {
      if ( index === 0 ) {
        return /foo/.exec( line );
      } else {
        return line.length === 3;
      }
    };
    const promise = readLines( "foo", validation, 2 );
    readStream.emit( "data", "foo\n" );
    readStream.emit( "data", "bar\n" );
    const [ [ dataOne, dataTwo ], buffer ] = await promise;
    assert.propEqual( dataOne, [ "foo" ], "Matches the first line" );
    assert.strictEqual( dataTwo, true, "Validates the second line" );
    assert.propEqual( buffer, [ "foo", "bar" ], "Returns the whole line buffer" );
    assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
    assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
  })();
  sandbox.resetHistory();
  await ( async () => {
    const promise = readLines( "foo", /foo/, 2 );
    readStream.emit( "data", "bar\nfoo\n" );
    const [ [ dataOne, dataTwo ], buffer ] = await promise;
    assert.strictEqual( dataOne, null, "Doesn't validate the first line" );
    assert.propEqual( dataTwo, [ "foo" ], "Validates the second line" );
    assert.propEqual( buffer, [ "bar", "foo" ], "Returns the whole line buffer" );
    assert.ok( createReadStreamStub.calledWithExactly( "foo" ), "Reads the correct file" );
    assert.ok( closeSpy.calledOnce, "Calls readStream.close" );
  })();
...
node.tty_posix.ReadStream.js
Source:node.tty_posix.ReadStream.js
goog.provide("node.tty_posix.ReadStream");
goog.require("node.buffer.Buffer");
/**
 * @constructor
 */
node.tty_posix.ReadStream = function() {};
/**
 * @type {node.buffer.Buffer|null}
 */
node.tty_posix.ReadStream.prototype.bufferSize = null;
/**
 * @type {string|null}
 */
node.tty_posix.ReadStream.prototype.fd = null;
/**
 * @type {string|null}
 */
node.tty_posix.ReadStream.prototype.type = null;
/**
 * @type {string|null}
 */
node.tty_posix.ReadStream.prototype.allowHalfOpen = null;
/**
 * @type {boolean|null}
 */
node.tty_posix.ReadStream.prototype.isTTY = null;
/**
 * @param {string} fd
 * @param {string} type
 */
node.tty_posix.ReadStream.prototype.open = function(fd, type) {
  return node.tty_posix.ReadStream.core_.open.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} data
 * @param {string} [fd]
 * @param {string} [cb]
 */
node.tty_posix.ReadStream.prototype.write = function(data, [fd], [cb]) {
  return node.tty_posix.ReadStream.core_.write.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 *
 */
node.tty_posix.ReadStream.prototype.flush = function() {
  return node.tty_posix.ReadStream.core_.flush.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string=} encoding
 */
node.tty_posix.ReadStream.prototype.setEncoding = function(encoding) {
  return node.tty_posix.ReadStream.core_.setEncoding.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 *
 */
node.tty_posix.ReadStream.prototype.connect = function() {
  return node.tty_posix.ReadStream.core_.connect.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 *
 */
node.tty_posix.ReadStream.prototype.address = function() {
  return node.tty_posix.ReadStream.core_.address.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} v
 */
node.tty_posix.ReadStream.prototype.setNoDelay = function(v) {
  return node.tty_posix.ReadStream.core_.setNoDelay.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} enable
 * @param {string} time
 */
node.tty_posix.ReadStream.prototype.setKeepAlive = function(enable, time) {
  return node.tty_posix.ReadStream.core_.setKeepAlive.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} msecs
 * @param {function(Error?,...[*]):undefined=} callback
 */
node.tty_posix.ReadStream.prototype.setTimeout = function(msecs, callback) {
  return node.tty_posix.ReadStream.core_.setTimeout.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 *
 */
node.tty_posix.ReadStream.prototype.pause = function() {
  return node.tty_posix.ReadStream.core_.pause.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 *
 */
node.tty_posix.ReadStream.prototype.resume = function() {
  return node.tty_posix.ReadStream.core_.resume.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 *
 */
node.tty_posix.ReadStream.prototype.destroySoon = function() {
  return node.tty_posix.ReadStream.core_.destroySoon.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} exception
 */
node.tty_posix.ReadStream.prototype.destroy = function(exception) {
  return node.tty_posix.ReadStream.core_.destroy.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} data
 * @param {string=} encoding
 */
node.tty_posix.ReadStream.prototype.end = function(data, encoding) {
  return node.tty_posix.ReadStream.core_.end.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} dest
 * @param {Object} options
 */
node.tty_posix.ReadStream.prototype.pipe = function(dest, options) {
  return node.tty_posix.ReadStream.core_.pipe.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} n
 */
node.tty_posix.ReadStream.prototype.setMaxListeners = function(n) {
  return node.tty_posix.ReadStream.core_.setMaxListeners.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 */
node.tty_posix.ReadStream.prototype.emit = function(type) {
  return node.tty_posix.ReadStream.core_.emit.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 * @param {string} listener
 */
node.tty_posix.ReadStream.prototype.addListener = function(type, listener) {
  return node.tty_posix.ReadStream.core_.addListener.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 * @param {string} listener
 */
node.tty_posix.ReadStream.prototype.on = function(type, listener) {
  return node.tty_posix.ReadStream.core_.on.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 * @param {string} listener
 */
node.tty_posix.ReadStream.prototype.once = function(type, listener) {
  return node.tty_posix.ReadStream.core_.once.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 * @param {string} listener
 */
node.tty_posix.ReadStream.prototype.removeListener = function(type, listener) {
  return node.tty_posix.ReadStream.core_.removeListener.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 */
node.tty_posix.ReadStream.prototype.removeAllListeners = function(type) {
  return node.tty_posix.ReadStream.core_.removeAllListeners.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @param {string} type
 */
node.tty_posix.ReadStream.prototype.listeners = function(type) {
  return node.tty_posix.ReadStream.core_.listeners.apply(node.tty_posix.ReadStream.core_, arguments);
};
/**
 * @private
 * @type {*}
 */
...
awaitify-stream.js
Source:awaitify-stream.js
// Copyright (C) 2017 Chris Sidi
//
// packaging and argument validation based on node-byline, Copyright (C) 2011-2015 John Hewson
'use strict';
const aw = exports;
aw.createReader = function(readStream) {
  let obj = {
    stream: readStream,
    readable: true
  };
  return addReaderFunctions(readStream, obj);
}
aw.createWriter = function(writeStream) {
  let obj = {
    stream: writeStream,
    writable: true
  };
  return addWriterFunctions(writeStream, obj);
}
aw.createDuplexer = function(duplexStream) {
  let obj = {
    stream: duplexStream,
    readable: true,
    writable: true
  };
  return addDuplexFunctions(duplexStream, obj);
}
aw.addAsyncFunctions = function(stream, obj) {
  if (!stream) {
    throw new Error('stream argument required.');
  }
  if (!stream.readable && !stream.writable) {
    throw new Error('stream must be readable and/or writable.');
  }
  if (!obj) {
    obj = stream;
  }
  if (stream.readable) {
    addReaderFunctions(stream, obj);
  }
  if (stream.writable) {
    addWriterFunctions(stream, obj);
  }
  return obj;
}
function addDuplexFunctions(stream, obj) {
  addReaderFunctions(stream, obj);
  addWriterFunctions(stream, obj);
  return obj;
}
function addReaderFunctions(readStream, obj) {
  if (!readStream) {
    throw new Error('readStream argument required.');
  }
  if (!readStream.readable) {
    throw new Error('readStream is not readable.');
  }
  if (!obj) {
    throw new Error('obj argument required.');
  }
  const errors = [];
  readStream.on('error', (err) => {
    errors.push(err);
  });
  let ended = false;
  readStream.on('end', () => {
    ended = true;
  });
  obj.readAsync = function(size) {
    return new Promise((resolve, reject) => {
      function read() {
        // unregister the listener that wasn't called to avoid leaks.
        readStream.removeListener('readable', read);
        readStream.removeListener('end', read);
        readStream.removeListener('error', read);
        if (errors.length) {
          reject(errors.shift());
          return;
        }
        if (ended) {
          resolve(null);
          return;
        }
        let data = readStream.read(size);
        if (errors.length) {
          reject(errors.shift());
          return;
        }
        if (data !== null) {
          resolve(data);
          return;
        }
        // wait for more data to be available, or the end of the stream.
        readStream.once('readable', read);
        readStream.once('end', read);
        // Note that in the event of an error, the error-setting listener will be called ahead of read.
        // "The EventEmitter calls all listeners synchronously in the order in which they were registered."
        // - https://nodejs.org/dist/latest-v8.x/docs/api/events.html
        readStream.once('error', read);
      }
      // Attempt to read data.
      read();
    });
  };
  return obj;
}
function addWriterFunctions(writeStream, obj) {
  if (!writeStream) {
    throw new Error('writeStream argument required.');
  }
  if (!writeStream.writable) {
    throw new Error('writeStream is not writable.');
  }
  if (!obj) {
    throw new Error('obj argument required.');
  }
  let bufferAvailable = true;
  writeStream.on('drain', () => {
    bufferAvailable = true;
  });
  const errors = [];
  writeStream.on('error', (err) => {
    errors.push(err);
  });
  obj.writeAsync = function(chunk, encoding) {
    return new Promise((resolve, reject) => {
      function write() {
        // unregister the listener that wasn't called to avoid leaks.
        writeStream.removeListener('drain', write);
        writeStream.removeListener('error', write);
        if (errors.length) {
          reject(errors.shift());
          return;
        }
        if (bufferAvailable) {
          bufferAvailable = writeStream.write(chunk, encoding);
          if (errors.length) {
            reject(errors.shift());
          } else {
            resolve();
          }
          return;
        }
        writeStream.once('drain', write);
        writeStream.once('error', write);
      }
      write();
    });
  };
  obj.endAsync = function(chunk, encoding) {
    return new Promise((resolve, reject) => {
      if (errors.length) {
        reject(errors.shift());
        return;
      }
      function ended() {
        // unregister the listener that wasn't called to avoid leaks.
        writeStream.removeListener('error', ended);
        if (errors.length) {
          reject(errors.shift());
          return;
        }
        resolve();
      }
      writeStream.on('error', ended);
      writeStream.end(chunk, encoding, ended);
    });
  };
  return obj;
...
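The library source above boils down to a small promise-based surface: createReader/readAsync, createWriter/writeAsync/endAsync, and addAsyncFunctions for augmenting a stream in place. A minimal usage sketch, loading the module the same way the tests above do (the file paths here are illustrative only):

const fs = require('fs');
const aw = require('../lib/awaitify-stream.js'); // same relative path the tests above use

async function copyFile(srcPath, destPath) {
  const reader = aw.createReader(fs.createReadStream(srcPath));
  const writer = aw.createWriter(fs.createWriteStream(destPath));
  let chunk;
  // readAsync resolves with null once the source stream has ended.
  while (null !== (chunk = await reader.readAsync())) {
    // writeAsync writes immediately, or first waits for 'drain' if the previous write filled the buffer.
    await writer.writeAsync(chunk);
  }
  // endAsync ends the stream and resolves once it has finished flushing (rejects on 'error').
  await writer.endAsync();
}

copyFile('in.txt', 'out.txt').catch(console.error);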
index.test.js
Source:index.test.js
const assert = require('assert');
const { PassThrough } = require('stream');
const ReadlineTransform = require('../');
const MemoryWriteStream = require('./memory_write_stream');
describe('ReadlineTransform', () => {
  context('data ends without line break', () => {
    it('transforms all lines', (done) => {
      const readStream = new PassThrough();
      const transform = new ReadlineTransform();
      const writeStream = new MemoryWriteStream();
      writeStream.on('finish', () => {
        assert.deepEqual(writeStream.data, ['foo', 'bar', 'baz']);
        done();
      });
      readStream.pipe(transform).pipe(writeStream);
      readStream.write(Buffer.from('foo\nba'));
      readStream.write('r\r');
      readStream.end(Buffer.from('\nbaz'));
    });
    context('data contains empty lines and skipEmpty option is true', () => {
      it('transforms with dropping empty lines', (done) => {
        const readStream = new PassThrough();
        const transform = new ReadlineTransform({ skipEmpty: true });
        const writeStream = new MemoryWriteStream();
        writeStream.on('finish', () => {
          assert.deepEqual(writeStream.data, ['foo', 'bar', 'baz']);
          done();
        });
        readStream.pipe(transform).pipe(writeStream);
        readStream.write('foo\nba');
        readStream.write(Buffer.from('r\r\n\n\r'));
        readStream.end(Buffer.from('\nbaz'));
      });
    })
  })
  context('data ends with line break', () => {
    it('transforms all lines except last empty line', (done) => {
      const readStream = new PassThrough();
      const transform = new ReadlineTransform();
      const writeStream = new MemoryWriteStream();
      writeStream.on('finish', () => {
        assert.deepEqual(writeStream.data, ['foo', 'bar', '', 'baz']);
        done();
      });
      readStream.pipe(transform).pipe(writeStream);
      readStream.write(Buffer.from('foo\r\nbar\n'));
      readStream.end('\r\nbaz\r\n');
    });
    context('ignoreEndOfBreak is false', () => {
      it('transforms all lines', (done) => {
        const readStream = new PassThrough();
        const transform = new ReadlineTransform({ ignoreEndOfBreak: false });
        const writeStream = new MemoryWriteStream();
        writeStream.on('finish', () => {
          assert.deepEqual(writeStream.data, ['foo', 'bar', '', 'baz', '']);
          done();
        });
        readStream.pipe(transform).pipe(writeStream);
        readStream.write(Buffer.from('foo\r\nbar\n'));
        readStream.end('\r\nbaz\r\n');
      });
    })
    context('skipEmpty option is true', () => {
      it('transforms with dropping empty lines', (done) => {
        const readStream = new PassThrough();
        const transform = new ReadlineTransform({ skipEmpty: true });
        const writeStream = new MemoryWriteStream();
        writeStream.on('finish', () => {
          assert.deepEqual(writeStream.data, ['foo', 'bar', 'baz']);
          done();
        });
        readStream.pipe(transform).pipe(writeStream);
        readStream.write('foo\r\nbar\n');
        readStream.end(Buffer.from('\r\nbaz\r\n'));
      });
    })
    context('ignoreEndOfBreak is false and skipEmpty option is true', () => {
      it('works with dropping all empty lines', (done) => {
        const readStream = new PassThrough();
        const transform = new ReadlineTransform({ ignoreEndOfBreak: false, skipEmpty: true });
        const writeStream = new MemoryWriteStream();
        writeStream.on('finish', () => {
          assert.deepEqual(writeStream.data, ['foo', ' ', 'bar']);
          done();
        });
        readStream.pipe(transform).pipe(writeStream);
        readStream.write(Buffer.from('foo\n \n'));
        readStream.write('\n\n');
        readStream.write(Buffer.from('bar\n'));
        readStream.end();
      });
    })
  })
  context('line break is special', () => {
    it('transforms with dropping last empty line', (done) => {
      const readStream = new PassThrough();
      const transform = new ReadlineTransform({ breakMatcher: '_\n' });
      const writeStream = new MemoryWriteStream();
      writeStream.on('finish', () => {
        assert.deepEqual(writeStream.data, ['', 'foo', 'bar', 'baz', '']);
        done();
      });
      readStream.pipe(transform).pipe(writeStream);
      readStream.write(Buffer.from('_\nfoo_\nbar_\nbaz_\n_\n'));
      readStream.end();
    });
  })
...
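For reference, a compact sketch of wiring the transform under test into an ordinary pipeline outside the spec. The option names (skipEmpty, ignoreEndOfBreak, breakMatcher) are the ones exercised above; the require path mirrors the spec's relative require and is an assumption about the package layout, as is the input file name:

const fs = require('fs');
const ReadlineTransform = require('../'); // assumption: same relative require the spec uses

// Emit one chunk per input line, dropping blank lines.
const lineSplitter = new ReadlineTransform({ skipEmpty: true });

fs.createReadStream('input.log')
  .pipe(lineSplitter)
  .on('data', (line) => console.log(String(line)))
  .on('end', () => console.log('done'));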
collectionFS.js
Source:collectionFS.js
/* eslint no-undef: 0*/
/**
 * core collectionsFS configurations
 */
FS.HTTP.setBaseUrl("/assets");
FS.HTTP.setHeadersForGet([
  ["Cache-Control", "public, max-age=31536000"]
]);
/**
 * Define CollectionFS collection
 * See: https://github.com/CollectionFS/Meteor-CollectionFS
 * chunkSize: 1024*1024*2; <- CFS default // 256k is default GridFS chunk size, but performs terribly
 */
export const Media = new FS.Collection("Media", {
  stores: [
    new FS.Store.GridFS("image", {
      chunkSize: 1 * 1024 * 1024
    }), new FS.Store.GridFS("large", {
      chunkSize: 1 * 1024 * 1024,
      transformWrite: function (fileObj, readStream, writeStream) {
        if (gm.isAvailable) {
          gm(readStream, fileObj.name).resize("1000", "1000").stream()
            .pipe(writeStream);
        } else {
          readStream.pipe(writeStream);
        }
      }
    }), new FS.Store.GridFS("medium", {
      chunkSize: 1 * 1024 * 1024,
      transformWrite: function (fileObj, readStream, writeStream) {
        if (gm.isAvailable) {
          gm(readStream, fileObj.name).resize("600", "600").stream().pipe(
            writeStream);
        } else {
          readStream.pipe(writeStream);
        }
      }
    }), new FS.Store.GridFS("small", {
      chunkSize: 1 * 1024 * 1024,
      transformWrite: function (fileObj, readStream, writeStream) {
        if (gm.isAvailable) {
          gm(readStream).resize("235", "235" + "^").gravity("Center")
            .extent("235", "235").stream("PNG").pipe(writeStream);
        } else {
          readStream.pipe(writeStream);
        }
      }
    }), new FS.Store.GridFS("thumbnail", {
      chunkSize: 1 * 1024 * 1024,
      transformWrite: function (fileObj, readStream, writeStream) {
        if (gm.isAvailable) {
          gm(readStream).resize("100", "100" + "^").gravity("Center")
            .extent("100", "100").stream("PNG").pipe(writeStream);
        } else {
          readStream.pipe(writeStream);
        }
      }
    })
  ],
  filter: {
    allow: {
      contentTypes: ["image/*"]
    }
  }
});
export const Audio = new FS.Collection("Audio", {
  stores: [new FS.Store.GridFS("audio", {
    transformWrite: function (fileObj, readStream, writeStream) {
      readStream.pipe(writeStream);
    }
  })
  ],
  filter: {
    allow: {
      contentTypes: ["audio/*"],
      extensions: ["wav", "wma", "aac", "mp3"]
    }
  }
});
export const Video = new FS.Collection("Video", {
  stores: [
    new FS.Store.GridFS("video", {
      transformWrite: function (fileObj, readStream, writeStream) {
        readStream.pipe(writeStream);
      }
    })
  ],
  filter: {
    allow: {
      contentTypes: ["video/*"],
      extensions: ["mp4", "mov", "flv", "3gp", "avi"]
    }
  }
});
export const Book = new FS.Collection("Book", {
  stores: [new FS.Store.GridFS("book", {
    transformWrite: function (fileObj, readStream, writeStream) {
      readStream.pipe(writeStream);
    }
  })
  ]
});
export const Software = new FS.Collection("Software", {
  stores: [new FS.Store.GridFS("software", {
    transformWrite: function (fileObj, readStream, writeStream) {
      readStream.pipe(writeStream);
    }
  })
  ]
});
const allow = (db) => {
  const actionMethod = () => true;
  db.allow({
    insert: actionMethod,
    download: actionMethod,
    update: actionMethod,
    remove: actionMethod
  });
};
allow(Audio);
allow(Video);
allow(Book);
...
readStreamSpec.js
Source:readStreamSpec.js
/* jshint jasmine: true */
'use strict';
var ReadStream = require('../lib/readStream');
describe('readstream spec', function () {
  var readStream;
  beforeEach(function () {
    readStream = new ReadStream();
  });
  it('should be able to set a buffer', function () {
    readStream.updateBuffer(new Buffer(100));
    expect(readStream._object.length).toBe(100);
    expect(readStream.complete).toBe(false);
    expect(readStream._readableState.ended).toBe(false);
    readStream.setBuffer('abc');
    expect(readStream._object).toEqual('abc');
    expect(readStream.complete).toBe(true);
  });
  it('should be able to read from the stream', function () {
    readStream.updateBuffer(new Buffer(100));
    expect(readStream.read(100).length).toEqual(100);
    expect(readStream.read(100)).toEqual(undefined);
  });
  it('should be able to read from a finished stream', function () {
    readStream.setBuffer(new Buffer(100));
    expect(readStream.read(100).length).toEqual(100);
    expect(readStream.read(100)).toEqual(null);
  });
  it('should be able to read parts from a finished stream', function () {
    readStream.setBuffer(new Buffer(10000));
    expect(readStream.read(450).length).toEqual(450);
    expect(readStream.read(550).length).toEqual(550);
    expect(readStream.read(4000).length).toEqual(4000);
    expect(readStream.read(5000).length).toEqual(5000);
    expect(readStream.read(100)).toEqual(null);
    expect(readStream._readableState.ended).toBe(true);
  });
  it('should be able to read strings', function () {
    readStream.setBuffer('abc');
    expect(readStream.read(10).toString()).toEqual('abc');
  });
  it('after reading the stream should be finished', function (done) {
    readStream.setBuffer(new Buffer(100));
    var endSpy = jasmine.createSpy();
    readStream.on('end', endSpy);
    readStream.read(500);
    readStream.read(500);
    readStream.read(500);
    expect(readStream._readableState.ended).toBe(true);
    setTimeout(function () {
      expect(endSpy).toHaveBeenCalled();
      done();
    }, 30);
  });
  describe('with flowing stream', function () {
    beforeEach(function () {
      // Adding a 'data' event handler changes
      // a stream from "paused" mode to "flowing" mode
      readStream.on('data', () => null);
    })
    it('should be able to set a buffer', function () {
      var len = 1;
      expect(readStream.read(2)).toEqual(null);
      readStream.setBuffer(new Buffer(len));
      expect(readStream.complete).toBe(true);
      expect(readStream._offset).toBe(len);
      expect(readStream._object.length).toBe(len);
    });
  });
...
8fs文件流操作.js
Source:8fs文件流操作.js
let fs = require('fs');
// let readStream = fs.createReadStream('./video.flv');
// let arr = [];
// readStream.on('open', fd => {
//   console.log('文件已打开');
// });
// readStream.on('data', data => {
//   arr.push(data);
// });
// readStream.on('end', () => {
//   let video = Buffer.concat(arr); // Buffer.concat joins the buffered chunks into one Buffer
//   fs.writeFile('./vv.flv', video, err => {
//     console.log(err);
//   });
// });
// readStream.on('error', err => {
//   console.log('错误了');
// });
// readStream.on('close', () => {
//   console.log('文件已关闭');
// });
// Read and write at the same time through file streams
// let readStream = fs.createReadStream('./video.flv');
// let writeStream = fs.createWriteStream('./vv.flv');
// readStream.on('data', data => {
//   writeStream.write(data);
// });
// readStream.on('end', () => {
//   writeStream.end();
// });
let writeStream = fs.createWriteStream('./å®è¯.txt');
writeStream.write('èä¸å¡');
// writeStream.end('白居易');
writeStream.write('同是天涯沦落人，相逢何必曾相识');
writeStream.write('主人下马客在船，举酒欲饮无管弦');
writeStream.end();
// A writeStream can register the 'finish' event to know when writing has completed
writeStream.on('finish', () => {
  console.log('写入完毕');
});
// Important!!! Calling end() finishes the stream; do not call write() or end() again afterwards, or an error is thrown. end() may be called without arguments, but it must be called!
// pipe operations
// let readStream = fs.createReadStream('./video.flv');
// let writeStream = fs.createWriteStream('./vv.flv');
// // pipe operations
...
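The truncated comments above gesture at the pipe-based variant of the same copy. A minimal sketch of what that usually looks like, reusing the file names from the commented-out code above:

let readStream = fs.createReadStream('./video.flv');
let writeStream = fs.createWriteStream('./vv.flv');
// pipe() handles backpressure and ends the destination when the source ends.
readStream.pipe(writeStream);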
Using AI Code Generation
var stryker = require('stryker-parent');
const readStream = stryker.readStream;
const stream = readStream('path/to/file');

const writeStream = require('stryker-parent').writeStream;
const writer = writeStream('path/to/file');

const createTempDir = require('stryker-parent').createTempDir;
const tempDir = createTempDir();
Using AI Code Generation
var stryker = require('stryker-parent');
var readStream = stryker.readStream;
var stream = readStream('file1.txt');
stream.on('data', function (data) {
  console.log(data);
});
stream.on('end', function () {
  console.log('end');
});
stream.on('error', function (err) {
  console.log('error');
});
stream.on('close', function () {
  console.log('close');
});
// The same pattern is repeated for file2.txt through file5.txt, and a further
// variant also listens for the 'finish', 'open', 'ready' and 'readable' events.
...
Using AI Code Generation
const { readStream } = require('stryker-parent');
readStream('test.txt')
  .then((data) => {
    console.log(data);
  })
  .catch((err) => {
    console.log(err);
  });

const fs = require('fs');
const readable = readStream('foo.txt');
const writable = fs.createWriteStream('foo.txt');
readable.pipe(writable);
Using AI Code Generation
const fs = require('fs');
const strykerParent = require('stryker-parent');
const readStream = strykerParent.readStream;
const readable = readStream('foo.txt');
const writable = fs.createWriteStream('foo.txt');
readable.pipe(writable);
Using AI Code Generation
var stryker = require('stryker-parent');
var readStream = stryker.readStream('test.txt');
readStream.on('end', function () {
  console.log('end');
});
readStream.on('error', function (err) {
  console.error(err);
});
readStream.on('close', function () {
  console.log('close');
});
readStream.on('finish', function () {
  console.log('finish');
});
readStream.on('open', function () {
  console.log('open');
});
readStream.on('ready', function () {
  console.log('ready');
});
readStream.on('readable', function () {
  console.log('readable');
});
...
Using AI Code Generation
var stream = require('stryker-parent').readStream;
var fs = require('fs');
var rs = stream(fs.createReadStream('test.js'));
rs.on('data', function(chunk) {
  console.log(chunk);
});
rs.on('end', function() {
  console.log('finished reading');
});

module.exports = {
  readStream: function(readStream) {
    return readStream;
  }
};