feat(writer): Implement streaming pipelines (#1671)

This implements image writing as a streaming pipeline built on pipage and blockmap, which paves the way for features such as using network locations as sources and imaging storage devices (i.e. backups). Because the streaming pipeline can be mutated while it is writing, it also facilitates the development of dynamic block-mapping.

Change-Type: minor
Jonas Hermsmeier 2017-11-14 19:54:10 +01:00 committed by GitHub
parent 63528ce8f3
commit 5e77958106
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 1466 additions and 165 deletions
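For orientation, a minimal sketch of driving the new writer (mirroring how the CLI writer is rewired further down in this diff). The image path, target device, and the hand-built `image` object are illustrative stand-ins for what lib/image-stream normally produces, and the require paths assume the repo root:

'use strict'
const fs = require('fs')
const ImageWriter = require('./lib/writer')

// Illustrative stand-in for the object lib/image-stream produces;
// only the fields the write pipeline actually reads are filled in.
const imagePath = 'path/to/etcher-test.img'
const imageSize = fs.statSync(imagePath).size
const image = {
  stream: fs.createReadStream(imagePath),
  size: {
    original: imageSize,
    final: { estimation: false, value: imageSize }
  },
  transform: null, // optional decompression transform stream
  bmap: null       // optional block map contents
}

// Open the destination ourselves and hand over the fd, as the CLI does.
// WARNING: this writes destructively to the given device.
const device = '/dev/rdisk2'
const fd = fs.openSync(device, 'rs+')

const writer = new ImageWriter({
  image,
  fd,
  path: device,
  verify: true, // re-read the device and compare checksums after writing
  checksumAlgorithms: [ 'crc32' ]
})

writer.write()

writer.on('progress', (state) => {
  // state.type is 'write' or 'check', state.percentage runs 0..100
  console.log(state.type, Math.floor(state.percentage) + '%')
})

writer.on('error', (error) => {
  console.error(error)
})

writer.on('finish', (results) => {
  // results: { bytesRead, bytesWritten, checksum }
  fs.closeSync(fd)
  console.log('checksums', results.checksum)
})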

124
lib/cli/diskpart.js Normal file

@ -0,0 +1,124 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const os = require('os')
const fs = require('fs')
const path = require('path')
const crypto = require('crypto')
const childProcess = require('child_process')
const debug = require('debug')('etcher:cli:diskpart')
const Promise = require('bluebird')
const retry = require('bluebird-retry')
const TMP_RANDOM_BYTES = 6
const DISKPART_DELAY = 2000
const DISKPART_RETRIES = 5
/**
* @summary Generate a tmp filename with full path of OS' tmp dir
* @function
* @private
*
* @param {String} extension - temporary file extension
* @returns {String} filename
*
* @example
* const filename = tmpFilename('.sh');
*/
const tmpFilename = (extension) => {
const random = crypto.randomBytes(TMP_RANDOM_BYTES).toString('hex')
const filename = `etcher-diskpart-${random}${extension}`
return path.join(os.tmpdir(), filename)
}
/**
* @summary Run a diskpart script
* @param {Array<String>} commands - list of commands to run
* @param {Function} callback - callback(error)
* @example
* runDiskpart(['rescan'], (error) => {
* ...
* })
*/
const runDiskpart = (commands, callback) => {
if (os.platform() !== 'win32') {
callback()
return
}
const filename = tmpFilename('')
const script = commands.join('\r\n')
fs.writeFile(filename, script, {
mode: 0o755
}, (writeError) => {
debug('write %s:', filename, writeError || 'OK')
childProcess.exec(`diskpart /s ${filename}`, (execError, stdout, stderr) => {
debug('stdout:', stdout)
debug('stderr:', stderr)
fs.unlink(filename, (unlinkError) => {
debug('unlink %s:', filename, unlinkError || 'OK')
callback(execError)
})
})
})
}
module.exports = {
/**
* @summary Clean a device's partition tables
* @param {String} device - device path
* @example
* diskpart.clean('\\\\.\\PhysicalDrive2')
* .then(...)
* .catch(...)
* @returns {Promise}
*/
clean (device) {
if (os.platform() !== 'win32') {
return Promise.resolve()
}
debug('clean', device)
const pattern = /PHYSICALDRIVE(\d+)/i
if (pattern.test(device)) {
const deviceId = device.match(pattern).pop()
return retry(() => {
return new Promise((resolve, reject) => {
runDiskpart([ `select disk ${deviceId}`, 'clean', 'rescan' ], (error) => {
return error ? reject(error) : resolve()
})
}).delay(DISKPART_DELAY)
}, {
/* eslint-disable camelcase */
max_tries: DISKPART_RETRIES
/* eslint-enable camelcase */
}).catch((error) => {
throw new Error(`Couldn't clean the drive, ${error.failure.message} (code ${error.failure.code})`)
})
}
return Promise.reject(new Error(`Invalid device: "${device}"`))
}
}


@ -16,7 +16,7 @@
'use strict'
const imageWrite = require('etcher-image-write')
const ImageWriter = require('../writer')
const Bluebird = require('bluebird')
const fs = Bluebird.promisifyAll(require('fs'))
const mountutils = Bluebird.promisifyAll(require('mountutils'))
@ -24,6 +24,7 @@ const os = require('os')
const imageStream = require('../image-stream')
const errors = require('../shared/errors')
const constraints = require('../shared/drive-constraints')
const diskpart = require('./diskpart')
/**
* @summary Timeout, in milliseconds, to wait before unmounting on success
@ -71,6 +72,8 @@ exports.writeImage = (imagePath, drive, options, onProgress) => {
}
return mountutils.unmountDiskAsync(drive.device)
}).then(() => {
return diskpart.clean(drive.device)
}).then(() => {
return fs.openAsync(drive.raw, 'rs+')
}).then((driveFileDescriptor) => {
@ -82,24 +85,20 @@ exports.writeImage = (imagePath, drive, options, onProgress) => {
})
}
return imageWrite.write({
const writer = new ImageWriter({
image,
fd: driveFileDescriptor,
device: drive.raw,
size: drive.size
}, {
stream: image.stream,
size: image.size.original
}, {
check: options.validateWriteOnSuccess,
transform: image.transform,
bmap: image.bmap,
bytesToZeroOutFromTheBeginning: image.bytesToZeroOutFromTheBeginning
path: drive.raw,
verify: options.validateWriteOnSuccess,
checksumAlgorithms: [ 'crc32' ]
})
return writer.write()
}).then((writer) => {
return new Bluebird((resolve, reject) => {
writer.on('progress', onProgress)
writer.on('error', reject)
writer.on('done', resolve)
writer.on('finish', resolve)
})
}).tap(() => {
// Make sure the device stream file descriptor is closed


@ -181,7 +181,7 @@ module.exports = {
size: {
// FIXME(jhermsmeier): Originally `options.size`,
// See discussion in https://github.com/resin-io/etcher/pull/1587
original: size || options.size,
original: options.size,
final: {
estimation: false,
value: size

6
lib/writer/.eslintrc.yml Normal file

@ -0,0 +1,6 @@
rules:
no-eq-null: off
no-magic-numbers: off
no-param-reassign: off
no-underscore-dangle: off
lodash/prefer-lodash-method: off

212
lib/writer/block-read-stream.js Normal file

@ -0,0 +1,212 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const stream = require('readable-stream')
const fs = require('fs')
const debug = require('debug')('block-read-stream')
const CHUNK_SIZE = 64 * 1024
const MIN_CHUNK_SIZE = 512
/**
* @summary BlockReadStream
* @class
*/
class BlockReadStream extends stream.Readable {
/**
* @summary BlockReadStream constructor
* @param {Object} [options] - options
* @param {Number} [options.fd] - file descriptor
* @param {String} [options.path] - file path
* @param {String} [options.flags] - file open flags
* @param {Number} [options.mode] - file mode
* @param {Number} [options.start] - start offset in bytes
* @param {Number} [options.end] - end offset in bytes
* @param {Boolean} [options.autoClose] - automatically close the stream on end
* @example
* new BlockReadStream()
*/
constructor (options) {
options = Object.assign({}, BlockReadStream.defaults, options)
options.objectMode = true
debug('block-read-stream %j', options)
super(options)
this.fs = options.fs
this.fd = options.fd
this.path = options.path
this.flags = options.flags
this.mode = options.mode
this.end = options.end || Infinity
this.autoClose = options.autoClose
this.position = options.start || 0
this.bytesRead = 0
this.closed = false
this.destroyed = false
this.once('end', function () {
if (this.autoClose) {
this.close()
}
})
/**
* @summary onRead handler
* @param {Error} error - error
* @param {Number} bytesRead - bytes read
* @param {Buffer} buffer - resulting buffer
* @example
* fs.read(fd, buffer, 0, length, position, onRead)
*/
this._onRead = (error, bytesRead, buffer) => {
if (!error && bytesRead !== buffer.length) {
error = new Error(`Bytes read mismatch: ${bytesRead} != ${buffer.length}`)
}
if (error) {
if (this.autoClose) {
this.destroy()
}
this.emit('error', error)
return
}
this.bytesRead += bytesRead
this.push(buffer)
}
this.open()
}
/**
* @summary Read a chunk from the source
* @private
* @example
* // not to be called directly
*/
_read () {
// Wait for file handle to be open
if (this.fd == null) {
this.once('open', () => {
this._read()
})
return
}
const toRead = this.end - this.position
if (toRead <= 0) {
this.push(null)
return
}
const length = Math.min(CHUNK_SIZE, Math.max(MIN_CHUNK_SIZE, toRead))
const buffer = Buffer.alloc(length)
this.fs.read(this.fd, buffer, 0, length, this.position, this._onRead)
this.position += length
}
/**
* @summary Open a handle to the file
* @private
* @example
* this.open()
*/
open () {
debug('open')
if (this.fd != null) {
this.emit('open', this.fd)
return
}
this.fs.open(this.path, this.flags, this.mode, (error, fd) => {
if (error) {
if (this.autoClose) {
this.destroy()
}
this.emit('error', error)
} else {
this.fd = fd
this.emit('open', fd)
}
})
}
/**
* @summary Close the underlying resource
* @param {Function} callback - callback(error)
* @example
* blockStream.close((error) => {
* // ...
* })
*/
close (callback) {
debug('close')
if (callback) {
this.once('close', callback)
}
if (this.closed || this.fd == null) {
if (this.fd == null) {
this.once('open', () => {
this.close()
})
} else {
process.nextTick(() => {
this.emit('close')
})
}
return
}
this.closed = true
this.fs.close(this.fd, (error) => {
if (error) {
this.emit('error', error)
} else {
this.emit('close')
}
})
this.fd = null
}
}
/**
* @summary Default options
* @type {Object}
* @constant
*/
BlockReadStream.defaults = {
fs,
fd: null,
path: null,
flags: 'r',
mode: 0o666,
autoClose: true
}
module.exports = BlockReadStream

136
lib/writer/block-stream.js Normal file

@ -0,0 +1,136 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const stream = require('readable-stream')
const debug = require('debug')('block-stream')
const MIN_BLOCK_SIZE = 512
const CHUNK_SIZE = 64 * 1024
/**
* @summary BlockStream class
* @class
*/
class BlockStream extends stream.Transform {
/**
* @summary BlockStream constructor
* @param {Object} [options] - options
* @param {Number} [options.blockSize] - block size in bytes
* @param {Number} [options.chunkSize] - chunk size in bytes
* @example
* new BlockStream(options)
*/
constructor (options) {
options = Object.assign({}, BlockStream.defaults, options)
options.readableObjectMode = true
super(options)
this.blockSize = options.blockSize
this.chunkSize = options.chunkSize
this.bytesRead = 0
this.bytesWritten = 0
this._buffers = []
this._bytes = 0
debug('new %j', options)
}
/**
* @summary Internal write handler
* @private
* @param {Buffer} chunk - chunk buffer
* @param {String} encoding - chunk encoding
* @param {Function} next - callback(error, value)
* @example
* // Not to be called directly
*/
_transform (chunk, encoding, next) {
this.bytesRead += chunk.length
if (this._bytes === 0 && chunk.length >= this.chunkSize) {
if (chunk.length % this.blockSize === 0) {
this.bytesWritten += chunk.length
this.push(chunk)
next()
return
}
}
this._buffers.push(chunk)
this._bytes += chunk.length
if (this._bytes >= this.chunkSize) {
let block = Buffer.concat(this._buffers)
const length = Math.floor(block.length / this.blockSize) * this.blockSize
this._buffers.length = 0
this._bytes = 0
if (block.length !== length) {
this._buffers.push(block.slice(length))
this._bytes += block.length - length
block = block.slice(0, length)
}
this.bytesWritten += block.length
this.push(block)
}
next()
}
/**
* @summary Internal stream end handler
* @private
* @param {Function} done - callback(error, value)
* @example
* // Not to be called directly
*/
_flush (done) {
if (!this._bytes) {
done()
return
}
const length = Math.ceil(this._bytes / this.blockSize) * this.blockSize
const block = Buffer.alloc(length)
let offset = 0
for (let index = 0; index < this._buffers.length; index += 1) {
this._buffers[index].copy(block, offset)
offset += this._buffers[index].length
}
this.push(block)
done()
}
}
/**
* @summary Default options
* @type {Object}
* @constant
*/
BlockStream.defaults = {
blockSize: MIN_BLOCK_SIZE,
chunkSize: CHUNK_SIZE
}
module.exports = BlockStream
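As an aside (not part of the commit), a small sketch of what BlockStream does with arbitrarily sized input: it buffers incoming chunks and re-emits them in multiples of the 512-byte block size, zero-padding only the final block in _flush. The require path assumes the repo root:

'use strict'
const BlockStream = require('./lib/writer/block-stream')

const blocks = new BlockStream() // defaults: 512-byte blocks, 64 KiB chunks

blocks.on('data', (block) => {
  // every emitted buffer length is a multiple of the block size
  console.log('block of', block.length, 'bytes')
})

// Write some awkwardly sized chunks; they come out re-sliced on block
// boundaries (a 70656-byte block, then a zero-padded 1024-byte tail block)
blocks.write(Buffer.alloc(1000))
blocks.write(Buffer.alloc(70000))
blocks.end(Buffer.alloc(300))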

261
lib/writer/block-write-stream.js Normal file

@ -0,0 +1,261 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const stream = require('readable-stream')
const fs = require('fs')
const debug = require('debug')('block-write-stream')
const CHUNK_SIZE = 64 * 1024
/**
* @summary BlockWriteStream
* @class
*/
class BlockWriteStream extends stream.Writable {
/**
* @summary BlockWriteStream constructor
* @param {Object} [options] - options
* @param {Number} [options.fd] - file descriptor
* @param {String} [options.path] - file path
* @param {String} [options.flags] - file open flags
* @param {Number} [options.mode] - file mode
* @param {Boolean} [options.autoClose] - automatically close the stream on end
* @example
* new BlockWriteStream(options)
*/
constructor (options) {
options = Object.assign({}, BlockWriteStream.defaults, options)
options.objectMode = true
debug('block-write-stream %j', options)
super(options)
this._writableState.highWaterMark = 1
this.fs = options.fs
this.fd = options.fd
this.path = options.path
this.flags = options.flags
this.mode = options.mode
this.autoClose = options.autoClose
this.position = 0
this.bytesRead = 0
this.blocksRead = 0
this.bytesWritten = 0
this.blocksWritten = 0
this.closed = false
this.destroyed = false
this.once('finish', function () {
if (this.autoClose) {
this.close()
}
})
this._flushing = false
this._firstBlocks = []
this.open()
}
/**
* @summary Internal write handler
* @private
* @param {Buffer} chunk - chunk buffer
* @param {String} encoding - chunk encoding
* @param {Function} next - callback(error, value)
* @example
* // Not to be called directly
*/
_write (chunk, encoding, next) {
debug('_write', chunk.length, chunk.position, chunk.address)
// Wait for file handle to be open
if (this.fd == null) {
this.once('open', () => {
this._write(chunk, encoding, next)
})
return
}
this.bytesRead += chunk.length
this.blocksRead += 1
if (chunk.position == null) {
chunk.position = this.position
}
if (!this._flushing && (chunk.position < CHUNK_SIZE)) {
this._firstBlocks.push(chunk)
this.position = chunk.position + chunk.length
process.nextTick(next)
return
}
if (chunk.position !== this.position) {
this.position = chunk.position
}
fs.write(this.fd, chunk, 0, chunk.length, chunk.position, (error, bytesWritten) => {
this.bytesWritten += bytesWritten
this.blocksWritten += 1
this.position += bytesWritten
next(error)
})
}
/**
* @summary Write buffered data before a stream ends
* @private
* @param {Function} done - callback
* @example
* // Called by stream internals
*/
_final (done) {
debug('_final')
/**
* @summary Write the next chunk of the buffered `_firstBlocks`
* @param {Error} [error] - error
* @example
* writeNext()
*/
const writeNext = (error) => {
if (error) {
this.destroy(error)
return
}
const chunk = this._firstBlocks.pop()
if (!chunk) {
done()
return
}
this._write(chunk, null, writeNext)
}
this._flushing = true
writeNext()
}
/**
* @summary Destroy the stream, and emit the passed error
* @private
* @param {Error} [error] - error
* @param {Function} done - callback
* @example
* stream.destroy()
*/
_destroy (error, done) {
debug('_destroy', error)
if (this.autoClose) {
this.close((closeError) => {
done(error || closeError)
})
} else {
done(error)
}
}
/**
* @summary Open a handle to the file
* @private
* @example
* this.open()
*/
open () {
debug('open')
if (this.fd != null) {
this.emit('open', this.fd)
return
}
this.fs.open(this.path, this.flags, this.mode, (error, fd) => {
if (error) {
if (this.autoClose) {
this.destroy()
}
this.emit('error', error)
} else {
this.fd = fd
this.emit('open', fd)
}
})
}
/**
* @summary Close the underlying resource
* @param {Function} callback - callback(error)
* @example
* blockStream.close((error) => {
* // ...
* })
*/
close (callback) {
debug('close')
if (callback) {
this.once('close', callback)
}
if (this.closed || this.fd == null) {
if (this.fd == null) {
this.once('open', () => {
this.close()
})
} else {
process.nextTick(() => {
this.emit('close')
})
}
return
}
this.closed = true
this.fs.close(this.fd, (error) => {
if (error) {
this.emit('error', error)
} else {
this.emit('close')
}
})
this.fd = null
}
}
/**
* @summary Default options
* @type {Object}
* @constant
*/
BlockWriteStream.defaults = {
fs,
fd: null,
path: null,
flags: 'w',
mode: 0o666,
autoClose: true
}
module.exports = BlockWriteStream

142
lib/writer/checksum-stream.js Normal file

@ -0,0 +1,142 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const stream = require('readable-stream')
const crypto = require('crypto')
const CRC32Stream = require('crc32-stream')
const _ = require('lodash')
/**
* @summary Get a hash stream
* @function
* @private
* @example
* var md5Stream = getHashStream('md5')
*
* @param {String} algorithm - either `crc32` or anything supported by `crypto.Hash`
* @returns {Stream.Transform}
*/
const getHashStream = (algorithm) => {
if (algorithm === 'crc32') {
return new CRC32Stream()
}
return crypto.createHash(algorithm)
}
/**
* @summary Create an instance of ChecksumStream
* @name ChecksumStream
* @class
*/
class ChecksumStream extends stream.Transform {
/**
* @summary Create an instance of ChecksumStream
* @name ChecksumStream
* @class
* @param {Object} options - options
* @param {String[]} options.algorithms - hash algorithms
* @example
* var checksum = new ChecksumStream({
* algorithms: [ 'crc32', 'md5' ]
* })
*
* checksum.once('checksum', (checksum) => {
* // checksum: {
* // crc32: 'EF28AF1C',
* // md5: ''
* // }
* })
*
* fs.createReadStream( 'os-image.img' )
* .pipe( checksum )
* .pipe( fs.createWriteStream( '/dev/rdisk2' ) )
* .once( 'finish', () => { ... })
*/
constructor (options = {}) {
super(options)
this.results = {}
this.algorithms = options.algorithms || []
this.hashes = _.map(this.algorithms, (algorithm) => {
return this._createHash(algorithm)
})
}
/**
* @summary Create & pipe to the Hash streams
* @private
* @param {String} algorithm - hash algorithm
* @returns {Stream}
* @example
* const hash = this._createHash(algorithm)
*/
_createHash (algorithm) {
const hash = _.attempt(getHashStream, algorithm)
if (_.isError(hash)) {
hash.message += ` "${algorithm}"`
throw hash
}
/**
* @summary Check for all checksums to have been calculated
* @private
* @example
* hash.once('end', check)
*/
const check = () => {
if (_.keys(this.results).length === this.algorithms.length) {
this.emit('checksum', _.clone(this.results))
}
}
hash.once('error', (error) => {
return this.emit('error', error)
})
if (algorithm === 'crc32') {
hash.once('end', () => {
this.results[algorithm] = hash.digest('hex')
check()
})
hash.resume()
} else {
hash.once('readable', () => {
this.results[algorithm] = hash.read().toString('hex')
check()
})
}
return this.pipe(hash)
}
/**
* @summary Pass through chunks
* @private
* @param {Buffer} chunk - chunk
* @param {String} encoding - encoding
* @param {Function} next - callback
* @example
* checksumStream.write(buffer)
*/
_transform (chunk, encoding, next) {
this.push(chunk)
next()
}
}
module.exports = ChecksumStream

311
lib/writer/index.js Normal file

@ -0,0 +1,311 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const stream = require('readable-stream')
const Pipage = require('pipage')
const BlockMap = require('blockmap')
const BlockStream = require('./block-stream')
const BlockWriteStream = require('./block-write-stream')
const BlockReadStream = require('./block-read-stream')
const ChecksumStream = require('./checksum-stream')
const ProgressStream = require('./progress-stream')
const debug = require('debug')('image-writer')
const EventEmitter = require('events').EventEmitter
const _ = require('lodash')
/**
* @summary ImageWriter class
* @class
*/
class ImageWriter extends EventEmitter {
/**
* @summary ImageWriter constructor
* @param {Object} options - options
* @example
* new ImageWriter(options)
*/
constructor (options) {
super()
this.options = options
this.source = null
this.pipeline = null
this.target = null
this.hadError = false
this.bytesRead = 0
this.bytesWritten = 0
this.checksum = {}
}
/**
* @summary Start the writing process
* @returns {ImageWriter} imageWriter
* @example
* imageWriter.write()
*/
write () {
this.hadError = false
this._createWritePipeline(this.options)
.on('checksum', (checksum) => {
debug('write:checksum', checksum)
this.checksum = checksum
})
.on('error', (error) => {
this.hadError = true
this.emit('error', error)
})
this.target.on('finish', () => {
this.bytesRead = this.source.bytesRead
this.bytesWritten = this.target.bytesWritten
if (this.options.verify) {
this.verify()
} else {
this._emitFinish()
}
})
return this
}
/**
* @summary Start the verification process
* @returns {ImageWriter} imageWriter
* @example
* imageWriter.verify()
*/
verify () {
this._createVerifyPipeline(this.options)
.on('error', (error) => {
this.hadError = true
this.emit('error', error)
})
.on('checksum', (checksum) => {
debug('verify:checksum', this.checksum, '==', checksum)
if (!_.isEqual(this.checksum, checksum)) {
const error = new Error(`Verification failed: ${JSON.stringify(this.checksum)} != ${JSON.stringify(checksum)}`)
error.code = 'EVALIDATION'
this.emit('error', error)
}
this._emitFinish()
})
.on('finish', () => {
debug('verify:end')
// NOTE: As the 'checksum' event only happens after
// the 'finish' event, we `._emitFinish()` there instead of here
})
return this
}
/**
* @summary Abort the flashing process
* @example
* imageWriter.abort()
*/
abort () {
if (this.source) {
this.emit('abort')
this.source.destroy()
}
}
/**
* @summary Emits the `finish` event with state metadata
* @private
* @example
* this._emitFinish()
*/
_emitFinish () {
this.emit('finish', {
bytesRead: this.bytesRead,
bytesWritten: this.bytesWritten,
checksum: this.checksum
})
}
/**
* @summary Creates a write pipeline from given options
* @private
* @param {Object} options - options
* @param {Object} options.image - source image
* @param {Number} [options.fd] - destination file descriptor
* @param {String} [options.path] - destination file path
* @param {String} [options.flags] - destination file open flags
* @param {String} [options.mode] - destination file mode
* @returns {Pipage} pipeline
* @example
* this._createWritePipeline({
* image: sourceImage,
* path: '/dev/rdisk2'
* })
*/
_createWritePipeline (options) {
const pipeline = new Pipage({
readableObjectMode: true
})
const image = options.image
const source = image.stream
const progressOptions = {
length: image.size.original,
time: 500
}
let progressStream = null
// If the final size is an estimation,
// use the original source size for progress metering
if (image.size.final.estimation) {
progressStream = new ProgressStream(progressOptions)
pipeline.append(progressStream)
}
const isPassThrough = image.transform instanceof stream.PassThrough
// If the image transform is a pass-through,
// ignore it to save on the overhead
if (image.transform && !isPassThrough) {
pipeline.append(image.transform)
}
// If the final size is known precisely and we're not
// using block maps, then use the final size for progress
if (!image.size.final.estimation && !image.bmap) {
progressOptions.length = image.size.final.value
progressStream = new ProgressStream(progressOptions)
pipeline.append(progressStream)
}
if (image.bmap) {
const blockMap = BlockMap.parse(image.bmap)
debug('write:bmap', blockMap)
progressStream = new ProgressStream(progressOptions)
pipeline.append(progressStream)
pipeline.append(new BlockMap.FilterStream(blockMap))
} else {
debug('write:blockstream')
const checksumStream = new ChecksumStream({
algorithms: options.checksumAlgorithms
})
pipeline.append(checksumStream)
pipeline.bind(checksumStream, 'checksum')
pipeline.append(new BlockStream())
}
const target = new BlockWriteStream({
fd: options.fd,
path: options.path,
flags: options.flags,
mode: options.mode,
autoClose: false
})
// Pipeline.bind(progressStream, 'progress');
progressStream.on('progress', (state) => {
state.device = options.path
state.type = 'write'
this.emit('progress', state)
})
pipeline.bind(source, 'error')
pipeline.bind(target, 'error')
source.pipe(pipeline)
.pipe(target)
this.source = source
this.pipeline = pipeline
this.target = target
return pipeline
}
/**
* @summary Creates a verification pipeline from given options
* @private
* @param {Object} options - options
* @param {Object} options.image - image
* @param {Number} [options.fd] - file descriptor
* @param {String} [options.path] - file path
* @param {String} [options.flags] - file open flags
* @param {String} [options.mode] - file mode
* @returns {Pipage} pipeline
* @example
* this._createVerifyPipeline({
* path: '/dev/rdisk2'
* })
*/
_createVerifyPipeline (options) {
const pipeline = new Pipage()
const size = options.image.size.final.estimation ? this.bytesWritten : options.image.size.final.value
const progressStream = new ProgressStream({
length: size,
time: 500
})
pipeline.append(progressStream)
if (options.image.bmap) {
debug('verify:bmap')
const blockMap = BlockMap.parse(options.image.bmap)
pipeline.append(new BlockMap.FilterStream(blockMap))
} else {
const checksumStream = new ChecksumStream({
algorithms: options.checksumAlgorithms
})
pipeline.append(checksumStream)
pipeline.bind(checksumStream, 'checksum')
}
const source = new BlockReadStream({
fd: options.fd,
path: options.path,
flags: options.flags,
mode: options.mode,
autoClose: false,
start: 0,
end: size
})
pipeline.bind(source, 'error')
progressStream.on('progress', (state) => {
state.device = options.path
state.type = 'check'
this.emit('progress', state)
})
this.target = null
this.source = source
this.pipeline = pipeline
source.pipe(pipeline).resume()
return pipeline
}
}
module.exports = ImageWriter

113
lib/writer/progress-stream.js Normal file

@ -0,0 +1,113 @@
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const Stream = require('stream')
const speedometer = require('speedometer')
const PERCENT = 100
const DEFAULT_TIME_MS = 500
/**
* @class ProgressStream
* @public
*/
class ProgressStream extends Stream.Transform {
/**
* @summary ProgressStream constructor
* @param {Object} options - options
* @param {Number} options.length - expected total
* @param {Number} [options.time] - time interval to report progress
* @example
* new ProgressStream({ length: 1024 * 1024 })
* .on('progress', (state) => {
* console.log( state.percentage.toFixed(0) + '%' )
* })
*/
constructor (options) {
super(options)
this.start = 0
this.interval = options.time || DEFAULT_TIME_MS
this.timer = null
this.meter = speedometer()
this.delta = 0
this.state = {
delta: 0,
eta: 0,
length: options.length,
percentage: 0,
remaining: 0,
runtime: 0,
speed: 0,
transferred: 0
}
this.clear = () => {
clearInterval(this.timer)
}
this.update = () => {
this.state.delta = this.delta
this.state.transferred += this.delta
this.state.percentage = this.state.transferred / this.state.length * PERCENT
this.state.remaining = this.state.length - this.state.transferred
this.state.runtime = Date.now() - this.start
this.state.speed = this.meter(this.state.delta)
this.state.eta = this.state.remaining / this.state.speed
this.delta = 0
this.emit('progress', this.state)
}
this.once('end', this.clear)
this.once('end', this.update)
this.once('error', this.clear)
this.timer = setInterval(this.update, this.interval)
}
/**
* @summary Transform function
* @private
* @param {Buffer} chunk - chunk
* @param {String} _ - encoding
* @param {Function} next - callback
* @example
* progressStream.write(buffer)
*/
_transform (chunk, _, next) {
this.start = this.start || Date.now()
this.delta += chunk.length
next(null, chunk)
}
/**
* @summary Destroy handler
* @param {Error} [error] - error
* @param {Function} done - callback
* @example
* progressStream.destroy()
*/
_destroy (error, done) {
this.clear()
done(error)
}
}
module.exports = ProgressStream

284
npm-shrinkwrap.json generated

@ -493,6 +493,23 @@
"from": "block-stream@*",
"resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz"
},
"blockmap": {
"version": "2.0.2",
"from": "blockmap@2.0.2",
"resolved": "https://registry.npmjs.org/blockmap/-/blockmap-2.0.2.tgz",
"dependencies": {
"debug": {
"version": "3.1.0",
"from": "debug@>=3.1.0 <4.0.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz"
},
"ms": {
"version": "2.0.0",
"from": "ms@2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
}
}
},
"bloodline": {
"version": "1.0.1",
"from": "bloodline@>=1.0.1 <2.0.0",
@ -518,36 +535,9 @@
}
},
"bluebird-retry": {
"version": "0.10.1",
"from": "bluebird-retry@>=0.10.1 <0.11.0",
"resolved": "https://registry.npmjs.org/bluebird-retry/-/bluebird-retry-0.10.1.tgz"
},
"bmapflash": {
"version": "1.2.1",
"from": "bmapflash@>=1.2.1 <2.0.0",
"resolved": "https://registry.npmjs.org/bmapflash/-/bmapflash-1.2.1.tgz",
"dependencies": {
"lodash": {
"version": "4.17.4",
"from": "lodash@>=4.14.2 <5.0.0",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz"
},
"through2": {
"version": "2.0.3",
"from": "through2@^2.0.1",
"resolved": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz"
},
"xml2js": {
"version": "0.4.17",
"from": "xml2js@>=0.4.17 <0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.17.tgz"
},
"xtend": {
"version": "4.0.1",
"from": "xtend@~4.0.0",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
}
}
"version": "0.11.0",
"from": "bluebird-retry@0.11.0",
"resolved": "https://registry.npmjs.org/bluebird-retry/-/bluebird-retry-0.11.0.tgz"
},
"boom": {
"version": "2.10.1",
@ -637,11 +627,6 @@
"from": "buffer-crc32@>=0.2.1 <0.3.0",
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.5.tgz"
},
"buffer-shims": {
"version": "1.0.0",
"from": "buffer-shims@>=1.0.0 <2.0.0",
"resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz"
},
"buffers": {
"version": "0.1.1",
"from": "buffers@>=0.1.1 <0.2.0",
@ -991,9 +976,9 @@
"resolved": "https://registry.npmjs.org/crc/-/crc-3.4.4.tgz"
},
"crc32-stream": {
"version": "1.0.1",
"from": "crc32-stream@>=1.0.1 <2.0.0",
"resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-1.0.1.tgz"
"version": "2.0.0",
"from": "crc32-stream@latest",
"resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-2.0.0.tgz"
},
"create-error-class": {
"version": "3.0.2",
@ -1099,9 +1084,16 @@
}
},
"debug": {
"version": "2.6.0",
"from": "debug@>=2.2.0 <3.0.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.0.tgz"
"version": "2.6.8",
"from": "debug@2.6.8",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz",
"dependencies": {
"ms": {
"version": "2.0.0",
"from": "ms@2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
}
}
},
"decamelize": {
"version": "1.2.0",
@ -1274,11 +1266,6 @@
"resolved": "https://registry.npmjs.org/detective/-/detective-4.5.0.tgz",
"dev": true
},
"dev-null-stream": {
"version": "0.0.1",
"from": "dev-null-stream@0.0.1",
"resolved": "https://registry.npmjs.org/dev-null-stream/-/dev-null-stream-0.0.1.tgz"
},
"diff": {
"version": "1.4.0",
"from": "diff@1.4.0",
@ -1299,6 +1286,33 @@
}
}
},
"dom-serializer": {
"version": "0.1.0",
"from": "dom-serializer@>=0.0.0 <1.0.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz",
"dependencies": {
"domelementtype": {
"version": "1.1.3",
"from": "domelementtype@>=1.1.1 <1.2.0",
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz"
}
}
},
"domelementtype": {
"version": "1.3.0",
"from": "domelementtype@^1.3.0",
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.0.tgz"
},
"domhandler": {
"version": "2.4.1",
"from": "domhandler@^2.3.0",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.1.tgz"
},
"domutils": {
"version": "1.6.2",
"from": "domutils@^1.5.1",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-1.6.2.tgz"
},
"dot-prop": {
"version": "4.1.1",
"from": "dot-prop@>=4.1.0 <5.0.0",
@ -1767,24 +1781,17 @@
"resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz",
"dev": true
},
"entities": {
"version": "1.1.1",
"from": "entities@^1.1.1",
"resolved": "https://registry.npmjs.org/entities/-/entities-1.1.1.tgz"
},
"env-paths": {
"version": "1.0.0",
"from": "env-paths@>=1.0.0 <2.0.0",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-1.0.0.tgz",
"dev": true
},
"error": {
"version": "7.0.2",
"from": "error@>=7.0.2 <8.0.0",
"resolved": "https://registry.npmjs.org/error/-/error-7.0.2.tgz",
"dependencies": {
"xtend": {
"version": "4.0.1",
"from": "xtend@~4.0.0",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
}
}
},
"error-ex": {
"version": "1.3.0",
"from": "error-ex@>=1.2.0 <2.0.0",
@ -2166,43 +2173,6 @@
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz",
"dev": true
},
"etcher-image-write": {
"version": "9.1.3",
"from": "etcher-image-write@9.1.3",
"resolved": "https://registry.npmjs.org/etcher-image-write/-/etcher-image-write-9.1.3.tgz",
"dependencies": {
"bluebird": {
"version": "3.5.0",
"from": "bluebird@>=3.4.7 <4.0.0",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.0.tgz"
},
"debug": {
"version": "2.6.6",
"from": "debug@>=2.6.6 <3.0.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.6.tgz"
},
"lodash": {
"version": "4.17.4",
"from": "lodash@>=4.17.4 <5.0.0",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz"
},
"ms": {
"version": "0.7.3",
"from": "ms@0.7.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-0.7.3.tgz"
},
"through2": {
"version": "2.0.3",
"from": "through2@>=2.0.1 <3.0.0",
"resolved": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz"
},
"xtend": {
"version": "4.0.1",
"from": "xtend@~4.0.0",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
}
}
},
"event-emitter": {
"version": "0.3.5",
"from": "event-emitter@>=0.3.5 <0.4.0",
@ -3046,6 +3016,11 @@
"resolved": "https://registry.npmjs.org/html-tag/-/html-tag-0.2.1.tgz",
"dev": true
},
"htmlparser2": {
"version": "3.9.2",
"from": "htmlparser2@>=3.9.2 <4.0.0",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.9.2.tgz"
},
"http-proxy-agent": {
"version": "0.2.7",
"from": "http-proxy-agent@>=0.0.0 <1.0.0",
@ -3450,7 +3425,8 @@
"isarray": {
"version": "0.0.1",
"from": "isarray@0.0.1",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
"resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
"dev": true
},
"isbinaryfile": {
"version": "3.0.2",
@ -4823,7 +4799,8 @@
"ms": {
"version": "0.7.2",
"from": "ms@0.7.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-0.7.2.tgz"
"resolved": "https://registry.npmjs.org/ms/-/ms-0.7.2.tgz",
"dev": true
},
"multistream": {
"version": "2.1.0",
@ -8204,7 +8181,8 @@
"object-keys": {
"version": "0.4.0",
"from": "object-keys@>=0.4.0 <0.5.0",
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz"
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz",
"dev": true
},
"object.omit": {
"version": "2.0.1",
@ -8442,6 +8420,23 @@
"from": "pinkie-promise@>=2.0.0 <3.0.0",
"resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz"
},
"pipage": {
"version": "1.0.2",
"from": "pipage@latest",
"resolved": "https://registry.npmjs.org/pipage/-/pipage-1.0.2.tgz",
"dependencies": {
"debug": {
"version": "3.1.0",
"from": "debug@>=3.1.0 <3.2.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz"
},
"ms": {
"version": "2.0.0",
"from": "ms@2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
}
}
},
"pkg": {
"version": "4.1.1",
"from": "pkg@4.1.1",
@ -8585,7 +8580,16 @@
"progress-stream": {
"version": "1.2.0",
"from": "progress-stream@>=1.1.1 <2.0.0",
"resolved": "https://registry.npmjs.org/progress-stream/-/progress-stream-1.2.0.tgz"
"resolved": "https://registry.npmjs.org/progress-stream/-/progress-stream-1.2.0.tgz",
"dev": true,
"dependencies": {
"speedometer": {
"version": "0.1.4",
"from": "speedometer@>=0.1.2 <0.2.0",
"resolved": "https://registry.npmjs.org/speedometer/-/speedometer-0.1.4.tgz",
"dev": true
}
}
},
"project-name": {
"version": "0.2.6",
@ -8715,14 +8719,29 @@
"resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz"
},
"readable-stream": {
"version": "2.2.2",
"version": "2.3.3",
"from": "readable-stream@>=2.0.2 <3.0.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.3.tgz",
"dependencies": {
"inherits": {
"version": "2.0.3",
"from": "inherits@>=2.0.3 <2.1.0",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
},
"isarray": {
"version": "1.0.0",
"from": "isarray@>=1.0.0 <1.1.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
},
"safe-buffer": {
"version": "5.1.1",
"from": "safe-buffer@>=5.1.1 <5.2.0",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz"
},
"string_decoder": {
"version": "1.0.3",
"from": "string_decoder@>=1.0.3 <1.1.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz"
}
}
},
@ -9343,23 +9362,6 @@
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-0.0.4.tgz",
"dev": true
},
"slice-stream2": {
"version": "2.0.1",
"from": "slice-stream2@>=2.0.0 <3.0.0",
"resolved": "https://registry.npmjs.org/slice-stream2/-/slice-stream2-2.0.1.tgz",
"dependencies": {
"through2": {
"version": "2.0.3",
"from": "through2@^2.0.1",
"resolved": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz"
},
"xtend": {
"version": "4.0.1",
"from": "xtend@>=4.0.0 <4.1.0",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
}
}
},
"slide": {
"version": "1.1.6",
"from": "slide@>=1.1.5 <2.0.0",
@ -9430,9 +9432,9 @@
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.2.1.tgz"
},
"speedometer": {
"version": "0.1.4",
"from": "speedometer@>=0.1.2 <0.2.0",
"resolved": "https://registry.npmjs.org/speedometer/-/speedometer-0.1.4.tgz"
"version": "1.0.0",
"from": "speedometer@1.0.0",
"resolved": "https://registry.npmjs.org/speedometer/-/speedometer-1.0.0.tgz"
},
"sprintf-js": {
"version": "1.0.3",
@ -9479,23 +9481,6 @@
"resolved": "https://registry.npmjs.org/stdout-stream/-/stdout-stream-1.4.0.tgz",
"dev": true
},
"stream-chunker": {
"version": "1.2.8",
"from": "stream-chunker@>=1.2.8 <2.0.0",
"resolved": "https://registry.npmjs.org/stream-chunker/-/stream-chunker-1.2.8.tgz",
"dependencies": {
"through2": {
"version": "2.0.3",
"from": "through2@~2.0.0",
"resolved": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz"
},
"xtend": {
"version": "4.0.1",
"from": "xtend@>=4.0.0 <4.1.0",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
}
}
},
"stream-meter": {
"version": "1.0.4",
"from": "stream-meter@1.0.4",
@ -9511,12 +9496,8 @@
"string_decoder": {
"version": "0.10.31",
"from": "string_decoder@>=0.10.0 <0.11.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
},
"string-template": {
"version": "0.2.1",
"from": "string-template@>=0.2.1 <0.3.0",
"resolved": "https://registry.npmjs.org/string-template/-/string-template-0.2.1.tgz"
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
"dev": true
},
"string-width": {
"version": "1.0.1",
@ -9772,11 +9753,13 @@
"version": "0.2.3",
"from": "through2@>=0.2.3 <0.3.0",
"resolved": "https://registry.npmjs.org/through2/-/through2-0.2.3.tgz",
"dev": true,
"dependencies": {
"readable-stream": {
"version": "1.1.14",
"from": "readable-stream@>=1.1.9 <1.2.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz"
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz",
"dev": true
}
}
},
@ -9795,7 +9778,8 @@
"tmp": {
"version": "0.0.31",
"from": "tmp@0.0.31",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.31.tgz"
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.31.tgz",
"dev": true
},
"to-file": {
"version": "0.2.0",
@ -10268,6 +10252,11 @@
"resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-3.0.0.tgz",
"dev": true
},
"xml": {
"version": "1.0.1",
"from": "xml@>=1.0.1 <2.0.0",
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz"
},
"xml2js": {
"version": "0.4.17",
"from": "xml2js@0.4.17",
@ -10292,7 +10281,8 @@
"xtend": {
"version": "2.1.2",
"from": "xtend@>=2.1.1 <2.2.0",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz"
"resolved": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz",
"dev": true
},
"y18n": {
"version": "3.2.1",

package.json

@ -47,14 +47,16 @@
"angular-ui-bootstrap": "2.5.0",
"angular-ui-router": "0.4.2",
"bindings": "1.2.1",
"blockmap": "2.0.2",
"bluebird": "3.4.1",
"bluebird-retry": "0.11.0",
"bootstrap-sass": "3.3.6",
"chalk": "1.1.3",
"command-join": "2.0.0",
"debug": "2.6.0",
"crc32-stream": "2.0.0",
"debug": "2.6.8",
"drivelist": "5.2.4",
"electron-is-running-in-asar": "1.0.0",
"etcher-image-write": "9.1.3",
"file-type": "4.1.0",
"flexboxgrid": "6.3.0",
"gpt": "1.0.0",
@ -68,17 +70,20 @@
"node-ipc": "8.9.2",
"node-stream-zip": "1.3.7",
"path-is-inside": "1.0.2",
"pipage": "1.0.2",
"pretty-bytes": "1.0.4",
"prop-types": "15.5.9",
"react": "15.5.4",
"react-dom": "15.5.4",
"react2angular": "1.1.3",
"readable-stream": "2.3.3",
"redux": "3.5.2",
"request": "2.81.0",
"resin-cli-form": "1.4.1",
"resin-cli-visuals": "1.3.1",
"resin-corvus": "1.0.0-beta.30",
"semver": "5.1.1",
"speedometer": "1.0.0",
"sudo-prompt": "6.1.0",
"trackjs": "2.3.1",
"udif": "0.10.0",


@ -73,6 +73,7 @@ describe('ImageStream: DMG', function () {
describe('.getImageMetadata()', function () {
it('should return the correct metadata', function () {
const image = path.join(DMG_PATH, 'etcher-test-zlib.dmg')
const compressedSize = fs.statSync(path.join(DMG_PATH, 'etcher-test-zlib.dmg')).size
const uncompressedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size
return imageStream.getImageMetadata(image).then((metadata) => {
@ -80,7 +81,7 @@ describe('ImageStream: DMG', function () {
path: image,
extension: 'dmg',
size: {
original: uncompressedSize,
original: compressedSize,
final: {
estimation: false,
value: uncompressedSize
@ -107,6 +108,7 @@ describe('ImageStream: DMG', function () {
describe('.getImageMetadata()', function () {
it('should return the correct metadata', function () {
const image = path.join(DMG_PATH, 'etcher-test-raw.dmg')
const compressedSize = fs.statSync(path.join(DMG_PATH, 'etcher-test-raw.dmg')).size
const uncompressedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size
return imageStream.getImageMetadata(image).then((metadata) => {
@ -114,7 +116,7 @@ describe('ImageStream: DMG', function () {
path: image,
extension: 'dmg',
size: {
original: uncompressedSize,
original: compressedSize,
final: {
estimation: false,
value: uncompressedSize