Handle the last fail as an error if all devices failed

Alexis Svinartchouk 2018-07-02 12:27:40 +01:00
parent d07d535993
commit 268c5302e8
4 changed files with 65 additions and 51 deletions
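In short: the writer now records one error per failed destination in a Map and, once writing (and optional verification) is done, checks whether every destination is in that Map; only then is the last recorded failure re-thrown so the flash is reported as an error instead of a zero-device success. A minimal standalone sketch of that pattern (the finalize helper and the device paths are made up for illustration; only lastMapValue mirrors the diff below):

// Keep one error per failed destination.
function lastMapValue (map) {
  let value
  for (value of map.values()) {
    // walk to the end; `value` keeps the last inserted entry
  }
  return value
}

// Stand-in for the end of writeAndValidate(): decide whether the run failed.
function finalize (errors, destinationCount) {
  if (errors.size < destinationCount) {
    // At least one destination succeeded: failures stay non-fatal.
    return { successful: destinationCount - errors.size, failed: errors.size }
  }
  // Every destination failed: promote the last failure to a fatal error.
  throw lastMapValue(errors)
}

// Hypothetical run where both devices failed.
const errors = new Map([
  [ '/dev/sdb', new Error('EIO: i/o error') ],
  [ '/dev/sdc', new Error('EBUSY: resource busy') ]
])
try {
  finalize(errors, 2)
} catch (error) {
  console.error('All devices failed:', error.message) // EBUSY: resource busy
}

In the actual change, allDestinationsFailed() does the size comparison and the throw happens after source.close() and destination.close(), so it lands in the .catch(onError) added at the end of the promise chain.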


@@ -33,7 +33,6 @@ const _ = require('lodash')
 const semver = require('semver')
 const uuidV4 = require('uuid/v4')
->>>>>>> Show raspberry pi usbboot update progress in devices list
 const EXIT_CODES = require('../../shared/exit-codes')
 const messages = require('../../shared/messages')
 const s3Packages = require('../../shared/s3-packages')


@@ -86,21 +86,20 @@ const handleError = (error) => {
   terminate(EXIT_CODES.GENERAL_ERROR)
 }
 
-function runVerifier(verifier, onFail) {
-  return new Promise((resolve, reject) => {
-    verifier.on('error', onFail);
-    verifier.on('finish', resolve);
-    verifier.run();
-  });
-}
+function lastMapValue(map) {
+  let value
+  for (value of map.values()){
+  }
+  return value
+}
 
-function pipeRegularSourceToDestination(source, destination, verify, onProgress, onFail) {
+function writeAndValidate(source, destination, verify, onProgress, onFail, onFinish, onError) {
   let checksum
   let sparse
   let sourceMetadata
   let step = 'flashing'
   let lastPosition = 0
-  const errors = new Map() // destination -> error map
+  const errors = new Map() // destination -> error map TODO: include open and close errors in it
   const state = {
     active: destination.destinations.size,
     flashing: destination.destinations.size,
@@ -109,6 +108,9 @@ function pipeRegularSourceToDestination(source, destination, verify, onProgress,
     successful: 0,
     type: step
   }
+  function allDestinationsFailed() {
+    return (errors.size === destination.destinations.size)
+  }
   function updateState() {
     state.type = step
     state.failed = errors.size
@@ -140,7 +142,17 @@ function pipeRegularSourceToDestination(source, destination, verify, onProgress,
     Object.assign(progressEvent, state)
     onProgress(progressEvent)
   }
-  return source.canCreateSparseReadStream()
+  function onFail2(error) {
+    errors.set(error.destination, error.error)
+    updateState()
+    onFail(error)
+  }
+  destination.on('fail', onFail2)
+  return Promise.all([ source.getInnerSource(), destination.open() ])
+    .then(([ _source ]) => {
+      source = _source
+      return source.canCreateSparseReadStream()
+    })
     .then((_sparse) => {
       sparse = _sparse
       let sourceStream
@@ -156,18 +168,15 @@ function pipeRegularSourceToDestination(source, destination, verify, onProgress,
       return Promise.all([ sourceStream, destinationStream, source.getMetadata() ])
     })
     .then(([ sourceStream, destinationStream, metadata ]) => {
-      destinationStream.on('fail', (error) => {
-        errors.set(error.destination, error.error)
-        updateState()
-        onFail({ device: error.destination.drive, error: error.error }) // TODO: device should be error.destination
-      })
+      destinationStream.on('fail', onFail2)
       sourceMetadata = metadata
       return new Promise((resolve, reject) => {
         let done = false
+        let hasher
        sourceStream.on('error', reject)
        destinationStream.on('progress', onProgress2)
        if (verify && !sparse) {
-          const hasher = sdk.sourceDestination.createHasher()
+          hasher = sdk.sourceDestination.createHasher()
          hasher.on('checksum', (cs) => {
            checksum = cs
            if (done) {
@@ -178,6 +187,13 @@ function pipeRegularSourceToDestination(source, destination, verify, onProgress,
        }
        destinationStream.on('done', () => {
          done = true;
+          if (allDestinationsFailed() && (hasher !== undefined)) {
+            sourceStream.unpipe(hasher)
+            verify = false
+            resolve()
+            return
+          }
          if (sparse || !verify || (checksum !== undefined)) {
            resolve()
          }
@@ -196,15 +212,24 @@ function pipeRegularSourceToDestination(source, destination, verify, onProgress,
      updateState()
      const verifier = destination.createVerifier(sparse ? sourceMetadata.blockMap : checksum, sourceMetadata.size) // TODO: ensure blockMap exists
      verifier.on('progress', onProgress2)
-      return runVerifier(verifier, onFail)
+      verifier.on('fail', onFail2)
+      return new Promise((resolve) => {
+        verifier.on('finish', resolve);
+        verifier.run();
+      });
    }
  })
  .then(() => {
    step = 'finished'
    updateState()
-    //onProgress2({ speed: 0, position: sourceMetadata.size })
+    return Promise.all([ source.close(), destination.close() ])
  })
  .then(() => {
+    // If all destinations errored, treat the last fail as an error
+    if (allDestinationsFailed()) {
+      const lastError = lastMapValue(errors)
+      throw lastError
+    }
    const result = {
      bytesWritten: lastPosition,
      devices: {
@@ -220,16 +245,9 @@ function pipeRegularSourceToDestination(source, destination, verify, onProgress,
      error.device = destination.drive.device
      result.errors.push(error)
    }
-    return result
-  })
-}
-
-function sourceDestinationDisposer(sourceDestination) {
-  return Bluebird.resolve(sourceDestination.open())
-    .return(sourceDestination)
-    .disposer(() => {
-      return Bluebird.resolve(sourceDestination.close()).catchReturn()
-    })
+    onFinish(result)
+  })
+  .catch(onError)
 }
 
 ipc.connectTo(IPC_SERVER_ID, () => {
@@ -315,39 +333,32 @@ ipc.connectTo(IPC_SERVER_ID, () => {
    /**
     * @summary Failure handler (non-fatal errors)
-     * @param {Object} event - event data (error & device)
+     * @param {Object} error - MultiDestinationError
     * @example
     * writer.on('fail', onFail)
     */
-    const onFail = (event) => {
+    const onFail = (error) => {
      ipc.of[IPC_SERVER_ID].emit('fail', {
-        device: event.device,
-        error: errors.toJSON(event.error)
+        device: error.destination.drive, // TODO: device should be error.destination
+        error: errors.toJSON(error.error)
      })
    }
 
-    const destinations = _.map(options.destinations, 'drive.device')
+    const destinations = _.map(options.destinations, 'device')
    const dests = options.destinations.map((destination) => {
      return new sdk.sourceDestination.BlockDevice(destination, options.unmountOnSuccess)
    })
+    const destination = new sdk.sourceDestination.MultiDestination(dests)
    const source = new sdk.sourceDestination.File(options.imagePath, sdk.sourceDestination.File.OpenFlags.Read)
-    source.getInnerSource()
-      .then((innerSource) => {
-        return Bluebird.using(
-          sourceDestinationDisposer(innerSource),
-          sourceDestinationDisposer(new sdk.sourceDestination.MultiDestination(dests)),
-          (innerSource, destination) => {
-            destination.on('fail', onFail)
-            return pipeRegularSourceToDestination(innerSource, destination, options.validateWriteOnSuccess, onProgress, onFail)
-          }
-        )
-      })
-      .then((results) => {
-        onFinish(results)
-      })
-      .catch((error) => {
-        onError(error)
-      })
+    writeAndValidate(
+      source,
+      destination,
+      options.validateWriteOnSuccess,
+      onProgress,
+      onFail,
+      onFinish,
+      onError
+    )
    log(`Image: ${options.imagePath}`)
    log(`Devices: ${destinations.join(', ')}`)
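To follow the new failure flow outside the diff, here is a standalone sketch of the onFail2 wiring, with a plain EventEmitter standing in for the etcher-sdk MultiDestination and a reduced state object (illustrative only; the real updateState() recomputes more fields):

const { EventEmitter } = require('events')

const destination = new EventEmitter() // stand-in for MultiDestination
const errors = new Map()               // destination -> error
const state = { active: 2, failed: 0 }

const onFail = (failure) => {
  // In the real code this is forwarded over IPC to the GUI.
  console.warn(`${failure.destination} failed: ${failure.error.message}`)
}

function onFail2 (failure) {
  errors.set(failure.destination, failure.error) // remember the error per destination
  state.failed = errors.size                     // keep reported state in sync
  state.active -= 1
  onFail(failure)                                // still a non-fatal notification
}

destination.on('fail', onFail2)

// Simulate both devices failing mid-write.
destination.emit('fail', { destination: '/dev/sdb', error: new Error('EIO') })
destination.emit('fail', { destination: '/dev/sdc', error: new Error('EBUSY') })

console.log(state.failed === 2) // true -> allDestinationsFailed() would be true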

npm-shrinkwrap.json (generated)

@@ -3068,7 +3068,7 @@
    },
    "etcher-sdk": {
      "version": "0.0.1",
-      "resolved": "git://github.com/resin-io-modules/etcher-sdk.git#bda51535715edb3691b783973801434bb3d78b30",
+      "resolved": "git://github.com/resin-io-modules/etcher-sdk.git#9d483eec059a9e149d0f1c2e2746b8a816f87bb4",
      "dependencies": {
        "@types/lodash": {
          "version": "4.14.110",
@@ -3126,6 +3126,10 @@
          "version": "5.1.2",
          "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz"
        },
+        "semver": {
+          "version": "5.5.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz"
+        },
        "string_decoder": {
          "version": "1.1.1",
          "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz"


@@ -63,7 +63,7 @@
    "debug": "3.1.0",
    "drivelist": "6.4.6",
    "electron-is-running-in-asar": "1.0.0",
-    "etcher-sdk": "github:resin-io-modules/etcher-sdk#bda51535715edb3691b783973801434bb3d78b30",
+    "etcher-sdk": "github:resin-io-modules/etcher-sdk#9d483eec059a9e149d0f1c2e2746b8a816f87bb4",
    "file-type": "4.1.0",
    "flexboxgrid": "6.3.0",
    "gpt": "1.0.0",