Integrate etcher-sdk

Alexis Svinartchouk 2018-06-08 15:01:00 +01:00
parent ca6aa5d4aa
commit 6143023502
13 changed files with 967 additions and 564 deletions

View File

@@ -100,6 +100,11 @@ electron-develop: | $(BUILD_TEMPORARY_DIRECTORY)
-s $(PLATFORM) \
-n $(BUILD_TEMPORARY_DIRECTORY)/npm \
-a $(S3_BUCKET)
# patch from https://github.com/mapbox/node-pre-gyp/pull/279/files, required for lzma-native in electron child processes
# we only apply the patch if it hasn't been applied
if ! [ -f node_modules/lzma-native/node_modules/node-pre-gyp/lib/util/versioning.js.orig ]; \
then patch --backup --force --strip=1 --ignore-whitespace < patches/allow-electron-forks-of-modules-that-use-pre-gyp.patch; \
fi;
electron-test:
$(RESIN_SCRIPTS)/electron/test.sh \
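Note: patch --backup keeps the pre-patch copy of the file as versioning.js.orig, so the presence of that .orig backup is what the guard above checks to decide whether the patch has already been applied and can be skipped.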

View File

@@ -230,7 +230,8 @@ app.run(() => {
})
app.run(($timeout) => {
driveScanner.on('devices', (drives) => {
function updateDrives() {
const drives = Array.from(driveScanner.drives)
const BLACKLISTED_DRIVES = settings.has('driveBlacklist')
? settings.get('driveBlacklist').split(',')
: []
@@ -240,18 +241,23 @@ app.run(($timeout) => {
// available drives list has changed, and incorrectly
// keeps asking the user to "Connect a drive".
$timeout(() => {
if (BLACKLISTED_DRIVES.length) {
const allowedDrives = drives.filter((drive) => {
return !(BLACKLISTED_DRIVES.includes(drive.devicePath) ||
BLACKLISTED_DRIVES.includes(drive.device) ||
BLACKLISTED_DRIVES.includes(drive.raw))
})
availableDrives.setDrives(allowedDrives)
} else {
availableDrives.setDrives(drives)
}
const allowedDrives = drives
.filter((drive) => {
return !(
BLACKLISTED_DRIVES.includes(drive.devicePath) ||
BLACKLISTED_DRIVES.includes(drive.device) ||
BLACKLISTED_DRIVES.includes(drive.raw)
)
})
.map((drive) => {
// TODO: we should be able to use the SourceDestination `drive` directly
return drive.drive
})
availableDrives.setDrives(allowedDrives)
})
})
}
driveScanner.on('attach', updateDrives)
driveScanner.on('detach', updateDrives)
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
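To make the filtering above concrete, here is a tiny standalone sketch of the same blacklist check; the setting value and the drive objects below are made up, while the matched fields come from the code above:

const blacklist = '/dev/sda,/dev/mmcblk0'.split(',') // e.g. settings.get('driveBlacklist')

// A drive is kept only if none of its identifiers appear in the blacklist.
const isAllowed = (drive) => {
  return !(
    blacklist.includes(drive.devicePath) ||
    blacklist.includes(drive.device) ||
    blacklist.includes(drive.raw)
  )
}

console.log(isAllowed({ device: '/dev/sda', raw: '/dev/sda', devicePath: null })) // false
console.log(isAllowed({ device: '/dev/sdb', raw: '/dev/sdb', devicePath: null })) // true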

View File

@@ -46,7 +46,7 @@ const verifyNoNilFields = (object, fields, name) => {
return _.isNil(_.get(object, field))
})
if (nilFields.length) {
throw new Error(`Missing ${name} fields: ${nilFields.join(', ')}`)
throw new Error(`Missing ${name} fields: ${nilFields.join(', ')} ${JSON.stringify(object, null, 4)}`)
}
}
@@ -165,7 +165,8 @@ const storeReducer = (state = DEFAULT_STATE, action) => {
const drives = action.data
if (!_.isArray(drives) || !_.every(drives, _.isPlainObject)) {
//if (!_.isArray(drives) || !_.every(drives, _.isPlainObject)) {
if (!_.isArray(drives)) {
throw errors.createError({
title: `Invalid drives: ${drives}`
})

View File

@@ -16,16 +16,27 @@
'use strict'
const settings = require('../models/settings')
const SDK = require('../../../sdk')
const sdk = require('etcher-sdk')
const process = require('process')
const scanner = SDK.createScanner({
blockdevice: {
get includeSystemDrives () {
return settings.get('unsafeMode') && !settings.get('disableUnsafeMode')
}
},
usbboot: {}
const settings = require('../models/settings')
const permissions = require('../../../shared/permissions')
function includeSystemDrives() {
return settings.get('unsafeMode') && !settings.get('disableUnsafeMode')
}
const adapters = [
new sdk.scanner.adapters.BlockDeviceAdapter(includeSystemDrives)
]
permissions.isElevated()
.then((isElevated) => {
if ((process.platform !== 'linux') || isElevated) {
adapters.push(new sdk.scanner.adapters.UsbbootDeviceAdapter())
}
})
const scanner = new sdk.scanner.Scanner(adapters)
module.exports = scanner
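For reference, a minimal sketch of how the rest of the GUI consumes this scanner. The require path is illustrative, and the start() call is assumed from the existing drive-scanner usage elsewhere in the app (the matching stop() call appears in the flash controller below):

'use strict'

const driveScanner = require('./modules/drive-scanner') // illustrative path

// Refresh whenever the set of detected devices changes; the scanner
// keeps the current snapshot in its iterable `drives` collection.
function refresh() {
  const drives = Array.from(driveScanner.drives)
  console.log(drives.map((drive) => drive.device))
}

driveScanner.on('attach', refresh)
driveScanner.on('detach', refresh)
driveScanner.on('error', (error) => {
  console.error('scan error', error)
})

driveScanner.start()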

View File

@@ -149,7 +149,7 @@ module.exports = function (
// otherwise Windows throws EPERM
driveScanner.stop()
return imageWriter.flash(image.path, devices)
return imageWriter.flash(image.path, drives)
}).then(() => {
if (!flashState.wasLastFlashCancelled()) {
const flashResults = flashState.getFlashResults()

View File

@@ -16,11 +16,15 @@
'use strict'
const Bluebird = require('bluebird')
const _ = require('lodash')
const ipc = require('node-ipc')
const sdk = require('etcher-sdk')
const EXIT_CODES = require('../../shared/exit-codes')
const errors = require('../../shared/errors')
const ImageWriter = require('../../sdk/writer')
const BlockWriteStream = require('../../sdk/writer/block-write-stream')
const BlockReadStream = require('../../sdk/writer/block-read-stream')
ipc.config.id = process.env.IPC_CLIENT_ID
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT
@@ -85,6 +89,124 @@ const handleError = (error) => {
terminate(EXIT_CODES.GENERAL_ERROR)
}
function runVerifier(verifier, onFail) {
return new Promise((resolve, reject) => {
verifier.on('error', onFail)
verifier.on('finish', resolve)
verifier.run()
})
}
function pipeRegularSourceToDestination(source, destination, verify, onProgress, onFail) {
let checksum
let sourceMetadata
let step = 'flashing'
let lastPosition = 0
const errors = new Map() // destination -> error map
const state = {
active: destination.destinations.length,
flashing: destination.destinations.length,
verifying: 0,
failed: 0,
successful: 0
}
function updateState() {
state.failed = errors.size
state.active = destination.destinations.length - state.failed
if (step === 'flashing') {
state.flashing = state.active
state.verifying = 0
} else if (step === 'verifying') {
state.flashing = 0
state.verifying = state.active
} else if (step === 'finished') {
state.successful = state.active
}
}
function onProgress2(progressEvent) {
lastPosition = progressEvent.position
progressEvent.percentage = progressEvent.position / sourceMetadata.size * 100
// NOTE: We need to guard against this becoming Infinity,
// because that value isn't transmitted properly over IPC and becomes `null`
progressEvent.eta = progressEvent.speed ? (sourceMetadata.size - progressEvent.position) / progressEvent.speed : 0
progressEvent.totalSpeed = progressEvent.speed * state.active
Object.assign(progressEvent, state)
onProgress(progressEvent)
}
return Promise.all([ source.createReadStream(), destination.createWriteStream(), source.getMetadata() ])
.then(([ sourceStream, destinationStream, metadata ]) => {
destinationStream.on('fail', (error) => {
errors.set(error.destination, error.error)
updateState()
onFail({ device: error.destination.drive, error: error.error }) // TODO: device should be error.destination
onProgress2({ eta: 0, speed: 0, position: lastPosition }) // TODO: this is not needed if a success / error screen is shown
})
sourceMetadata = metadata
return new Promise((resolve, reject) => {
let done = false
sourceStream.on('error', reject)
destinationStream.on('progress', onProgress2)
if (verify) {
const hasher = sdk.sourceDestination.createHasher()
hasher.on('checksum', (cs) => {
checksum = cs
if (done) {
resolve()
}
})
sourceStream.pipe(hasher)
}
destinationStream.on('done', () => {
done = true
if (!verify || (checksum !== undefined)) {
resolve()
}
})
sourceStream.pipe(destinationStream)
})
})
.then(() => {
if (verify) {
step = 'verifying'
updateState()
const verifier = destination.createVerifier(checksum, sourceMetadata.size)
verifier.on('progress', onProgress2)
return runVerifier(verifier, onFail)
}
})
.then(() => {
step = 'finished'
updateState()
onProgress2({ speed: 0, position: sourceMetadata.size })
})
.then(() => {
const result = {
bytesWritten: lastPosition,
devices: {
failed: state.failed,
successful: state.active
},
errors: []
}
if (verify && (checksum !== undefined)) {
result.checksum = { xxhash: checksum }
}
for (const [ destination, error ] of errors) {
error.device = destination.drive.device
result.errors.push(error)
}
return result
})
}
function sourceDestinationDisposer(sourceDestination) {
return Bluebird.resolve(sourceDestination.open())
.return(sourceDestination)
.disposer(() => {
return Bluebird.resolve(sourceDestination.close()).catchReturn()
})
}
ipc.connectTo(IPC_SERVER_ID, () => {
process.once('uncaughtException', handleError)
@@ -114,15 +236,6 @@ ipc.connectTo(IPC_SERVER_ID, () => {
let writer = null
ipc.of[IPC_SERVER_ID].on('write', (options) => {
const destinations = [].concat(options.destinations)
log(`Image: ${options.imagePath}`)
log(`Devices: ${destinations.join(', ')}`)
log(`Umount on success: ${options.unmountOnSuccess}`)
log(`Validate on success: ${options.validateWriteOnSuccess}`)
let exitCode = EXIT_CODES.SUCCESS
/**
* @summary Progress handler
* @param {Object} state - progress state
@@ -133,6 +246,8 @@ ipc.connectTo(IPC_SERVER_ID, () => {
ipc.of[IPC_SERVER_ID].emit('state', state)
}
let exitCode = EXIT_CODES.SUCCESS
/**
* @summary Finish handler
* @param {Object} results - Flash results
@@ -184,19 +299,41 @@ ipc.connectTo(IPC_SERVER_ID, () => {
})
}
writer = new ImageWriter({
writer = new ImageWriter({ // TODO: remove
verify: options.validateWriteOnSuccess,
unmountOnSuccess: options.unmountOnSuccess,
checksumAlgorithms: options.checksumAlgorithms || []
})
writer.on('error', onError)
writer.on('fail', onFail)
writer.on('progress', onProgress)
writer.on('finish', onFinish)
writer.on('abort', onAbort)
writer.write(options.imagePath, destinations)
const destinations = _.map(options.destinations, 'drive.device')
const dests = options.destinations.map((destination) => {
return new sdk.sourceDestination.BlockDevice(destination)
})
Bluebird.using(
sourceDestinationDisposer(new sdk.sourceDestination.File(options.imagePath, sdk.sourceDestination.File.OpenFlags.Read)),
sourceDestinationDisposer(new sdk.sourceDestination.MultiDestination(dests)),
(source, destination) => {
return source.getInnerSource()
.then((innerSource) => {
return Bluebird.using(sourceDestinationDisposer(innerSource), (innerSource) => {
return pipeRegularSourceToDestination(innerSource, destination, options.validateWriteOnSuccess, onProgress, onFail)
})
})
}
)
.then((results) => {
onFinish(results)
})
.catch((error) => {
onError(error)
})
log(`Image: ${options.imagePath}`)
log(`Devices: ${destinations.join(', ')}`)
log(`Umount on success: ${options.unmountOnSuccess}`)
log(`Validate on success: ${options.validateWriteOnSuccess}`)
})
ipc.of[IPC_SERVER_ID].on('cancel', () => {
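Condensed, the new write path in this child process amounts to the following etcher-sdk flow, reusing the sourceDestinationDisposer and pipeRegularSourceToDestination helpers defined above; the image path and the drive description are placeholders, and the progress/fail handlers stand in for the real ones:

const Bluebird = require('bluebird')
const sdk = require('etcher-sdk')

// Open the image as a File source; getInnerSource() unwraps compressed
// or container formats down to the raw disk image.
const source = new sdk.sourceDestination.File(
  '/path/to/image.img', // placeholder
  sdk.sourceDestination.File.OpenFlags.Read
)

// Wrap each target drive in a BlockDevice and fan the write out through
// a single MultiDestination, as the 'write' handler above does.
const destination = new sdk.sourceDestination.MultiDestination([
  new sdk.sourceDestination.BlockDevice({ device: '/dev/sdb', raw: '/dev/sdb' }) // placeholder drive description
])

Bluebird.using(
  sourceDestinationDisposer(source),
  sourceDestinationDisposer(destination),
  (src, dest) => {
    return src.getInnerSource().then((innerSource) => {
      return Bluebird.using(sourceDestinationDisposer(innerSource), (inner) => {
        // verify = true; console.log / console.error stand in for onProgress / onFail
        return pipeRegularSourceToDestination(inner, dest, true, console.log, console.error)
      })
    })
  }
).then((results) => {
  console.log(results)
})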

View File

@@ -23,7 +23,7 @@ const debug = require('debug')('etcher:writer:block-write-stream')
const errors = require('./error-types')
const CHUNK_SIZE = 64 * 1024
const UPDATE_INTERVAL_MS = 500
const UPDATE_INTERVAL_MS = 1000 / 60
/**
* @summary I/O retry base timeout, in milliseconds
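For reference: 1000 / 60 ≈ 16.7 ms, so progress updates are now emitted up to roughly sixty times per second (about once per frame on a 60 Hz display) instead of once every 500 ms.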

View File

@@ -219,6 +219,7 @@ exports.elevateCommand = (command, options) => {
// for now, we should make sure we double check if the error messages
// have changed every time we upgrade `sudo-prompt`.
}).catch((error) => {
console.log('error', error.cause)
return _.includes(error.message, 'is not in the sudoers file')
}, () => {
throw errors.createUserError({

npm-shrinkwrap.json (generated): 1241 lines changed

File diff suppressed because it is too large

View File

@@ -63,13 +63,14 @@
"debug": "3.1.0",
"drivelist": "6.4.6",
"electron-is-running-in-asar": "1.0.0",
"etcher-sdk": "github:resin-io-modules/etcher-sdk#b12e63b49c4a01305a2809b504859a3940927399",
"file-type": "4.1.0",
"flexboxgrid": "6.3.0",
"gpt": "1.0.0",
"immutable": "3.8.1",
"inactivity-timer": "1.0.0",
"lodash": "4.17.10",
"lzma-native": "1.5.2",
"lzma-native": "3.0.8",
"mbr": "1.1.2",
"mime-types": "2.1.15",
"mountutils": "1.3.16",

View File

@@ -0,0 +1,29 @@
--- a/node_modules/lzma-native/node_modules/node-pre-gyp/lib/util/versioning.js
+++ b/node_modules/lzma-native/node_modules/node-pre-gyp/lib/util/versioning.js
@@ -70,7 +70,14 @@ function get_runtime_abi(runtime, target_version) {
if (runtime === 'node-webkit') {
return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']);
} else if (runtime === 'electron') {
- return get_electron_abi(runtime, target_version || process.versions.electron);
+ var electron_version = target_version || process.versions.electron;
+ if (!electron_version) {
+ // TODO PR something to electron to pass in the version number for forks
+ // https://github.com/electron/electron/issues/9058
+ try { electron_version = require('electron/package.json').version; }
+ catch (_) {}
+ }
+ return get_electron_abi(runtime, electron_version);
} else {
if (runtime != 'node') {
throw new Error("Unknown Runtime: '" + runtime + "'");
@@ -245,7 +252,8 @@ function get_process_runtime(versions) {
var runtime = 'node';
if (versions['node-webkit']) {
runtime = 'node-webkit';
- } else if (versions.electron) {
+ } else if (versions.electron || process.env.ELECTRON_RUN_AS_NODE) {
+ // Running in electron or a childProcess.fork of electron
runtime = 'electron';
}
return runtime;
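The scenario this patch covers: a process forked from Electron (or any process started with ELECTRON_RUN_AS_NODE=1) runs the Electron binary in Node mode, so process.versions.electron may be missing even though native add-ons such as lzma-native still have to load binaries built for Electron's ABI. A hypothetical standalone restatement of the patched detection:

// Simplified version of get_process_runtime with the patched condition.
function detectRuntime(versions, env) {
  if (versions['node-webkit']) {
    return 'node-webkit'
  }
  // The added check: a child forked from Electron sets ELECTRON_RUN_AS_NODE
  // but may not report an electron version in process.versions.
  if (versions.electron || env.ELECTRON_RUN_AS_NODE) {
    return 'electron'
  }
  return 'node'
}

console.log(detectRuntime({ node: '8.9.3' }, { ELECTRON_RUN_AS_NODE: '1' })) // 'electron'
console.log(detectRuntime({ node: '8.9.3' }, {}))                            // 'node'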

View File

@@ -1,15 +0,0 @@
--- b/node_modules/lzma-native/index.js 2018-01-23 14:37:50.000000000 -0400
+++ a/node_modules/lzma-native/index.js 2018-01-23 14:37:00.000000000 -0400
@@ -7,11 +7,8 @@ var extend = require('util-extend');
var assert = require('assert');
var fs = require('fs');
-// node-pre-gyp magic
-var nodePreGyp = require('node-pre-gyp');
var path = require('path');
-var binding_path = nodePreGyp.find(path.resolve(path.join(__dirname,'./package.json')));
-var native = require(binding_path);
+var native = require(path.join(__dirname, 'binding', 'lzma_native.node'));
extend(exports, native);

View File

@@ -124,7 +124,7 @@ const etcherConfig = _.assign({
// on the tree (for testing purposes) or inside a generated
// bundle (for production purposes), by translating
// relative require paths within the bundle.
if (/\/(sdk|shared)/i.test(request) || /package\.json$/.test(request)) {
if (/\/(etcher-sdk|sdk|shared)/i.test(request) || /package\.json$/.test(request)) {
const output = path.join(__dirname, 'generated')
const dirname = path.join(context, request)
const relative = path.relative(output, dirname)
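The widened test now also rewrites etcher-sdk requires relative to the generated bundle. A quick sanity check of what the new pattern matches (the sample request strings are illustrative, not taken from the config):

const pattern = /\/(etcher-sdk|sdk|shared)/i

console.log(pattern.test('../../node_modules/etcher-sdk/build/index')) // true (newly matched)
console.log(pattern.test('../../sdk/writer'))                          // true
console.log(pattern.test('../../shared/errors'))                       // true
console.log(pattern.test('../models/settings'))                        // false
console.log(/package\.json$/.test('etcher-sdk/package.json'))          // true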