patch: refactor scanner, loader and flasher out of gui + upgrade to electron 25

This commit is contained in:
Edwin Joassart 2023-08-30 13:57:05 +02:00
parent 09e13e9b43
commit fb8ed5b529
29 changed files with 34134 additions and 1008 deletions


@ -15,7 +15,7 @@ inputs:
default: "accounts+apple@balena.io"
NODE_VERSION:
type: string
default: "16.x"
default: "18.x"
VERBOSE:
type: string
default: "true"

.gitignore (vendored, 1 line changed)

@ -28,6 +28,7 @@ pids
# Generated files
/generated
/binaries
# Dependency directory
# https://docs.npmjs.com/misc/faq#should-i-check-my-node-modules-folder-into-git


@ -14,5 +14,11 @@
<true/>
<key>com.apple.security.network.client</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.get-task-allow</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
</dict>
</plist>


@ -16,30 +16,30 @@
import * as electron from 'electron';
import * as remote from '@electron/remote';
import * as sdk from 'etcher-sdk';
import * as _ from 'lodash';
import { debounce, capitalize, Dictionary, values } from 'lodash';
import outdent from 'outdent';
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { v4 as uuidV4 } from 'uuid';
import * as packageJSON from '../../../package.json';
import { DrivelistDrive, isSourceDrive } from '../../shared/drive-constraints';
import { DrivelistDrive } from '../../shared/drive-constraints';
import * as EXIT_CODES from '../../shared/exit-codes';
import * as messages from '../../shared/messages';
import * as availableDrives from './models/available-drives';
import * as flashState from './models/flash-state';
import { deselectImage, getImage } from './models/selection-state';
import * as settings from './models/settings';
import { Actions, observe, store } from './models/store';
import * as analytics from './modules/analytics';
import { scanner as driveScanner } from './modules/drive-scanner';
import { startApiAndSpawnChild } from './modules/api';
import * as exceptionReporter from './modules/exception-reporter';
import * as osDialog from './os/dialog';
import * as windowProgress from './os/window-progress';
import MainPage from './pages/main/MainPage';
import './css/main.css';
import * as i18next from 'i18next';
import { promises } from 'dns';
import { SourceMetadata } from '../../shared/typings/source-selector';
window.addEventListener(
'unhandledrejection',
@ -89,7 +89,7 @@ analytics.logEvent('Application start', {
version: currentVersion,
});
const debouncedLog = _.debounce(console.log, 1000, { maxWait: 1000 });
const debouncedLog = debounce(console.log, 1000, { maxWait: 1000 });
function pluralize(word: string, quantity: number) {
return `${quantity} ${word}${quantity === 1 ? '' : 's'}`;
@ -115,7 +115,7 @@ observe(() => {
// might cause some non-sense flashing state logs including
// `undefined` values.
debouncedLog(outdent({ newline: ' ' })`
${_.capitalize(currentFlashState.type)}
${capitalize(currentFlashState.type)}
${active},
${currentFlashState.percentage}%
at
@ -128,173 +128,40 @@ observe(() => {
`);
});
/**
* @summary The radix used by USB ID numbers
*/
const USB_ID_RADIX = 16;
/**
* @summary The expected length of a USB ID number
*/
const USB_ID_LENGTH = 4;
/**
* @summary Convert a USB id (e.g. product/vendor) to a string
*
* @example
* console.log(usbIdToString(2652))
* > '0x0a5c'
*/
function usbIdToString(id: number): string {
return `0x${_.padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`;
}
/**
* @summary Product ID of BCM2708
*/
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763;
/**
* @summary Product ID of BCM2710
*/
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764;
/**
* @summary Compute module descriptions
*/
const COMPUTE_MODULE_DESCRIPTIONS: _.Dictionary<string> = {
[USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
[USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3',
};
async function driveIsAllowed(drive: {
devicePath: string;
device: string;
raw: string;
}) {
const driveBlacklist = (await settings.get('driveBlacklist')) || [];
return !(
driveBlacklist.includes(drive.devicePath) ||
driveBlacklist.includes(drive.device) ||
driveBlacklist.includes(drive.raw)
);
}
type Drive =
| sdk.sourceDestination.BlockDevice
| sdk.sourceDestination.UsbbootDrive
| sdk.sourceDestination.DriverlessDevice;
function prepareDrive(drive: Drive) {
if (drive instanceof sdk.sourceDestination.BlockDevice) {
// @ts-ignore (BlockDevice.drive is private)
return drive.drive;
} else if (drive instanceof sdk.sourceDestination.UsbbootDrive) {
// This is a workaround etcher expecting a device string and a size
// @ts-ignore
drive.device = drive.usbDevice.portId;
drive.size = null;
// @ts-ignore
drive.progress = 0;
drive.disabled = true;
drive.on('progress', (progress) => {
updateDriveProgress(drive, progress);
});
return drive;
} else if (drive instanceof sdk.sourceDestination.DriverlessDevice) {
const description =
COMPUTE_MODULE_DESCRIPTIONS[
drive.deviceDescriptor.idProduct.toString()
] || 'Compute Module';
return {
device: `${usbIdToString(
drive.deviceDescriptor.idVendor,
)}:${usbIdToString(drive.deviceDescriptor.idProduct)}`,
displayName: 'Missing drivers',
description,
mountpoints: [],
isReadOnly: false,
isSystem: false,
disabled: true,
icon: 'warning',
size: null,
link: 'https://www.raspberrypi.com/documentation/computers/compute-module.html#flashing-the-compute-module-emmc',
linkCTA: 'Install',
linkTitle: 'Install missing drivers',
linkMessage: outdent`
Would you like to download the necessary drivers from the Raspberry Pi Foundation?
This will open your browser.
Once opened, download and run the installer from the "Windows Installer" section to install the drivers
`,
};
function setDrives(drives: Dictionary<DrivelistDrive>) {
// prevent setting drives while flashing otherwise we might lose some while we unmount them
if (!flashState.isFlashing()) {
availableDrives.setDrives(values(drives));
}
}
function setDrives(drives: _.Dictionary<DrivelistDrive>) {
availableDrives.setDrives(_.values(drives));
}
// Spawning the child process without privileges to get the drives list
// TODO: clean up this mess of exports
export let requestMetadata: any;
function getDrives() {
return _.keyBy(availableDrives.getDrives(), 'device');
}
// start the api and spawn the child process
startApiAndSpawnChild({
withPrivileges: false,
}).then(({ emit, registerHandler }) => {
// start scanning
emit('scan');
async function addDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
if (!(await driveIsAllowed(preparedDrive))) {
return;
}
const drives = getDrives();
drives[preparedDrive.device] = preparedDrive;
setDrives(drives);
}
// make the sourceMetadata awaitable to be used on source selection
requestMetadata = async (params: any): Promise<SourceMetadata> => {
emit('sourceMetadata', JSON.stringify(params));
function removeDrive(drive: Drive) {
if (
drive instanceof sdk.sourceDestination.BlockDevice &&
// @ts-ignore BlockDevice.drive is private
isSourceDrive(drive.drive, getImage())
) {
// Deselect the image if it was on the drive that was removed.
// This will also deselect the image if the drive mountpoints change.
deselectImage();
}
const preparedDrive = prepareDrive(drive);
const drives = getDrives();
delete drives[preparedDrive.device];
setDrives(drives);
}
return new Promise((resolve) =>
registerHandler('sourceMetadata', (data: any) => {
resolve(JSON.parse(data));
}),
);
};
function updateDriveProgress(
drive: sdk.sourceDestination.UsbbootDrive,
progress: number,
) {
const drives = getDrives();
// @ts-ignore
const driveInMap = drives[drive.device];
if (driveInMap) {
// @ts-ignore
drives[drive.device] = { ...driveInMap, progress };
setDrives(drives);
}
}
driveScanner.on('attach', addDrive);
driveScanner.on('detach', removeDrive);
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
// otherwise we risk presenting the same error over
// and over again to the user, while also heavily
// spamming our error reporting service.
driveScanner.stop();
return exceptionReporter.report(error);
registerHandler('drives', (data: any) => {
setDrives(JSON.parse(data));
});
});
driveScanner.start();
let popupExists = false;
analytics.initAnalytics();


@ -139,8 +139,9 @@ export function FlashResults({
};
} & FlexProps) {
const [showErrorsInfo, setShowErrorsInfo] = React.useState(false);
const allFailed = !skip && results.devices.successful === 0;
const someFailed = results.devices.failed !== 0 || errors.length !== 0;
const allFailed = !skip && results?.devices?.successful === 0;
const someFailed = results?.devices?.failed !== 0 || errors?.length !== 0;
const effectiveSpeed = bytesToMegabytes(getEffectiveSpeed(results)).toFixed(
1,
);


@ -20,13 +20,13 @@ import LinkSvg from '@fortawesome/fontawesome-free/svgs/solid/link.svg';
import ExclamationTriangleSvg from '@fortawesome/fontawesome-free/svgs/solid/exclamation-triangle.svg';
import ChevronDownSvg from '@fortawesome/fontawesome-free/svgs/solid/chevron-down.svg';
import ChevronRightSvg from '@fortawesome/fontawesome-free/svgs/solid/chevron-right.svg';
import { sourceDestination } from 'etcher-sdk';
import { ipcRenderer, IpcRendererEvent } from 'electron';
import * as _ from 'lodash';
import { GPTPartition, MBRPartition } from 'partitioninfo';
import { uniqBy, isNil } from 'lodash';
import * as path from 'path';
import * as prettyBytes from 'pretty-bytes';
import * as React from 'react';
import { requestMetadata } from '../../app';
import {
Flex,
ButtonProps,
@ -47,7 +47,7 @@ import { observe } from '../../models/store';
import * as analytics from '../../modules/analytics';
import * as exceptionReporter from '../../modules/exception-reporter';
import * as osDialog from '../../os/dialog';
import { replaceWindowsNetworkDriveLetter } from '../../os/windows-network-drives';
import {
ChangeButton,
DetailsText,
@ -64,8 +64,12 @@ import ImageSvg from '../../../assets/image.svg';
import SrcSvg from '../../../assets/src.svg';
import { DriveSelector } from '../drive-selector/drive-selector';
import { DrivelistDrive } from '../../../../shared/drive-constraints';
import axios, { AxiosRequestConfig } from 'axios';
import { isJson } from '../../../../shared/utils';
import {
SourceMetadata,
Authentication,
Source,
} from '../../../../shared/typings/source-selector';
import * as i18next from 'i18next';
const recentUrlImagesKey = 'recentUrlImages';
@ -83,7 +87,7 @@ function normalizeRecentUrlImages(urls: any[]): URL[] {
}
})
.filter((url) => url !== undefined);
urls = _.uniqBy(urls, (url) => url.href);
urls = uniqBy(urls, (url) => url.href);
return urls.slice(urls.length - 5);
}
@ -301,24 +305,6 @@ const FlowSelector = styled(
}
`;
export type Source =
| typeof sourceDestination.File
| typeof sourceDestination.BlockDevice
| typeof sourceDestination.Http;
export interface SourceMetadata extends sourceDestination.Metadata {
hasMBR?: boolean;
partitions?: MBRPartition[] | GPTPartition[];
path: string;
displayName: string;
description: string;
SourceType: Source;
drive?: DrivelistDrive;
extension?: string;
archiveExtension?: string;
auth?: Authentication;
}
interface SourceSelectorProps {
flashing: boolean;
}
@ -336,11 +322,6 @@ interface SourceSelectorState {
imageLoading: boolean;
}
interface Authentication {
username: string;
password: string;
}
export class SourceSelector extends React.Component<
SourceSelectorProps,
SourceSelectorState
@ -381,43 +362,11 @@ export class SourceSelector extends React.Component<
this.setState({ imageLoading: true });
await this.selectSource(
imagePath,
isURL(this.normalizeImagePath(imagePath))
? sourceDestination.Http
: sourceDestination.File,
isURL(this.normalizeImagePath(imagePath)) ? 'Http' : 'File',
).promise;
this.setState({ imageLoading: false });
}
private async createSource(
selected: string,
SourceType: Source,
auth?: Authentication,
) {
try {
selected = await replaceWindowsNetworkDriveLetter(selected);
} catch (error: any) {
analytics.logException(error);
}
if (isJson(decodeURIComponent(selected))) {
const config: AxiosRequestConfig = JSON.parse(
decodeURIComponent(selected),
);
return new sourceDestination.Http({
url: config.url!,
axiosInstance: axios.create(_.omit(config, ['url'])),
});
}
if (SourceType === sourceDestination.File) {
return new sourceDestination.File({
path: selected,
});
}
return new sourceDestination.Http({ url: selected, auth });
}
public normalizeImagePath(imgPath: string) {
const decodedPath = decodeURIComponent(imgPath);
if (isJson(decodedPath)) {
@ -446,11 +395,10 @@ export class SourceSelector extends React.Component<
},
promise: (async () => {
const sourcePath = isString(selected) ? selected : selected.device;
let source;
let metadata: SourceMetadata | undefined;
if (isString(selected)) {
if (
SourceType === sourceDestination.Http &&
SourceType === 'Http' &&
!isURL(this.normalizeImagePath(selected))
) {
this.handleError(
@ -470,24 +418,14 @@ export class SourceSelector extends React.Component<
},
});
}
source = await this.createSource(selected, SourceType, auth);
if (cancelled) {
return;
}
try {
const innerSource = await source.getInnerSource();
if (cancelled) {
return;
}
metadata = await this.getMetadata(innerSource, selected);
if (cancelled) {
return;
}
metadata.SourceType = SourceType;
// this will send an event down to the backend asking for metadata;
// we'll get the response back through an event
if (!metadata.hasMBR && this.state.warning === null) {
metadata = await requestMetadata({ selected, SourceType, auth });
if (!metadata?.hasMBR && this.state.warning === null) {
analytics.logEvent('Missing partition table', { metadata });
this.setState({
warning: {
@ -503,12 +441,6 @@ export class SourceSelector extends React.Component<
messages.error.openSource(sourcePath, error.message),
error,
);
} finally {
try {
await source.close();
} catch (error: any) {
// Noop
}
}
} else {
if (selected.partitionTableType === null) {
@ -525,13 +457,14 @@ export class SourceSelector extends React.Component<
displayName: selected.displayName,
description: selected.displayName,
size: selected.size as SourceMetadata['size'],
SourceType: sourceDestination.BlockDevice,
SourceType: 'BlockDevice',
drive: selected,
};
}
if (metadata !== undefined) {
metadata.auth = auth;
metadata.SourceType = SourceType;
selectionState.selectSource(metadata);
analytics.logEvent('Select image', {
// An easy way so we can quickly identify if we're making use of
@ -565,25 +498,6 @@ export class SourceSelector extends React.Component<
analytics.logEvent(title, { path: sourcePath });
}
private async getMetadata(
source: sourceDestination.SourceDestination,
selected: string | DrivelistDrive,
) {
const metadata = (await source.getMetadata()) as SourceMetadata;
const partitionTable = await source.getPartitionTable();
if (partitionTable) {
metadata.hasMBR = true;
metadata.partitions = partitionTable.partitions;
} else {
metadata.hasMBR = false;
}
if (isString(selected)) {
metadata.extension = path.extname(selected).slice(1);
metadata.path = selected;
}
return metadata;
}
private async openImageSelector() {
analytics.logEvent('Open image selector');
this.setState({ imageSelectorOpen: true });
@ -596,7 +510,7 @@ export class SourceSelector extends React.Component<
analytics.logEvent('Image selector closed');
return;
}
await this.selectSource(imagePath, sourceDestination.File).promise;
await this.selectSource(imagePath, 'File').promise;
} catch (error: any) {
exceptionReporter.report(error);
} finally {
@ -607,7 +521,7 @@ export class SourceSelector extends React.Component<
private async onDrop(event: React.DragEvent<HTMLDivElement>) {
const [file] = event.dataTransfer.files;
if (file) {
await this.selectSource(file.path, sourceDestination.File).promise;
await this.selectSource(file.path, 'File').promise;
}
}
@ -723,7 +637,7 @@ export class SourceSelector extends React.Component<
{i18next.t('cancel')}
</ChangeButton>
)}
{!_.isNil(imageSize) && !imageLoading && (
{!isNil(imageSize) && !imageLoading && (
<DetailsText>{prettyBytes(imageSize)}</DetailsText>
)}
</>
@ -827,7 +741,7 @@ export class SourceSelector extends React.Component<
let promise;
({ promise, cancel: cancelURLSelection } = this.selectSource(
imageURL,
sourceDestination.Http,
'Http',
auth,
));
await promise;
@ -850,10 +764,7 @@ export class SourceSelector extends React.Component<
if (originalList.length) {
const originalSource = originalList[0];
if (selectionImage?.drive?.device !== originalSource.device) {
this.selectSource(
originalSource,
sourceDestination.BlockDevice,
);
this.selectSource(originalSource, 'BlockDevice');
}
} else {
selectionState.deselectImage();
@ -868,7 +779,7 @@ export class SourceSelector extends React.Component<
) {
return selectionState.deselectImage();
}
this.selectSource(drive, sourceDestination.BlockDevice);
this.selectSource(drive, 'BlockDevice');
}
}}
/>

lib/gui/app/modules/api.ts (new file, 184 lines)

@ -0,0 +1,184 @@
/** This function will:
* - start the ipc server (api)
* - spawn the child process (privileged or not)
* - wait for the child process to connect to the api
* - return a promise that will resolve with the emit function for the api
*
* //TODO:
* - this should be refactored to reverse the control flow:
* - the child process should be the server
* - this should be the client
* - replace the current node-ipc api with a websocket api
* - centralise the api for both the writer and the scanner instead of having two instances running
*/
import * as ipc from 'node-ipc';
import { spawn } from 'child_process';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from '../../../../package.json';
import * as permissions from '../../../shared/permissions';
import { getAppPath } from '../../../shared/get-app-path';
import * as errors from '../../../shared/errors';
const THREADS_PER_CPU = 16;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
function writerArgv(): string[] {
let entryPoint = path.join(getAppPath(), 'generated', 'etcher-util');
// AppImages run over FUSE, so the files inside the mount point
// can only be accessed by the user that mounted the AppImage.
// This means we can't re-spawn Etcher as root from the same
// mount-point, and as a workaround, we re-mount the original
// AppImage as root.
if (os.platform() === 'linux' && process.env.APPIMAGE && process.env.APPDIR) {
entryPoint = entryPoint.replace(process.env.APPDIR, '');
return [
process.env.APPIMAGE,
'-e',
`require(\`\${process.env.APPDIR}${entryPoint}\`)`,
];
} else {
return [entryPoint];
}
}
function writerEnv(
IPC_CLIENT_ID: string,
IPC_SERVER_ID: string,
IPC_SOCKET_ROOT: string,
) {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT,
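// e.g. an 8-core machine gets a libuv threadpool of 8 * 16 = 128 threads for concurrent I/O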
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
async function spawnChild({
withPrivileges,
IPC_CLIENT_ID,
IPC_SERVER_ID,
IPC_SOCKET_ROOT,
}: {
withPrivileges: boolean;
IPC_CLIENT_ID: string;
IPC_SERVER_ID: string;
IPC_SOCKET_ROOT: string;
}) {
const argv = writerArgv();
const env = writerEnv(IPC_CLIENT_ID, IPC_SERVER_ID, IPC_SOCKET_ROOT);
if (withPrivileges) {
return await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
} else {
const process = await spawn(argv[0], argv.slice(1), {
env,
});
return { cancelled: false, process };
}
}
function terminateServer(server: any) {
// Turns out we need to destroy all sockets for
// the server to actually close. Otherwise, it
// just stops receiving any further connections,
// but remains open if there are active ones.
// @ts-ignore (no Server.sockets in @types/node-ipc)
for (const socket of server.sockets) {
socket.destroy();
}
server.stop();
}
// TODO: replace the custom ipc events by one generic "message" for all communication with the backend
function startApiAndSpawnChild({
withPrivileges,
}: {
withPrivileges: boolean;
}): Promise<any> {
// There might be multiple Etcher instances running at
// the same time, and we might spawn multiple children and apis, so we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}-${Date.now()}-${
withPrivileges ? 'privileged' : 'unprivileged'
}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}-${Date.now()}-${
withPrivileges ? 'privileged' : 'unprivileged'
}`;
const IPC_SOCKET_ROOT = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = IPC_SOCKET_ROOT;
return new Promise((resolve, reject) => {
ipc.serve();
// 'log' is a special message which brings back logs from the child process and prints them to the console
ipc.server.on('log', (message: string) => {
console.log(message);
});
// api to register more handlers with callbacks
const registerHandler = (event: string, handler: any) => {
ipc.server.on(event, handler);
};
// once the api is ready (meaning the child process is connected) we pass the emit and terminate functions to the caller
ipc.server.on('ready', (_: any, socket) => {
const emit = (channel: string, data: any) => {
ipc.server.emit(socket, channel, data);
};
resolve({
emit,
terminateServer: () => terminateServer(ipc.server),
registerHandler,
});
});
// on api error we terminate
ipc.server.on('error', (error: any) => {
terminateServer(ipc.server);
const errorObject = errors.fromJSON(error);
reject(errorObject);
});
// when the api is started we spawn the child process
ipc.server.on('start', async () => {
try {
const results = await spawnChild({
withPrivileges,
IPC_CLIENT_ID,
IPC_SERVER_ID,
IPC_SOCKET_ROOT,
});
// this will happen if the child is spawned withPrivileges and the privileges have been rejected
if (results.cancelled) {
reject();
}
} catch (error) {
reject(error);
}
});
// start the server
ipc.server.start();
});
}
export { startApiAndSpawnChild };
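
A minimal sketch of how the GUI side consumes this helper, mirroring the call sites added to lib/gui/app/app.ts and lib/gui/app/modules/image-writer.ts in this commit (the wrapper function name is illustrative; the 'drives' payload is the JSON string emitted by lib/util/api.ts):

import { startApiAndSpawnChild } from './api';

async function startUnprivilegedScan() {
	// spawn the unprivileged child and wait until it connects back over node-ipc
	const { emit, registerHandler, terminateServer } = await startApiAndSpawnChild({
		withPrivileges: false,
	});
	// drive updates arrive as JSON-serialised arrays on the 'drives' channel
	registerHandler('drives', (data: string) => {
		console.log('available drives:', JSON.parse(data));
	});
	// instruct the child to start its drive scanner
	emit('scan');
	// callers can shut the IPC server down later on
	return terminateServer;
}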


@ -17,38 +17,15 @@
import { Drive as DrivelistDrive } from 'drivelist';
import * as sdk from 'etcher-sdk';
import { Dictionary } from 'lodash';
import * as ipc from 'node-ipc';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from '../../../../package.json';
import * as errors from '../../../shared/errors';
import * as permissions from '../../../shared/permissions';
import { getAppPath } from '../../../shared/utils';
import { SourceMetadata } from '../components/source-selector/source-selector';
import { SourceMetadata } from '../../../shared/typings/source-selector';
import * as flashState from '../models/flash-state';
import * as selectionState from '../models/selection-state';
import * as settings from '../models/settings';
import * as analytics from '../modules/analytics';
import * as windowProgress from '../os/window-progress';
const THREADS_PER_CPU = 16;
// There might be multiple Etcher instances running at
// the same time, therefore we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
import { startApiAndSpawnChild } from './api';
import { terminateScanningServer } from '../app';
/**
* @summary Handle a flash error and log it to analytics
@ -80,51 +57,7 @@ function handleErrorLogging(
}
}
function terminateServer() {
// Turns out we need to destroy all sockets for
// the server to actually close. Otherwise, it
// just stops receiving any further connections,
// but remains open if there are active ones.
// @ts-ignore (no Server.sockets in @types/node-ipc)
for (const socket of ipc.server.sockets) {
socket.destroy();
}
ipc.server.stop();
}
function writerArgv(): string[] {
let entryPoint = path.join(getAppPath(), 'generated', 'child-writer.js');
// AppImages run over FUSE, so the files inside the mount point
// can only be accessed by the user that mounted the AppImage.
// This means we can't re-spawn Etcher as root from the same
// mount-point, and as a workaround, we re-mount the original
// AppImage as root.
if (os.platform() === 'linux' && process.env.APPIMAGE && process.env.APPDIR) {
entryPoint = entryPoint.replace(process.env.APPDIR, '');
return [
process.env.APPIMAGE,
'-e',
`require(\`\${process.env.APPDIR}${entryPoint}\`)`,
];
} else {
return [process.argv[0], entryPoint];
}
}
function writerEnv() {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT: ipc.config.socketRoot,
ELECTRON_RUN_AS_NODE: '1',
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
let cancelEmitter: (type: string) => void | undefined;
interface FlashResults {
skip?: boolean;
@ -144,22 +77,13 @@ async function performWrite(
drives: DrivelistDrive[],
onProgress: sdk.multiWrite.OnProgressFunction,
): Promise<{ cancelled?: boolean }> {
let cancelled = false;
let skip = false;
ipc.serve();
const { autoBlockmapping, decompressFirst } = await settings.getAll();
return await new Promise((resolve, reject) => {
ipc.server.on('error', (error) => {
terminateServer();
const errorObject = errors.fromJSON(error);
reject(errorObject);
});
ipc.server.on('log', (message) => {
console.log(message);
});
console.log({ image, drives });
return await new Promise(async (resolve, reject) => {
const flashResults: FlashResults = {};
const analyticsData = {
image,
drives,
@ -168,75 +92,51 @@ async function performWrite(
flashInstanceUuid: flashState.getFlashUuid(),
};
ipc.server.on('fail', ({ device, error }) => {
const onFail = ({ device, error }) => {
console.log('fail event');
console.log(device);
console.log(error);
if (device.devicePath) {
flashState.addFailedDeviceError({ device, error });
}
handleErrorLogging(error, analyticsData);
});
finish();
};
ipc.server.on('done', (event) => {
const onDone = (event) => {
console.log('done event');
event.results.errors = event.results.errors.map(
(data: Dictionary<any> & { message: string }) => {
return errors.fromJSON(data);
},
);
flashResults.results = event.results;
});
finish();
};
ipc.server.on('abort', () => {
terminateServer();
cancelled = true;
});
const onAbort = () => {
console.log('abort event');
flashResults.cancelled = true;
finish();
};
ipc.server.on('skip', () => {
terminateServer();
skip = true;
});
const onSkip = () => {
console.log('skip event');
flashResults.skip = true;
finish();
};
ipc.server.on('state', onProgress);
ipc.server.on('ready', (_data, socket) => {
ipc.server.emit(socket, 'write', {
image,
destinations: drives,
SourceType: image.SourceType.name,
autoBlockmapping,
decompressFirst,
});
});
const argv = writerArgv();
ipc.server.on('start', async () => {
console.log(`Elevating command: ${argv.join(' ')}`);
const env = writerEnv();
try {
const results = await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
flashResults.cancelled = cancelled || results.cancelled;
flashResults.skip = skip;
} catch (error: any) {
// This happens when the child is killed using SIGKILL
const SIGKILL_EXIT_CODE = 137;
if (error.code === SIGKILL_EXIT_CODE) {
error.code = 'ECHILDDIED';
}
reject(error);
} finally {
console.log('Terminating IPC server');
terminateServer();
}
const finish = () => {
console.log('Flash results', flashResults);
// The flash wasn't cancelled and we didn't get a 'done' event
// Catch this unexpected situation
if (
!flashResults.cancelled &&
!flashResults.skip &&
flashResults.results === undefined
) {
console.log(flashResults);
reject(
errors.createUserError({
title: 'The writer process ended unexpectedly',
@ -244,15 +144,40 @@ async function performWrite(
'Please try again, and contact the Etcher team if the problem persists',
}),
);
return;
}
resolve(flashResults);
});
// Clear the update lock timer to prevent longer
// flashing timing it out, and releasing the lock
ipc.server.start();
console.log('Terminating IPC server');
terminateServer();
resolve(flashResults);
};
// Spawn the child process with privileges and wait for the connection to be made
const { emit, registerHandler, terminateServer } =
await startApiAndSpawnChild({
withPrivileges: true,
});
registerHandler('state', onProgress);
registerHandler('fail', onFail);
registerHandler('done', onDone);
registerHandler('abort', onAbort);
registerHandler('skip', onSkip);
cancelEmitter = (cancelStatus: string) => emit(cancelStatus);
// Now that we know we're connected we can instruct the child process to start the write
const parameters = {
image,
destinations: drives,
SourceType: image.SourceType,
autoBlockmapping,
decompressFirst,
};
console.log('params', parameters);
emit('write', parameters);
});
// The process continues in the event handlers
}
/**
@ -269,6 +194,7 @@ export async function flash(
}
await flashState.setFlashingFlag();
flashState.setDevicePaths(
drives.map((d) => d.devicePath).filter((p) => p != null) as string[],
);
@ -284,6 +210,7 @@ export async function flash(
analytics.logEvent('Flash', analyticsData);
// start api and call the flasher
try {
const result = await write(image, drives, flashState.setProgressState);
await flashState.unsetFlashingFlag(result);
@ -292,8 +219,11 @@ export async function flash(
cancelled: false,
errorCode: error.code,
});
windowProgress.clear();
const { results = {} } = flashState.getFlashResults();
const eventData = {
...analyticsData,
errors: results.errors,
@ -304,7 +234,9 @@ export async function flash(
analytics.logEvent('Write failed', eventData);
throw error;
}
windowProgress.clear();
if (flashState.wasLastFlashCancelled()) {
const eventData = {
...analyticsData,
@ -327,6 +259,7 @@ export async function flash(
/**
* @summary Cancel write operation
* //TODO: find a better solution to handle cancellation
*/
export async function cancel(type: string) {
const status = type.toLowerCase();
@ -341,15 +274,7 @@ export async function cancel(type: string) {
};
analytics.logEvent('Cancel', analyticsData);
// Re-enable lock release on inactivity
try {
// @ts-ignore (no Server.sockets in @types/node-ipc)
const [socket] = ipc.server.sockets;
if (socket !== undefined) {
ipc.server.emit(socket, status);
}
} catch (error: any) {
analytics.logException(error);
if (cancelEmitter) {
cancelEmitter(status);
}
}


@ -27,7 +27,6 @@ import * as availableDrives from '../../models/available-drives';
import * as flashState from '../../models/flash-state';
import * as selection from '../../models/selection-state';
import * as analytics from '../../modules/analytics';
import { scanner as driveScanner } from '../../modules/drive-scanner';
import * as imageWriter from '../../modules/image-writer';
import * as notification from '../../os/notification';
import {
@ -95,10 +94,6 @@ async function flashImageToDrive(
return '';
}
// Stop scanning drives when flashing
// otherwise Windows throws EPERM
driveScanner.stop();
const iconPath = path.join('media', 'icon.png');
const basename = path.basename(image.path);
try {
@ -110,7 +105,7 @@ async function flashImageToDrive(
cancelled,
} = flashState.getFlashResults();
if (!skip && !cancelled) {
if (results.devices.successful > 0) {
if (results?.devices?.successful > 0) {
notifySuccess(iconPath, basename, drives, results.devices);
} else {
notifyFailure(iconPath, basename, drives);
@ -129,7 +124,6 @@ async function flashImageToDrive(
return errorMessage;
} finally {
availableDrives.setDrives([]);
driveScanner.start();
}
return '';


@ -26,10 +26,8 @@ import styled from 'styled-components';
import FinishPage from '../../components/finish/finish';
import { ReducedFlashingInfos } from '../../components/reduced-flashing-infos/reduced-flashing-infos';
import { SettingsModal } from '../../components/settings/settings';
import {
SourceMetadata,
SourceSelector,
} from '../../components/source-selector/source-selector';
import { SourceSelector } from '../../components/source-selector/source-selector';
import { SourceMetadata } from '../../../../shared/typings/source-selector';
import * as flashState from '../../models/flash-state';
import * as selectionState from '../../models/selection-state';
import * as settings from '../../models/settings';


@ -181,7 +181,7 @@ async function createMainWindow() {
electron.app.setAsDefaultProtocolClient(customProtocol);
mainWindow.setFullScreen(true);
// mainWindow.setFullScreen(true);
// Prevent flash of white when starting the application
mainWindow.once('ready-to-show', () => {


@ -1,333 +0,0 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Drive as DrivelistDrive } from 'drivelist';
import {
BlockDevice,
File,
Http,
Metadata,
SourceDestination,
} from 'etcher-sdk/build/source-destination';
import {
MultiDestinationProgress,
OnProgressFunction,
OnFailFunction,
decompressThenFlash,
DECOMPRESSED_IMAGE_PREFIX,
} from 'etcher-sdk/build/multi-write';
import { cleanupTmpFiles } from 'etcher-sdk/build/tmp';
import * as ipc from 'node-ipc';
import { totalmem } from 'os';
import { toJSON } from '../../shared/errors';
import { GENERAL_ERROR, SUCCESS } from '../../shared/exit-codes';
import { delay, isJson } from '../../shared/utils';
import { SourceMetadata } from '../app/components/source-selector/source-selector';
import axios from 'axios';
import * as _ from 'lodash';
ipc.config.id = process.env.IPC_CLIENT_ID as string;
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT as string;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
// > If set to 0, the client will NOT try to reconnect.
// See https://github.com/RIAEvangelist/node-ipc/
//
// The purpose behind this change is for this process
// to emit a "disconnect" event as soon as the GUI
// process is closed, so we can kill this process as well.
// @ts-ignore (0 is a valid value for stopRetrying and is not the same as false)
ipc.config.stopRetrying = 0;
const DISCONNECT_DELAY = 100;
const IPC_SERVER_ID = process.env.IPC_SERVER_ID as string;
/**
* @summary Send a log debug message to the IPC server
*/
function log(message: string) {
ipc.of[IPC_SERVER_ID].emit('log', message);
}
/**
* @summary Terminate the child writer process
*/
async function terminate(exitCode: number) {
ipc.disconnect(IPC_SERVER_ID);
await cleanupTmpFiles(Date.now(), DECOMPRESSED_IMAGE_PREFIX);
process.nextTick(() => {
process.exit(exitCode || SUCCESS);
});
}
/**
* @summary Handle a child writer error
*/
async function handleError(error: Error) {
ipc.of[IPC_SERVER_ID].emit('error', toJSON(error));
await delay(DISCONNECT_DELAY);
await terminate(GENERAL_ERROR);
}
export interface FlashError extends Error {
description: string;
device: string;
code: string;
}
export interface WriteResult {
bytesWritten?: number;
devices?: {
failed: number;
successful: number;
};
errors: FlashError[];
sourceMetadata?: Metadata;
}
export interface FlashResults extends WriteResult {
skip?: boolean;
cancelled?: boolean;
}
/**
* @summary writes the source to the destinations and validates the writes
* @param {SourceDestination} source - source
* @param {SourceDestination[]} destinations - destinations
* @param {Boolean} verify - whether to validate the writes or not
* @param {Boolean} autoBlockmapping - whether to trim ext partitions before writing
* @param {Function} onProgress - function to call on progress
* @param {Function} onFail - function to call on fail
* @returns {Promise<{ bytesWritten, devices, errors} >}
*/
async function writeAndValidate({
source,
destinations,
verify,
autoBlockmapping,
decompressFirst,
onProgress,
onFail,
}: {
source: SourceDestination;
destinations: BlockDevice[];
verify: boolean;
autoBlockmapping: boolean;
decompressFirst: boolean;
onProgress: OnProgressFunction;
onFail: OnFailFunction;
}): Promise<WriteResult> {
const { sourceMetadata, failures, bytesWritten } = await decompressThenFlash({
source,
destinations,
onFail,
onProgress,
verify,
trim: autoBlockmapping,
numBuffers: Math.min(
2 + (destinations.length - 1) * 32,
256,
Math.floor(totalmem() / 1024 ** 2 / 8),
),
decompressFirst,
});
const result: WriteResult = {
bytesWritten,
devices: {
failed: failures.size,
successful: destinations.length - failures.size,
},
errors: [],
sourceMetadata,
};
for (const [destination, error] of failures) {
const err = error as FlashError;
const drive = destination as BlockDevice;
err.device = drive.device;
err.description = drive.description;
result.errors.push(err);
}
return result;
}
interface WriteOptions {
image: SourceMetadata;
destinations: DrivelistDrive[];
autoBlockmapping: boolean;
decompressFirst: boolean;
SourceType: string;
httpRequest?: any;
}
ipc.connectTo(IPC_SERVER_ID, () => {
// Remove leftover tmp files older than 1 hour
cleanupTmpFiles(Date.now() - 60 * 60 * 1000);
process.once('uncaughtException', handleError);
// Gracefully exit on the following cases. If the parent
// process detects that child exit successfully but
// no flashing information is available, then it will
// assume that the child died halfway through.
process.once('SIGINT', async () => {
await terminate(SUCCESS);
});
process.once('SIGTERM', async () => {
await terminate(SUCCESS);
});
// The IPC server failed. Abort.
ipc.of[IPC_SERVER_ID].on('error', async () => {
await terminate(SUCCESS);
});
// The IPC server was disconnected. Abort.
ipc.of[IPC_SERVER_ID].on('disconnect', async () => {
await terminate(SUCCESS);
});
ipc.of[IPC_SERVER_ID].on('write', async (options: WriteOptions) => {
/**
* @summary Progress handler
* @param {Object} state - progress state
* @example
* writer.on('progress', onProgress)
*/
const onProgress = (state: MultiDestinationProgress) => {
ipc.of[IPC_SERVER_ID].emit('state', state);
};
let exitCode = SUCCESS;
/**
* @summary Abort handler
* @example
* writer.on('abort', onAbort)
*/
const onAbort = async () => {
log('Abort');
ipc.of[IPC_SERVER_ID].emit('abort');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
const onSkip = async () => {
log('Skip validation');
ipc.of[IPC_SERVER_ID].emit('skip');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
ipc.of[IPC_SERVER_ID].on('cancel', onAbort);
ipc.of[IPC_SERVER_ID].on('skip', onSkip);
/**
* @summary Failure handler (non-fatal errors)
* @param {SourceDestination} destination - destination
* @param {Error} error - error
* @example
* writer.on('fail', onFail)
*/
const onFail = (destination: SourceDestination, error: Error) => {
ipc.of[IPC_SERVER_ID].emit('fail', {
// TODO: device should be destination
// @ts-ignore (destination.drive is private)
device: destination.drive,
error: toJSON(error),
});
};
const destinations = options.destinations.map((d) => d.device);
const imagePath = options.image.path;
log(`Image: ${imagePath}`);
log(`Devices: ${destinations.join(', ')}`);
log(`Auto blockmapping: ${options.autoBlockmapping}`);
log(`Decompress first: ${options.decompressFirst}`);
const dests = options.destinations.map((destination) => {
return new BlockDevice({
drive: destination,
unmountOnSuccess: true,
write: true,
direct: true,
});
});
const { SourceType } = options;
try {
let source;
if (options.image.drive) {
source = new BlockDevice({
drive: options.image.drive,
direct: !options.autoBlockmapping,
});
} else {
if (SourceType === File.name) {
source = new File({
path: imagePath,
});
} else {
const decodedImagePath = decodeURIComponent(imagePath);
if (isJson(decodedImagePath)) {
const imagePathObject = JSON.parse(decodedImagePath);
source = new Http({
url: imagePathObject.url,
avoidRandomAccess: true,
axiosInstance: axios.create(_.omit(imagePathObject, ['url'])),
auth: options.image.auth,
});
} else {
source = new Http({
url: imagePath,
avoidRandomAccess: true,
auth: options.image.auth,
});
}
}
}
const results = await writeAndValidate({
source,
destinations: dests,
verify: true,
autoBlockmapping: options.autoBlockmapping,
decompressFirst: options.decompressFirst,
onProgress,
onFail,
});
log(`Finish: ${results.bytesWritten}`);
results.errors = results.errors.map((error) => {
return toJSON(error);
});
ipc.of[IPC_SERVER_ID].emit('done', { results });
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
} catch (error: any) {
exitCode = GENERAL_ERROR;
ipc.of[IPC_SERVER_ID].emit('error', toJSON(error));
}
});
ipc.of[IPC_SERVER_ID].on('connect', () => {
log(
`Successfully connected to IPC server: ${IPC_SERVER_ID}, socket root ${ipc.config.socketRoot}`,
);
ipc.of[IPC_SERVER_ID].emit('ready', {});
});
});

lib/pkg-sidekick.json (new file, 10 lines)

@ -0,0 +1,10 @@
{
"bin": "build/util/child-writer.js",
"pkg": {
"assets": [
"node_modules/usb/prebuilds/darwin-x64+arm64/node.napi.node",
"node_modules/lzma-native/prebuilds/darwin-arm64/node.napi.node",
"node_modules/drivelist/build/Release/drivelist.node"
]
}
}
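
This config presumably feeds the pkg build of the standalone etcher-util helper that writerArgv() above now spawns: build/util/child-writer.js is the entry point, and the listed prebuilt native modules (usb, lzma-native, drivelist) are bundled as assets so the packaged binary can load them at runtime.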


@ -19,7 +19,7 @@ import { join } from 'path';
import { env } from 'process';
import { promisify } from 'util';
import { getAppPath } from '../utils';
import { getAppPath } from '../get-app-path';
import { supportedLocales } from '../../gui/app/i18n';
const execFileAsync = promisify(execFile);


@ -15,11 +15,11 @@
*/
import { Drive } from 'drivelist';
import * as _ from 'lodash';
import { isNil } from 'lodash';
import * as pathIsInside from 'path-is-inside';
import * as messages from './messages';
import { SourceMetadata } from '../gui/app/components/source-selector/source-selector';
import { SourceMetadata } from './typings/source-selector';
/**
* @summary The default unknown size for things such as images and drives
@ -210,8 +210,8 @@ export function getDriveImageCompatibilityStatuses(
});
}
if (
!_.isNil(drive) &&
!_.isNil(drive.size) &&
!isNil(drive) &&
!isNil(drive.size) &&
!isDriveLargeEnough(drive, image)
) {
statusList.push(statuses.small);
@ -229,7 +229,7 @@ export function getDriveImageCompatibilityStatuses(
if (
image !== undefined &&
!_.isNil(drive) &&
!isNil(drive) &&
!isDriveSizeRecommended(drive, image)
) {
statusList.push(statuses.sizeNotRecommended);


@ -0,0 +1,12 @@
export function getAppPath(): string {
return (
(require('electron').app || require('@electron/remote').app)
.getAppPath()
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
.replace(/\.asar$/, () =>
process.platform === 'darwin' ? '-' + process.arch : '',
)
);
}
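
As a worked example of the rewrite above (the install path is hypothetical): on a macOS arm64 universal build, '/Applications/balenaEtcher.app/Contents/Resources/app.asar' becomes '/Applications/balenaEtcher.app/Contents/Resources/app-arm64', while on every other platform the '.asar' suffix is simply stripped.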


@ -0,0 +1,23 @@
import { GPTPartition, MBRPartition } from 'partitioninfo';
import { sourceDestination } from 'etcher-sdk';
import { DrivelistDrive } from '../drive-constraints';
export type Source = 'File' | 'BlockDevice' | 'Http';
export interface SourceMetadata extends sourceDestination.Metadata {
hasMBR?: boolean;
partitions?: MBRPartition[] | GPTPartition[];
path: string;
displayName: string;
description: string;
SourceType: Source;
drive?: DrivelistDrive;
extension?: string;
archiveExtension?: string;
auth?: Authentication;
}
export interface Authentication {
username: string;
password: string;
}
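
A hypothetical 'File' source expressed with these typings, purely to illustrate the shape (every value is made up):

const exampleFileSource: SourceMetadata = {
	path: '/tmp/example.img',
	displayName: 'example.img',
	description: 'example.img',
	SourceType: 'File',
	extension: 'img',
	hasMBR: false,
};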


@ -14,9 +14,6 @@
* limitations under the License.
*/
import axios from 'axios';
import { Dictionary } from 'lodash';
import * as errors from './errors';
export function isValidPercentage(percentage: any): boolean {
@ -38,19 +35,6 @@ export async function delay(duration: number): Promise<void> {
});
}
export function getAppPath(): string {
return (
(require('electron').app || require('@electron/remote').app)
.getAppPath()
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
.replace(/\.asar$/, () =>
process.platform === 'darwin' ? '-' + process.arch : '',
)
);
}
export function isJson(jsonString: string) {
try {
JSON.parse(jsonString);

lib/util/api.ts (new file, 201 lines)

@ -0,0 +1,201 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as ipc from 'node-ipc';
import { toJSON } from '../shared/errors';
import { GENERAL_ERROR, SUCCESS } from '../shared/exit-codes';
import { delay } from '../shared/utils';
import { WriteOptions } from './types/types';
import { MultiDestinationProgress } from 'etcher-sdk/build/multi-write';
import { write, cleanup } from './child-writer';
import { startScanning } from './scanner';
import { getSourceMetadata } from './source-metadata';
import { DrivelistDrive } from '../shared/drive-constraints';
import { Dictionary, values } from 'lodash';
ipc.config.id = process.env.IPC_CLIENT_ID as string;
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT as string;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
// > If set to 0, the client will NOT try to reconnect.
// See https://github.com/RIAEvangelist/node-ipc/
//
// The purpose behind this change is for this process
// to emit a "disconnect" event as soon as the GUI
// process is closed, so we can kill this process as well.
// @ts-ignore (0 is a valid value for stopRetrying and is not the same as false)
ipc.config.stopRetrying = 0;
const DISCONNECT_DELAY = 100;
const IPC_SERVER_ID = process.env.IPC_SERVER_ID as string;
/**
* @summary Send a message to the IPC server
*/
function emit(channel: string, message?: any) {
ipc.of[IPC_SERVER_ID].emit(channel, message);
}
/**
* @summary Send a log debug message to the IPC server
*/
function log(message: string) {
if (console?.log) {
console.log(message);
}
emit('log', message);
}
/**
* @summary Terminate the child process
*/
async function terminate(exitCode: number) {
ipc.disconnect(IPC_SERVER_ID);
await cleanup(Date.now());
process.nextTick(() => {
process.exit(exitCode || SUCCESS);
});
}
/**
* @summary Handle errors
*/
async function handleError(error: Error) {
emit('error', toJSON(error));
await delay(DISCONNECT_DELAY);
await terminate(GENERAL_ERROR);
}
/**
* @summary Abort handler
* @example
*/
const onAbort = async (exitCode: number) => {
log('Abort');
emit('abort');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
const onSkip = async (exitCode: number) => {
log('Skip validation');
emit('skip');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
ipc.connectTo(IPC_SERVER_ID, () => {
// Gracefully exit on the following cases. If the parent
// process detects that child exit successfully but
// no flashing information is available, then it will
// assume that the child died halfway through.
process.once('uncaughtException', handleError);
process.once('SIGINT', async () => {
await terminate(SUCCESS);
});
process.once('SIGTERM', async () => {
await terminate(SUCCESS);
});
// The IPC server failed. Abort.
ipc.of[IPC_SERVER_ID].on('error', async () => {
await terminate(SUCCESS);
});
// The IPC server was disconnected. Abort.
ipc.of[IPC_SERVER_ID].on('disconnect', async () => {
await terminate(SUCCESS);
});
ipc.of[IPC_SERVER_ID].on('sourceMetadata', async (params) => {
const { selected, SourceType, auth } = JSON.parse(params);
try {
const sourceMetadata = await getSourceMetadata(
selected,
SourceType,
auth,
);
emitSourceMetadata(sourceMetadata);
} catch (error: any) {
emitFail(error);
}
});
ipc.of[IPC_SERVER_ID].on('scan', async () => {
startScanning();
});
// write handler
ipc.of[IPC_SERVER_ID].on('write', async (options: WriteOptions) => {
// Remove leftover tmp files older than 1 hour
cleanup(Date.now() - 60 * 60 * 1000);
let exitCode = SUCCESS;
ipc.of[IPC_SERVER_ID].on('cancel', () => onAbort(exitCode));
ipc.of[IPC_SERVER_ID].on('skip', () => onSkip(exitCode));
const results = await write(options);
if (results.errors.length > 0) {
results.errors = results.errors.map((error: any) => {
return toJSON(error);
});
exitCode = GENERAL_ERROR;
}
emit('done', { results });
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
});
ipc.of[IPC_SERVER_ID].on('connect', () => {
log(
`Successfully connected to IPC server: ${IPC_SERVER_ID}, socket root ${ipc.config.socketRoot}`,
);
emit('ready', {});
});
});
function emitLog(message: string) {
log(message);
}
function emitState(state: MultiDestinationProgress) {
emit('state', state);
}
function emitFail(data: any) {
emit('fail', data);
}
function emitDrives(drives: Dictionary<DrivelistDrive>) {
emit('drives', JSON.stringify(values(drives)));
}
function emitSourceMetadata(sourceMetadata: any) {
emit('sourceMetadata', JSON.stringify(sourceMetadata));
}
export { emitLog, emitState, emitFail, emitDrives, emitSourceMetadata };
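
For orientation, the channel names wired up across this file and lib/gui/app/modules/api.ts can be summarised as two hypothetical union types — a sketch for the reader, not something the commit itself declares:

// GUI -> child requests handled above
type ChildBoundChannel = 'scan' | 'sourceMetadata' | 'write' | 'cancel' | 'skip';
// child -> GUI notifications emitted by the helpers above
type GuiBoundChannel =
	| 'ready'
	| 'log'
	| 'error'
	| 'drives'
	| 'sourceMetadata'
	| 'state'
	| 'fail'
	| 'done'
	| 'abort'
	| 'skip';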

lib/util/child-writer.ts (new file, 200 lines)

@ -0,0 +1,200 @@
/*
* Copyright 2023 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file handles the writer process.
*/
import {
OnProgressFunction,
OnFailFunction,
decompressThenFlash,
DECOMPRESSED_IMAGE_PREFIX,
MultiDestinationProgress,
} from 'etcher-sdk/build/multi-write';
import { totalmem } from 'os';
import { cleanupTmpFiles } from 'etcher-sdk/build/tmp';
import {
File,
Http,
BlockDevice,
SourceDestination,
} from 'etcher-sdk/build/source-destination';
import { WriteResult, FlashError, WriteOptions } from './types/types';
import { isJson } from '../shared/utils';
import { toJSON } from '../shared/errors';
import axios from 'axios';
import { omit } from 'lodash';
import { emitLog, emitState, emitFail } from './api';
async function write(options: WriteOptions) {
/**
* @summary Failure handler (non-fatal errors)
* @param {SourceDestination} destination - destination
* @param {Error} error - error
*/
const onFail = (destination: SourceDestination, error: Error) => {
emitFail({
// TODO: device should be destination
// @ts-ignore (destination.drive is private)
device: destination.drive,
error: toJSON(error),
});
};
/**
* @summary Progress handler
* @param {Object} state - progress state
* @example
* writer.on('progress', onProgress)
*/
const onProgress = (state: MultiDestinationProgress) => {
emitState(state);
};
// Write the image to the destinations
const destinations = options.destinations.map((d) => d.device);
const imagePath = options.image.path;
emitLog(`Image: ${imagePath}`);
emitLog(`Devices: ${destinations.join(', ')}`);
emitLog(`Auto blockmapping: ${options.autoBlockmapping}`);
emitLog(`Decompress first: ${options.decompressFirst}`);
const dests = options.destinations.map((destination) => {
return new BlockDevice({
drive: destination,
unmountOnSuccess: true,
write: true,
direct: true,
});
});
const { SourceType } = options;
try {
let source;
if (options.image.drive) {
source = new BlockDevice({
drive: options.image.drive,
direct: !options.autoBlockmapping,
});
} else {
if (SourceType === File.name) {
source = new File({
path: imagePath,
});
} else {
const decodedImagePath = decodeURIComponent(imagePath);
if (isJson(decodedImagePath)) {
const imagePathObject = JSON.parse(decodedImagePath);
source = new Http({
url: imagePathObject.url,
avoidRandomAccess: true,
axiosInstance: axios.create(omit(imagePathObject, ['url'])),
auth: options.image.auth,
});
} else {
source = new Http({
url: imagePath,
avoidRandomAccess: true,
auth: options.image.auth,
});
}
}
}
const results = await writeAndValidate({
source,
destinations: dests,
verify: true,
autoBlockmapping: options.autoBlockmapping,
decompressFirst: options.decompressFirst,
onProgress,
onFail,
});
return results;
} catch (error: any) {
return { errors: [error] };
}
}
/** @summary clean up tmp files */
export async function cleanup(until: number) {
await cleanupTmpFiles(until, DECOMPRESSED_IMAGE_PREFIX);
}
/**
* @summary writes the source to the destinations and validates the writes
* @param {SourceDestination} source - source
* @param {SourceDestination[]} destinations - destinations
* @param {Boolean} verify - whether to validate the writes or not
* @param {Boolean} autoBlockmapping - whether to trim ext partitions before writing
* @param {Function} onProgress - function to call on progress
* @param {Function} onFail - function to call on fail
* @returns {Promise<{ bytesWritten, devices, errors} >}
*/
async function writeAndValidate({
source,
destinations,
verify,
autoBlockmapping,
decompressFirst,
onProgress,
onFail,
}: {
source: SourceDestination;
destinations: BlockDevice[];
verify: boolean;
autoBlockmapping: boolean;
decompressFirst: boolean;
onProgress: OnProgressFunction;
onFail: OnFailFunction;
}): Promise<WriteResult> {
const { sourceMetadata, failures, bytesWritten } = await decompressThenFlash({
source,
destinations,
onFail,
onProgress,
verify,
trim: autoBlockmapping,
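// e.g. one destination on a 16 GiB machine gives min(2, 256, 2048) = 2 buffers; nine destinations give min(258, 256, 2048) = 256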
numBuffers: Math.min(
2 + (destinations.length - 1) * 32,
256,
Math.floor(totalmem() / 1024 ** 2 / 8),
),
decompressFirst,
});
const result: WriteResult = {
bytesWritten,
devices: {
failed: failures.size,
successful: destinations.length - failures.size,
},
errors: [],
sourceMetadata,
};
for (const [destination, error] of failures) {
const err = error as FlashError;
const drive = destination as BlockDevice;
err.device = drive.device;
err.description = drive.description;
result.errors.push(err);
}
return result;
}
export { write };

lib/util/scanner.ts (new file, 180 lines)

@ -0,0 +1,180 @@
import { scanner as driveScanner } from './drive-scanner';
import * as sdk from 'etcher-sdk';
import { DrivelistDrive } from '../shared/drive-constraints';
import outdent from 'outdent';
import { Dictionary, values, keyBy, padStart } from 'lodash';
import { emitDrives } from './api';
let availableDrives: DrivelistDrive[] = [];
export function hasAvailableDrives() {
return availableDrives.length > 0;
}
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
// otherwise we risk presenting the same error over
// and over again to the user, while also heavily
// spamming our error reporting service.
driveScanner.stop();
console.log('scanner error', error);
});
function setDrives(drives: Dictionary<DrivelistDrive>) {
availableDrives = values(drives);
emitDrives(drives);
}
function getDrives() {
return keyBy(availableDrives, 'device');
}
async function addDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
if (!(await driveIsAllowed(preparedDrive))) {
return;
}
const drives = getDrives();
drives[preparedDrive.device] = preparedDrive;
setDrives(drives);
}
function removeDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
const drives = getDrives();
delete drives[preparedDrive.device];
setDrives(drives);
}
async function driveIsAllowed(drive: {
devicePath: string;
device: string;
raw: string;
}) {
// const driveBlacklist = (await settings.get("driveBlacklist")) || [];
const driveBlacklist: any[] = [];
return !(
driveBlacklist.includes(drive.devicePath) ||
driveBlacklist.includes(drive.device) ||
driveBlacklist.includes(drive.raw)
);
}
type Drive =
| sdk.sourceDestination.BlockDevice
| sdk.sourceDestination.UsbbootDrive
| sdk.sourceDestination.DriverlessDevice;
function prepareDrive(drive: Drive) {
if (drive instanceof sdk.sourceDestination.BlockDevice) {
// @ts-ignore (BlockDevice.drive is private)
return drive.drive;
} else if (drive instanceof sdk.sourceDestination.UsbbootDrive) {
// This is a workaround for Etcher expecting a device string and a size
// @ts-ignore
drive.device = drive.usbDevice.portId;
drive.size = null;
// @ts-ignore
drive.progress = 0;
drive.disabled = true;
drive.on('progress', (progress) => {
updateDriveProgress(drive, progress);
});
return drive;
} else if (drive instanceof sdk.sourceDestination.DriverlessDevice) {
const description =
COMPUTE_MODULE_DESCRIPTIONS[
drive.deviceDescriptor.idProduct.toString()
] || 'Compute Module';
return {
device: `${usbIdToString(
drive.deviceDescriptor.idVendor,
)}:${usbIdToString(drive.deviceDescriptor.idProduct)}`,
displayName: 'Missing drivers',
description,
mountpoints: [],
isReadOnly: false,
isSystem: false,
disabled: true,
icon: 'warning',
size: null,
link: 'https://www.raspberrypi.com/documentation/computers/compute-module.html#flashing-the-compute-module-emmc',
linkCTA: 'Install',
linkTitle: 'Install missing drivers',
linkMessage: outdent`
Would you like to download the necessary drivers from the Raspberry Pi Foundation?
This will open your browser.
Once opened, download and run the installer from the "Windows Installer" section to install the drivers
`,
};
}
}
/**
* @summary The radix used by USB ID numbers
*/
const USB_ID_RADIX = 16;
/**
* @summary The expected length of a USB ID number
*/
const USB_ID_LENGTH = 4;
/**
* @summary Convert a USB id (e.g. product/vendor) to a string
*
* @example
* console.log(usbIdToString(2652))
* > '0x0a5c'
*/
function usbIdToString(id: number): string {
return `0x${padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`;
}
function updateDriveProgress(
drive: sdk.sourceDestination.UsbbootDrive,
progress: number,
) {
const drives = getDrives();
// @ts-ignore
const driveInMap = drives[drive.device];
if (driveInMap) {
// @ts-ignore
drives[drive.device] = { ...driveInMap, progress };
setDrives(drives);
}
}
/**
* @summary Product ID of BCM2708
*/
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763;
/**
* @summary Product ID of BCM2710
*/
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764;
/**
* @summary Compute module descriptions
*/
const COMPUTE_MODULE_DESCRIPTIONS: Dictionary<string> = {
[USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
[USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3',
};
const startScanning = () => {
driveScanner.on('attach', (drive) => addDrive(drive));
driveScanner.on('detach', (drive) => removeDrive(drive));
driveScanner.start();
};
const stopScanning = () => {
driveScanner.stop();
};
export { startScanning, stopScanning };
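
A minimal sketch of how a consumer of this module might drive the scanner; pickFirstDrive and the polling approach are hypothetical, only startScanning, stopScanning and hasAvailableDrives come from the file above.
import { startScanning, stopScanning, hasAvailableDrives } from './scanner';

// Hypothetical consumer: start the scan loop, poll until at least one drive
// has been reported, then stop scanning (e.g. just before flashing begins).
async function pickFirstDrive(pollMs = 500): Promise<void> {
	startScanning();
	while (!hasAvailableDrives()) {
		await new Promise((resolve) => setTimeout(resolve, pollMs));
	}
	stopScanning();
}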


@ -0,0 +1,93 @@
/** Get metadata for a source */
import { sourceDestination } from 'etcher-sdk';
import { replaceWindowsNetworkDriveLetter } from '../gui/app/os/windows-network-drives';
import axios, { AxiosRequestConfig } from 'axios';
import { isJson } from '../shared/utils';
import * as path from 'path';
import {
SourceMetadata,
Authentication,
Source,
} from '../shared/typings/source-selector';
import { DrivelistDrive } from '../shared/drive-constraints';
import { omit } from 'lodash';
function isString(value: any): value is string {
return typeof value === 'string';
}
async function createSource(
selected: string,
SourceType: Source,
auth?: Authentication,
) {
try {
selected = await replaceWindowsNetworkDriveLetter(selected);
} catch (error: any) {
// TODO: analytics.logException(error);
}
if (isJson(decodeURIComponent(selected))) {
const config: AxiosRequestConfig = JSON.parse(decodeURIComponent(selected));
return new sourceDestination.Http({
url: config.url!,
axiosInstance: axios.create(omit(config, ['url'])),
});
}
if (SourceType === 'File') {
return new sourceDestination.File({
path: selected,
});
}
return new sourceDestination.Http({ url: selected, auth });
}
async function getMetadata(
source: sourceDestination.SourceDestination,
selected: string | DrivelistDrive,
) {
const metadata = (await source.getMetadata()) as SourceMetadata;
const partitionTable = await source.getPartitionTable();
if (partitionTable) {
metadata.hasMBR = true;
metadata.partitions = partitionTable.partitions;
} else {
metadata.hasMBR = false;
}
if (isString(selected)) {
metadata.extension = path.extname(selected).slice(1);
metadata.path = selected;
}
return metadata;
}
async function getSourceMetadata(
selected: string | DrivelistDrive,
SourceType: Source,
auth?: Authentication,
) {
if (isString(selected)) {
const source = await createSource(selected, SourceType, auth);
try {
const innerSource = await source.getInnerSource();
const metadata = await getMetadata(innerSource, selected);
return metadata;
} catch (error: any) {
// TODO: handle error
} finally {
try {
await source.close();
} catch (error: any) {
// Noop
}
}
}
}
export { getSourceMetadata };
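
A hedged usage sketch for getSourceMetadata; the './source-metadata' import path is an assumption (the new file's name is not shown in the diff above) and inspect is a hypothetical helper.
import { getSourceMetadata } from './source-metadata'; // path assumed

// Hypothetical helper: read metadata for a local image file before flashing.
// For 'File' sources the metadata includes the file extension, the original
// path and whether a partition table (hasMBR/partitions) was detected.
async function inspect(imagePath: string) {
	const metadata = await getSourceMetadata(imagePath, 'File');
	if (metadata === undefined) {
		// getSourceMetadata swallows read errors above, so undefined
		// means the source could not be opened or parsed.
		console.error(`Could not read metadata for ${imagePath}`);
		return;
	}
	console.log(metadata.path, metadata.extension, metadata.hasMBR);
}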

33
lib/util/types/types.d.ts vendored Normal file

@ -0,0 +1,33 @@
import { Metadata } from 'etcher-sdk/build/source-destination';
import { SourceMetadata } from '../../shared/typings/source-selector';
import { Drive as DrivelistDrive } from 'drivelist';
export interface WriteResult {
bytesWritten?: number;
devices?: {
failed: number;
successful: number;
};
errors: FlashError[];
sourceMetadata?: Metadata;
}
export interface FlashError extends Error {
description: string;
device: string;
code: string;
}
export interface FlashResults extends WriteResult {
skip?: boolean;
cancelled?: boolean;
}
interface WriteOptions {
image: SourceMetadata;
destinations: DrivelistDrive[];
autoBlockmapping: boolean;
decompressFirst: boolean;
SourceType: string;
httpRequest?: any;
}
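
To make these shapes concrete, a hypothetical WriteResult for a flash where one of two drives failed might look like the following; all values are made up and the import path is assumed.
import { WriteResult, FlashError } from './types/types'; // path assumed

// One of two drives failed: wrap the underlying error as a FlashError and
// report one failed and one successful device.
const flashError: FlashError = Object.assign(new Error('EIO: i/o error'), {
	description: 'Generic Flash Disk Media',
	device: '/dev/disk4',
	code: 'EIO',
});

const exampleResult: WriteResult = {
	bytesWritten: 7340032,
	devices: { failed: 1, successful: 1 },
	errors: [flashError],
};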

32837
npm-shrinkwrap.json generated Normal file

File diff suppressed because it is too large

10
pkg-sidecar.json Normal file

@ -0,0 +1,10 @@
{
"assets": [
"node_modules/usb/**",
"node_modules/lzma-native/**",
"node_modules/drivelist/**",
"node_modules/mountutils/**",
"node_modules/winusb-driver-generator/**",
"node_modules/node-raspberrypi-usbboot/**"
]
}

182
test-wrapper.ts Normal file

@ -0,0 +1,182 @@
/*
* This is a test wrapper for etcher-utils.
* The only use for this file is debugging while developing etcher-utils.
* It will create an IPC server, spawn the CLI version of etcher-writer, and wait for it to connect.
* Requires elevated privileges to work (launch with sudo).
* Note that you'll need to edit the `ipc.server.on('ready', ...)` handler based on what you want to test.
*/
import * as ipc from 'node-ipc';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from './package.json';
import * as permissions from './lib/shared/permissions';
// if (process.argv.length !== 3) {
// console.error('Expects an image to flash as only arg!');
// process.exit(1);
// }
const THREADS_PER_CPU = 16;
// There might be multiple Etcher instances running at
// the same time, therefore we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
function writerArgv(): string[] {
const entryPoint = path.join('./generated/etcher-util');
return [entryPoint];
}
function writerEnv() {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT: ipc.config.socketRoot,
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
async function start(): Promise<any> {
ipc.serve();
return await new Promise((resolve, reject) => {
ipc.server.on('error', (message) => {
console.log('IPC server error', message);
});
ipc.server.on('log', (message) => {
console.log('log', message);
});
ipc.server.on('fail', ({ device, error }) => {
console.log('failure', error, device);
});
ipc.server.on('done', (event) => {
console.log('done', event);
});
ipc.server.on('abort', () => {
console.log('abort');
});
ipc.server.on('skip', () => {
console.log('skip');
});
ipc.server.on('state', (progress) => {
console.log('progress', progress);
});
ipc.server.on('drives', (drives) => {
console.log('drives', drives);
});
ipc.server.on('ready', (_data, socket) => {
console.log('ready');
ipc.server.emit(socket, 'scan', {});
// ipc.server.emit(socket, "hello", { message: "world" });
// ipc.server.emit(socket, "write", {
// image: {
// path: process.argv[2],
// displayName: "Random image for test",
// description: "Random image for test",
// SourceType: "File",
// },
// destinations: [
// {
// size: 15938355200,
// isVirtual: false,
// enumerator: "DiskArbitration",
// logicalBlockSize: 512,
// raw: "/dev/rdisk4",
// error: null,
// isReadOnly: false,
// displayName: "/dev/disk4",
// blockSize: 512,
// isSCSI: false,
// isRemovable: true,
// device: "/dev/disk4",
// busVersion: null,
// isSystem: false,
// busType: "USB",
// isCard: false,
// isUSB: true,
// devicePath:
// "IODeviceTree:/arm-io@10F00000/usb-drd1@2280000/usb-drd1-port-hs@01100000",
// mountpoints: [
// {
// path: "/Volumes/flash-rootB",
// label: "flash-rootB",
// },
// {
// path: "/Volumes/flash-rootA",
// label: "flash-rootA",
// },
// {
// path: "/Volumes/flash-boot",
// label: "flash-boot",
// },
// ],
// description: "Generic Flash Disk Media",
// isUAS: null,
// partitionTableType: "mbr",
// },
// ],
// SourceType: "File",
// autoBlockmapping: true,
// decompressFirst: true,
// });
});
const argv = writerArgv();
ipc.server.on('start', async () => {
console.log(`Elevating command: ${argv.join(' ')}`);
const env = writerEnv();
try {
await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
} catch (error: any) {
console.log('error', error);
// This happens when the child is killed using SIGKILL
const SIGKILL_EXIT_CODE = 137;
if (error.code === SIGKILL_EXIT_CODE) {
error.code = 'ECHILDDIED';
}
reject(error);
} finally {
console.log('Terminating IPC server');
}
resolve(true);
});
// Clear the update lock timer to prevent a longer flash
// from timing it out and releasing the lock
ipc.server.start();
});
}
start();

18
tsconfig.sidecar.json Normal file

@ -0,0 +1,18 @@
{
"compilerOptions": {
"target": "ES2019",
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"typeRoots": ["./node_modules/@types", "./typings"],
"module": "CommonJS",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"outDir": "build"
},
"include": ["lib/util"]
}


@ -15,12 +15,8 @@
*/
import * as CopyPlugin from 'copy-webpack-plugin';
import { readdirSync } from 'fs';
import * as _ from 'lodash';
import * as os from 'os';
import outdent from 'outdent';
import * as path from 'path';
import { env } from 'process';
import * as SimpleProgressWebpackPlugin from 'simple-progress-webpack-plugin';
import * as TerserPlugin from 'terser-webpack-plugin';
import {
@ -48,24 +44,6 @@ function externalPackageJson(packageJsonPath: string) {
};
}
function platformSpecificModule(
platform: string,
module: string,
replacement = '{}',
) {
// Resolves module on platform, otherwise resolves the replacement
return (
{ request }: { context: string; request: string },
callback: (error?: Error, result?: string, type?: string) => void,
) => {
if (request === module && os.platform() !== platform) {
callback(undefined, replacement);
return;
}
callback();
};
}
function renameNodeModules(resourcePath: string) {
// electron-builder excludes the node_modules folder even if you specifically include it
// Work around by renaming it to "modules"
@ -74,78 +52,11 @@ function renameNodeModules(resourcePath: string) {
path
.relative(__dirname, resourcePath)
.replace('node_modules', 'modules')
// use the same name on all architectures so electron-builder can build a universal dmg on mac
.replace(LZMA_BINDINGS_FOLDER, LZMA_BINDINGS_FOLDER_RENAMED)
// file-loader expects posix paths, even on Windows
.replace(/\\/g, '/')
);
}
function findUsbPrebuild(): string[] {
const usbPrebuildsFolder = path.join('node_modules', 'usb', 'prebuilds');
const prebuildFolders = readdirSync(usbPrebuildsFolder);
let bindingFile: string | undefined = 'node.napi.node';
const platformFolder = prebuildFolders.find(
(f) => f.startsWith(os.platform()) && f.indexOf(os.arch()) > -1,
);
if (platformFolder === undefined) {
throw new Error(
'Could not find usb prebuild. Should try fallback to node-gyp and use /build/Release instead of /prebuilds',
);
}
const bindingFiles = readdirSync(
path.join(usbPrebuildsFolder, platformFolder),
);
if (!bindingFiles.length) {
throw new Error('Could not find usb prebuild for platform');
}
if (bindingFiles.length === 1) {
bindingFile = bindingFiles[0];
}
// armv6 vs v7 in linux-arm and
// glibc vs musl in linux-x64
if (bindingFiles.length > 1) {
bindingFile = bindingFiles.find((file) => {
if (bindingFiles.indexOf('arm') > -1) {
const process = require('process');
return file.indexOf(process.config.variables.arm_version) > -1;
} else {
return file.indexOf('glibc') > -1;
}
});
}
if (bindingFile === undefined) {
throw new Error('Could not find usb prebuild for platform');
}
return [platformFolder, bindingFile];
}
const [USB_BINDINGS_FOLDER, USB_BINDINGS_FILE] = findUsbPrebuild();
function findLzmaNativeBindingsFolder(): string {
const files = readdirSync(
path.join('node_modules', 'lzma-native', 'prebuilds'),
);
const bindingsFolder = files.find(
(f) =>
f.startsWith(os.platform()) &&
f.endsWith(env.npm_config_target_arch || os.arch()),
);
if (bindingsFolder === undefined) {
throw new Error('Could not find lzma_native binding');
}
return bindingsFolder;
}
const LZMA_BINDINGS_FOLDER = findLzmaNativeBindingsFolder();
const LZMA_BINDINGS_FOLDER_RENAMED = 'binding';
interface ReplacementRule {
search: string;
replace: string | (() => string);
@ -222,94 +133,10 @@ const commonConfig = {
search: './adapters/xhr',
replace: './adapters/http',
}),
// remove bindings magic from drivelist
replace(
/node_modules\/drivelist\/js\/index\.js$/,
{
search: 'require("bindings");',
replace: "require('../build/Release/drivelist.node')",
},
{
search: "bindings('drivelist')",
replace: 'bindings',
},
),
replace(
/node_modules\/lzma-native\/index\.js$/,
// remove node-pre-gyp magic from lzma-native
{
search: `require('node-gyp-build')(__dirname);`,
replace: `require('./prebuilds/${LZMA_BINDINGS_FOLDER}/electron.napi.node')`,
},
// use regular stream module instead of readable-stream
{
search: "var stream = require('readable-stream');",
replace: "var stream = require('stream');",
},
),
// remove node-pre-gyp magic from usb
replace(/node_modules\/usb\/dist\/usb\/bindings\.js$/, {
search: `require('node-gyp-build')(path_1.join(__dirname, '..', '..'));`,
replace: `require('../../prebuilds/${USB_BINDINGS_FOLDER}/${USB_BINDINGS_FILE}')`,
}),
// remove bindings magic from mountutils
replace(/node_modules\/mountutils\/index\.js$/, {
search: outdent`
require('bindings')({
bindings: 'MountUtils',
/* eslint-disable camelcase */
module_root: __dirname
/* eslint-enable camelcase */
})
`,
replace: "require('./build/Release/MountUtils.node')",
}),
// remove bindings magic from winusb-driver-generator
replace(/node_modules\/winusb-driver-generator\/index\.js$/, {
search: outdent`
require('bindings')({
bindings: 'Generator',
/* eslint-disable camelcase */
module_root: __dirname
/* eslint-enable camelcase */
});
`,
replace: "require('./build/Release/Generator.node')",
}),
replace(/node_modules\/node-raspberrypi-usbboot\/build\/index\.js$/, {
search:
"return await readFile(Path.join(__dirname, '..', 'blobs', filename));",
replace: outdent`
const remote = require('@electron/remote');
return await readFile(
Path.join(
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
remote.app.getAppPath().replace(/\\.asar$/, () => process.platform === 'darwin' ? '-' + process.arch : ''),
'generated',
__dirname.replace('node_modules', 'modules'),
'..',
'blobs',
filename
)
);
`,
}),
// Copy native modules to generated folder
{
test: /\.node$/,
use: [
{
loader: 'native-addon-loader',
options: { name: renameNodeModules },
},
],
},
],
},
resolve: {
extensions: ['.node', '.js', '.json', '.ts', '.tsx'],
extensions: ['.js', '.json', '.ts', '.tsx'],
},
plugins: [
PnpWebpackPlugin,
@ -341,32 +168,9 @@ const commonConfig = {
externals: [
// '../package.json' because we are in 'generated'
externalPackageJson('../package.json'),
// Only exists on windows
platformSpecificModule('win32', 'winusb-driver-generator'),
// Not needed but required by resin-corvus > os-locale > execa > cross-spawn
platformSpecificModule('none', 'spawn-sync'),
// Not needed as we replace all requires for it
platformSpecificModule('none', 'node-pre-gyp', '{ find: () => {} }'),
// Not needed as we replace all requires for it
platformSpecificModule('none', 'bindings'),
],
};
const guiConfigCopyPatterns = [
{
from: 'node_modules/node-raspberrypi-usbboot/blobs',
to: 'modules/node-raspberrypi-usbboot/blobs',
},
];
if (os.platform() === 'win32') {
// liblzma.dll is required on Windows for lzma-native
guiConfigCopyPatterns.push({
from: `node_modules/lzma-native/prebuilds/${LZMA_BINDINGS_FOLDER}/liblzma.dll`,
to: `modules/lzma-native/prebuilds/${LZMA_BINDINGS_FOLDER_RENAMED}/liblzma.dll`,
});
}
const guiConfig = {
...commonConfig,
target: 'electron-renderer',
@ -377,7 +181,6 @@ const guiConfig = {
entry: {
gui: path.join(__dirname, 'lib', 'gui', 'app', 'renderer.ts'),
},
// entry: path.join(__dirname, 'lib', 'gui', 'app', 'renderer.ts'),
plugins: [
...commonConfig.plugins,
new CopyPlugin({
@ -393,7 +196,6 @@ const guiConfig = {
banner: '__REACT_DEVTOOLS_GLOBAL_HOOK__ = { isDisabled: true };',
raw: true,
}),
new CopyPlugin({ patterns: guiConfigCopyPatterns }),
],
};
@ -413,17 +215,4 @@ const etcherConfig = {
},
};
const childWriterConfig = {
...mainConfig,
entry: {
'child-writer': path.join(
__dirname,
'lib',
'gui',
'modules',
'child-writer.ts',
),
},
};
export default [guiConfig, etcherConfig, childWriterConfig];
export default [guiConfig, etcherConfig];