Compare commits

..

5 Commits

Author          SHA1        Message                             Date
Edwin Joassart  308e7c2585  wip: ci tests                       2023-04-18 18:15:11 +02:00
Edwin Joassart  b5042e9b9f  wip: ci tests                       2023-04-18 18:14:21 +02:00
Edwin Joassart  81910d27ee  wip: ci tests                       2023-04-18 17:45:53 +02:00
Edwin Joassart  ab386a8521  wip: test ci                        2023-04-18 12:08:26 +02:00
Edwin Joassart  80a96160a2  patch: set electron security fuses  2023-04-14 16:03:37 +02:00
40 changed files with 37608 additions and 34406 deletions


@@ -15,7 +15,7 @@ inputs:
default: "accounts+apple@balena.io"
NODE_VERSION:
type: string
default: "18.x"
default: "16.x"
VERBOSE:
type: string
default: "true"
@@ -31,17 +31,10 @@ runs:
path: ${{ runner.temp }}
- name: Extract custom source artifact
if: runner.os != 'Windows'
shell: pwsh
working-directory: .
run: tar -xf ${{ runner.temp }}/custom.tgz
- name: Extract custom source artifact
if: runner.os == 'Windows'
shell: pwsh
working-directory: .
run: C:\"Program Files"\Git\usr\bin\tar.exe --force-local -xf ${{ runner.temp }}\custom.tgz
- name: Setup Node.js
uses: actions/setup-node@v3
with:


@@ -47,15 +47,9 @@ runs:
ELECTRON_NO_ATTACH_CONSOLE: true
- name: Compress custom source
if: runner.os != 'Windows'
shell: pwsh
run: tar -acf ${{ runner.temp }}/custom.tgz .
- name: Compress custom source
if: runner.os == 'Windows'
shell: pwsh
run: C:\"Program Files"\Git\usr\bin\tar.exe --force-local -acf ${{ runner.temp }}\custom.tgz .
- name: Upload custom artifact
uses: actions/upload-artifact@v3
with:


@@ -1,4 +1,5 @@
name: Flowzone
on:
pull_request:
types: [opened, synchronize, closed]
@@ -7,6 +8,7 @@ on:
pull_request_target:
types: [opened, synchronize, closed]
branches: [main, master]
jobs:
flowzone:
name: Flowzone
@@ -21,4 +23,8 @@ jobs:
tests_run_on: '["ubuntu-20.04","macos-latest","windows-2019"]'
restrict_custom_actions: false
github_prerelease: true
repo_config: true
repo_description: "Flash OS images to SD cards & USB drives, safely and easily."
repo_homepage: https://etcher.io/
repo_enable_wiki: true
cloudflare_website: "etcher"

.gitignore

@@ -28,7 +28,6 @@ pids
# Generated files
/generated
/binaries
# Dependency directory
# https://docs.npmjs.com/misc/faq#should-i-check-my-node-modules-folder-into-git


@@ -1,110 +1,3 @@
- commits:
- subject: "patch: upgrade to electron 25"
hash: f38bca290fe26121bed58d1131265e1aa350ddb5
body: ""
footer: {}
author: Edwin Joassart
nested: []
- subject: "patch: refactor scanner, loader and flasher out of gui + upgrade to
electron 25"
hash: fb8ed5b529e22bc9e766bfe99c2b6955ed695b58
body: ""
footer: {}
author: Edwin Joassart
nested: []
version: 1.18.13
title: ""
date: 2023-10-16T13:32:26.738Z
- commits:
- subject: Update instructions for installing deb file
hash: acab03ad77a1c1901d0c8a65999e93c1d27169a0
body: ""
footer:
Change-type: patch
change-type: patch
author: Jorge Capona
nested: []
version: 1.18.12
title: ""
date: 2023-07-19T10:24:22.407Z
- commits:
- subject: "fix: prevent stealing window focus from auth dialog"
hash: f716c74ef7cb164b4d825828e4e46033484ad9af
body: ""
footer:
Change-type: patch
change-type: patch
author: leadpogrommer
nested: []
version: 1.18.11
title: ""
date: 2023-07-13T14:31:40.021Z
- commits:
- subject: "spelling: validates"
hash: 06d246e3fd1c573b9e04d23ab3bc3c4036fb9859
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
author: Josh Soref
nested: []
- subject: "spelling: undefined"
hash: 67b26a5b69f819066c6419d3d915846b63fdbcf0
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
author: Josh Soref
nested: []
- subject: "spelling: except if"
hash: b4b9db7ffa2104c19e7bd079e4f394a817f40bc0
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
author: Josh Soref
nested: []
version: 1.18.10
title: ""
date: 2023-07-12T11:21:59.231Z
- commits:
- subject: Fix opening links from within SafeWebView
hash: 497bb0e2cbefad3e9a1188ee5df49cf61f6bd6e4
body: ""
footer:
Change-type: patch
change-type: patch
author: Akis Kesoglou
nested: []
version: 1.18.9
title: ""
date: 2023-07-12T09:07:17.666Z
- commits:
- subject: "Patch: Fix Support link"
hash: 882b385c88111a192e5f37e20c1c8aeca9950b21
body: ""
footer: {}
author: Oliver Plummer
nested: []
version: 1.18.8
title: ""
date: 2023-04-26T09:57:46.155Z
- commits:
- subject: "patch: update docs to remove cloudsmith install instructions for linux"
hash: 02a406711852cf237e41da4cd39350d8acc1f0b0
body: ""
footer: {}
author: Edwin Joassart
nested: []
version: 1.18.7
title: ""
date: 2023-04-25T15:25:35.584Z
- commits:
- subject: add-flash-with-etcher-to-docs
hash: 856b426dc98925f5e339976a5cac144f4bb4ea59
@@ -297,11 +190,10 @@
- subject: Switch to `@electron/remote`
hash: 7ee174edcecbfc2d7370db6d4185b3ee4eedbe28
body: >
Electron 12 deprecated `electron.remote` and the functionality was
removed in Electron 14, but became available as a separate
`@electron/remote` module. This commit makes the transition to the
external module as an intermediary step to enable updating to a newer
Electron version.
Electron 12 deprecated `electron.remote` and the functionality was removed
in Electron 14, but became available as a separate `@electron/remote`
module. This commit makes the transition to the external module as an
intermediary step to enable updating to a newer Electron version.
footer:
Change-type: patch
change-type: patch
@@ -346,8 +238,8 @@
- subject: Lazily import Electron from child-writer process
hash: 851219f835ed037d9fd970f538095e4b339c5342
body: >
No idea how this *used* to work, but it doesnt since 887ec428 and this
is fixing it properly.
No idea how this *used* to work, but it doesnt since 887ec428 and this is
fixing it properly.
footer:
Change-type: patch
change-type: patch
@@ -1292,8 +1184,7 @@
- subject: Fixing call to electron block screensaver methods invocation
hash: 1b5b64713505dfb69448bc2184839b4c23bd677b
body: >
Replacing `send` calls to `invoke` for `enable/disable-screensaver`
calls.
Replacing `send` calls to `invoke` for `enable/disable-screensaver` calls.
footer:
Change-type: patch
change-type: patch
@@ -2045,11 +1936,11 @@
- subject: Add support for basic auth when downloading images from URL.
hash: b2d0c1c9ddbbfe87d5a905d420d615821610e825
body: >
When selecting "Flash from URL" the user can optionally provide a
username and password for basic authentication. The authentication input
fields are collapsed by default. When the authentication input fields
are collapsed after entering values the values are cleared to ensure
that the user sees all parameter passed to the server.
When selecting "Flash from URL" the user can optionally provide a username
and password for basic authentication. The authentication input fields
are collapsed by default. When the authentication input fields are
collapsed after entering values the values are cleared to ensure that
the user sees all parameter passed to the server.
footer:
Change-Type: minor
change-type: minor
@@ -2606,8 +2497,7 @@
- subject: Ignore ENOENT errors on unlink in withTmpFile
hash: 7bb2a23c4e94dcda6a7b494fe0435c0b59b56b06
body: >
The temporary file might have been already deleted by
cleanupTmpFiles
The temporary file might have been already deleted by cleanupTmpFiles
footer:
Change-type: patch
change-type: patch
@@ -2677,8 +2567,7 @@
- subject: Pass strings between methods as std::string instead of char *
hash: 1ec6a8ffc4c9e138b78210f0db84a9ebd6c9182b
body: >
- Fixes "basic_string::_M_construct null not valid"
exception
- Fixes "basic_string::_M_construct null not valid" exception
aborting program, because WCharToUtf8() returned NULL
in some cases, and NULL was being fed to string constructor.
- Fixes memory leak because memory allocated with calloc()
@@ -4766,8 +4655,8 @@
change-type: patch
subject: Fixes the Command for macOS drive recovery
body: >-
Changes the documentation to update the disktutil command which didn't
fix my case, cause the boot partition was broken.
Changes the documentation to update the disktutil command which didn't fix
my case, cause the boot partition was broken.
This way it rewrites the drive into a FAT32 partition editable in Unix/Windows.
- hash: b3f25c176b1bdb487d1a7bf111d7f170fe008842
@@ -7696,8 +7585,7 @@
changelog-entry: Add support for configuration files
subject: "feat(gui): Add ability to read settings from a config file"
body: >-
This adds the capability to configure settings via a `.etcher.json`
file,
This adds the capability to configure settings via a `.etcher.json` file,
either in the user's home directory, or the current working directory.
@@ -7815,8 +7703,7 @@
change-type: patch
subject: "doc: Update MAINTAINERS.md with Symantec Whitelisting"
body: >-
This adds instructions for submitting Etcher for false positive
detection
This adds instructions for submitting Etcher for false positive detection
to Symantec Endpoint Protection.
- hash: bb2dac75040554c0ba2c7e50ff9ecd61608e7d38
@@ -7992,8 +7879,7 @@
change-type: patch
subject: "fix(image-writer): Remove use of _.isError"
body: >-
`_.isError()` returns `true` for anything that has a `name` and
`message`
`_.isError()` returns `true` for anything that has a `name` and `message`
property, causing the check here to always keep the plain object as error.
- hash: 355373f24df6be0989fad9429c2230166b33a3bf
@@ -8009,8 +7895,7 @@
change-type: patch
subject: "upgrade(package): Update drivelist 6.1.5 -> 6.1.7"
body: >-
This fixes a ReferenceError that could occur when the DeviceNode was
null,
This fixes a ReferenceError that could occur when the DeviceNode was null,
as well as devices being null when run after the system recovers from sleep / standby.
- hash: 6e7484d3dabc2aeaa7cd471822d7019860cc4a5c
@@ -8846,8 +8731,7 @@
changelog-entry: Remove stale `invalidKey` check in store.
subject: "refactor: remove stale invalid key check in store"
body: >-
We remove a piece of code checking whether `_.keys` returns any
non-string
We remove a piece of code checking whether `_.keys` returns any non-string
values in its array, but per the Lodash documentation `_.keys` always returns an
@@ -8875,8 +8759,7 @@
changelog-entry: Make the drive-selector button orange on warnings.
subject: "feat(GUI): warning makes drive-selector button orange"
body: >-
We make the drive-selector button orange when there is a warning
attached
We make the drive-selector button orange when there is a warning attached
to the image-drive pair.
- hash: 4ce89f97fe02d714ce7f247a6a03ad6d326c3a8a


@@ -3,44 +3,6 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
# v1.18.13
## (2023-10-16)
* patch: upgrade to electron 25 [Edwin Joassart]
* patch: refactor scanner, loader and flasher out of gui + upgrade to electron 25 [Edwin Joassart]
# v1.18.12
## (2023-07-19)
* Update instructions for installing deb file [Jorge Capona]
# v1.18.11
## (2023-07-13)
* fix: prevent stealing window focus from auth dialog [leadpogrommer]
# v1.18.10
## (2023-07-12)
* spelling: validates [Josh Soref]
* spelling: undefined [Josh Soref]
* spelling: except if [Josh Soref]
# v1.18.9
## (2023-07-12)
* Fix opening links from within SafeWebView [Akis Kesoglou]
# v1.18.8
## (2023-04-26)
* Patch: Fix Support link [Oliver Plummer]
# v1.18.7
## (2023-04-25)
* patch: update docs to remove cloudsmith install instructions for linux [Edwin Joassart]
# v1.18.6
## (2023-03-21)

README.md

@@ -32,32 +32,132 @@ installers for all supported operating systems.
## Packages
> [![Hosted By: Cloudsmith](https://img.shields.io/badge/OSS%20hosting%20by-cloudsmith-blue?logo=cloudsmith&style=for-the-badge)](https://cloudsmith.com) \
Package repository hosting is graciously provided by [Cloudsmith](https://cloudsmith.com).
Cloudsmith is the only fully hosted, cloud-native, universal package management solution, that
enables your organization to create, store and share packages in any format, to any place, with total
confidence.
#### Debian and Ubuntu based Package Repository (GNU/Linux x86/x64)
Package for Debian and Ubuntu can be downloaded from the [Github release page](https://github.com/balena-io/etcher/releases/)
> Detailed or alternative steps in the [instructions by Cloudsmith](https://cloudsmith.io/~balena/repos/etcher/setup/#formats-deb)
##### Install .deb file using apt
1. Add Etcher Debian repository:
```sh
sudo apt install ./balena-etcher_******_amd64.deb
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.deb.sh' \
| sudo -E bash
```
2. Update and install:
```sh
sudo apt-get update #you can use apt instead of apt-get as well
sudo apt-get install balena-etcher-electron
```
>Note: after v1.7.9 the package name changed to `balena-etcher` (no electron at the end)
##### Uninstall
```sh
sudo apt remove balena-etcher
```
```sh
sudo apt-get remove balena-etcher-electron
rm /etc/apt/sources.list.d/balena-etcher.list
apt-get clean
rm -rf /var/lib/apt/lists/*
apt-get update
```
#### Redhat (RHEL) and Fedora-based Package Repository (GNU/Linux x86/x64)
##### Yum
> Detailed or alternative steps in the [instructions by Cloudsmith](https://cloudsmith.io/~balena/repos/etcher/setup/#formats-rpm)
Package for Fedora-based and Redhat can be downloaded from the [Github release page](https://github.com/balena-io/etcher/releases/)
1. Install using yum
##### DNF
1. Add Etcher rpm repository:
```sh
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.rpm.sh' \
| sudo -E bash
```
2. Update and install:
```sh
sudo dnf install -y balena-etcher-electron
```
>Note: after v1.7.9 the package name changed to `balena-etcher` (no electron at the end)
###### Uninstall
```sh
sudo yum localinstall balena-etcher-***.x86_64.rpm
rm /etc/yum.repos.d/balena-etcher.repo
rm /etc/yum.repos.d/balena-etcher-source.repo
```
##### Yum
1. Add Etcher rpm repository:
```sh
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.rpm.sh' \
| sudo -E bash
```
2. Update and install:
```sh
sudo yum install -y balena-etcher-electron
```
>Note: after v1.7.9 the package name changed to `balena-etcher` (no electron at the end)
###### Uninstall
```sh
sudo yum remove -y balena-etcher-electron
rm /etc/yum.repos.d/balena-etcher.repo
rm /etc/yum.repos.d/balena-etcher-source.repo
```
#### OpenSUSE LEAP & Tumbleweed install (zypper)
1. Add the repo
```sh
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.rpm.sh' \
| sudo -E bash
```
2. Update and install
```sh
sudo zypper up
sudo zypper install balena-etcher-electron
```
>Note: after v1.7.9 the package name changed to `balena-etcher` (no electron at the end)
##### Uninstall
```sh
sudo zypper rm balena-etcher-electron
# remove the repo
sudo zypper rr balena-etcher
sudo zypper rr balena-etcher-source
```
#### Solus (GNU/Linux x64)
```sh
sudo eopkg it etcher
```
##### Uninstall
```sh
sudo eopkg rm etcher
```
#### Arch/Manjaro Linux (GNU/Linux x64)
@@ -73,7 +173,6 @@ yay -S balena-etcher
```sh
yay -R balena-etcher
```
#### WinGet (Windows)
This package is updated by [gh-action](https://github.com/vedantmgoyal2009/winget-releaser), and is kept up to date automatically.
@@ -116,9 +215,11 @@ the [license].
[etcher]: https://balena.io/etcher
[electron]: https://electronjs.org/
[electron-supported-platforms]: https://electronjs.org/docs/tutorial/support#supported-platforms
[support]: https://github.com/balena-io/etcher/blob/master/docs/SUPPORT.md
[support]: https://github.com/balena-io/etcher/blob/master/SUPPORT.md
[contributing]: https://github.com/balena-io/etcher/blob/master/docs/CONTRIBUTING.md
[user-documentation]: https://github.com/balena-io/etcher/blob/master/docs/USER-DOCUMENTATION.md
[milestones]: https://github.com/balena-io/etcher/milestones
[newissue]: https://github.com/balena-io/etcher/issues/new
[license]: https://github.com/balena-io/etcher/blob/master/LICENSE


@@ -1,20 +1,23 @@
'use strict'
"use strict";
const cp = require('child_process')
const fs = require('fs')
const outdent = require('outdent')
const path = require('path')
const cp = require("child_process");
const fs = require("fs");
const outdent = require("outdent");
const path = require("path");
const builder = require("electron-builder");
const { flipFuses, FuseVersion, FuseV1Options } = require("@electron/fuses");
exports.default = function(context) {
if (context.packager.platform.name !== 'linux') {
return
}
const scriptPath = path.join(context.appOutDir, context.packager.executableName)
const binPath = scriptPath + '.bin'
cp.execFileSync('mv', [scriptPath, binPath])
fs.writeFileSync(
scriptPath,
outdent({trimTrailingNewline: false})`
exports.default = async function (context) {
if (context.packager.platform.name === "linux") {
const scriptPath = path.join(
context.appOutDir,
context.packager.executableName
);
const binPath = scriptPath + ".bin";
cp.execFileSync("mv", [scriptPath, binPath]);
fs.writeFileSync(
scriptPath,
outdent({ trimTrailingNewline: false })`
#!/bin/bash
# Resolve symlinks. Warning, readlink -f doesn't work on MacOS/BSD
@@ -26,6 +29,41 @@ exports.default = function(context) {
"\${script_dir}"/${context.packager.executableName}.bin "$@" --no-sandbox
fi
`
)
cp.execFileSync('chmod', ['+x', scriptPath])
}
);
cp.execFileSync("chmod", ["+x", scriptPath]);
}
// Adapted from https://github.com/electron-userland/electron-builder/issues/6365#issue-1033809141
const ext = {
darwin: ".app",
win32: ".exe",
linux: ".bin",
}[context.electronPlatformName];
const IS_LINUX = context.electronPlatformName === "linux";
const executableName = IS_LINUX
? context.packager.appInfo.productFilename.toLowerCase().replace("-dev", "")
: context.packager.appInfo.productFilename; // .toLowerCase() to accomodate Linux file named `name` but productFileName is `Name` -- Replaces '-dev' because on Linux the executable name is `name` even for the DEV builds
const IS_APPLE_SILICON =
context.electronPlatformName === "darwin" &&
context.arch === builder.Arch.arm64;
const electronBinaryPath = path.join(
context.appOutDir,
`${executableName}${ext}`
);
console.log(electronBinaryPath);
await flipFuses(electronBinaryPath, {
version: FuseVersion.V1,
resetAdHocDarwinSignature: IS_APPLE_SILICON, // necessary for building on Apple Silicon
[FuseV1Options.RunAsNode]: false,
[FuseV1Options.EnableCookieEncryption]: true,
[FuseV1Options.EnableNodeOptionsEnvironmentVariable]: false,
[FuseV1Options.EnableNodeCliInspectArguments]: false,
// [FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true, // Only affects macOS builds, but breaks them -- https://github.com/electron/fuses/issues/7
[FuseV1Options.OnlyLoadAppFromAsar]: false,
});
};
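
For reference, the fuse-flipping call in the afterPack hook above can also be exercised on its own. A minimal TypeScript sketch, assuming `@electron/fuses` is installed; the binary path is a placeholder, not a path from this repository:

```ts
// Standalone sketch of the same @electron/fuses API used in the afterPack hook above.
// The binary path is a placeholder -- point it at a packaged Electron executable.
import { flipFuses, FuseVersion, FuseV1Options } from "@electron/fuses";

async function hardenBinary(electronBinaryPath: string): Promise<void> {
  await flipFuses(electronBinaryPath, {
    version: FuseVersion.V1,
    [FuseV1Options.RunAsNode]: false, // drop ELECTRON_RUN_AS_NODE support
    [FuseV1Options.EnableNodeCliInspectArguments]: false, // drop --inspect style flags
    [FuseV1Options.EnableCookieEncryption]: true,
  });
}

hardenBinary("./dist/linux-unpacked/balena-etcher.bin").catch(console.error);
```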


@@ -14,11 +14,5 @@
<true/>
<key>com.apple.security.network.client</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.get-task-allow</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
</dict>
</plist>


@@ -16,30 +16,30 @@
import * as electron from 'electron';
import * as remote from '@electron/remote';
import { debounce, capitalize, Dictionary, values } from 'lodash';
import * as sdk from 'etcher-sdk';
import * as _ from 'lodash';
import outdent from 'outdent';
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { v4 as uuidV4 } from 'uuid';
import * as packageJSON from '../../../package.json';
import { DrivelistDrive } from '../../shared/drive-constraints';
import { DrivelistDrive, isSourceDrive } from '../../shared/drive-constraints';
import * as EXIT_CODES from '../../shared/exit-codes';
import * as messages from '../../shared/messages';
import * as availableDrives from './models/available-drives';
import * as flashState from './models/flash-state';
import { deselectImage, getImage } from './models/selection-state';
import * as settings from './models/settings';
import { Actions, observe, store } from './models/store';
import * as analytics from './modules/analytics';
import { startApiAndSpawnChild } from './modules/api';
import { scanner as driveScanner } from './modules/drive-scanner';
import * as exceptionReporter from './modules/exception-reporter';
import * as osDialog from './os/dialog';
import * as windowProgress from './os/window-progress';
import MainPage from './pages/main/MainPage';
import './css/main.css';
import * as i18next from 'i18next';
import { promises } from 'dns';
import { SourceMetadata } from '../../shared/typings/source-selector';
window.addEventListener(
'unhandledrejection',
@@ -89,7 +89,7 @@ analytics.logEvent('Application start', {
version: currentVersion,
});
const debouncedLog = debounce(console.log, 1000, { maxWait: 1000 });
const debouncedLog = _.debounce(console.log, 1000, { maxWait: 1000 });
function pluralize(word: string, quantity: number) {
return `${quantity} ${word}${quantity === 1 ? '' : 's'}`;
@@ -115,7 +115,7 @@ observe(() => {
// might cause some non-sense flashing state logs including
// `undefined` values.
debouncedLog(outdent({ newline: ' ' })`
${capitalize(currentFlashState.type)}
${_.capitalize(currentFlashState.type)}
${active},
${currentFlashState.percentage}%
at
@@ -128,40 +128,173 @@ observe(() => {
`);
});
function setDrives(drives: Dictionary<DrivelistDrive>) {
// prevent setting drives while flashing otherwise we might lose some while we unmount them
if (!flashState.isFlashing()) {
availableDrives.setDrives(values(drives));
/**
* @summary The radix used by USB ID numbers
*/
const USB_ID_RADIX = 16;
/**
* @summary The expected length of a USB ID number
*/
const USB_ID_LENGTH = 4;
/**
* @summary Convert a USB id (e.g. product/vendor) to a string
*
* @example
* console.log(usbIdToString(2652))
* > '0x0a5c'
*/
function usbIdToString(id: number): string {
return `0x${_.padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`;
}
/**
* @summary Product ID of BCM2708
*/
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763;
/**
* @summary Product ID of BCM2710
*/
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764;
/**
* @summary Compute module descriptions
*/
const COMPUTE_MODULE_DESCRIPTIONS: _.Dictionary<string> = {
[USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
[USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3',
};
async function driveIsAllowed(drive: {
devicePath: string;
device: string;
raw: string;
}) {
const driveBlacklist = (await settings.get('driveBlacklist')) || [];
return !(
driveBlacklist.includes(drive.devicePath) ||
driveBlacklist.includes(drive.device) ||
driveBlacklist.includes(drive.raw)
);
}
type Drive =
| sdk.sourceDestination.BlockDevice
| sdk.sourceDestination.UsbbootDrive
| sdk.sourceDestination.DriverlessDevice;
function prepareDrive(drive: Drive) {
if (drive instanceof sdk.sourceDestination.BlockDevice) {
// @ts-ignore (BlockDevice.drive is private)
return drive.drive;
} else if (drive instanceof sdk.sourceDestination.UsbbootDrive) {
// This is a workaround etcher expecting a device string and a size
// @ts-ignore
drive.device = drive.usbDevice.portId;
drive.size = null;
// @ts-ignore
drive.progress = 0;
drive.disabled = true;
drive.on('progress', (progress) => {
updateDriveProgress(drive, progress);
});
return drive;
} else if (drive instanceof sdk.sourceDestination.DriverlessDevice) {
const description =
COMPUTE_MODULE_DESCRIPTIONS[
drive.deviceDescriptor.idProduct.toString()
] || 'Compute Module';
return {
device: `${usbIdToString(
drive.deviceDescriptor.idVendor,
)}:${usbIdToString(drive.deviceDescriptor.idProduct)}`,
displayName: 'Missing drivers',
description,
mountpoints: [],
isReadOnly: false,
isSystem: false,
disabled: true,
icon: 'warning',
size: null,
link: 'https://www.raspberrypi.com/documentation/computers/compute-module.html#flashing-the-compute-module-emmc',
linkCTA: 'Install',
linkTitle: 'Install missing drivers',
linkMessage: outdent`
Would you like to download the necessary drivers from the Raspberry Pi Foundation?
This will open your browser.
Once opened, download and run the installer from the "Windows Installer" section to install the drivers
`,
};
}
}
// Spwaning the child process without privileges to get the drives list
// TODO: clean up this mess of exports
export let requestMetadata: any;
function setDrives(drives: _.Dictionary<DrivelistDrive>) {
availableDrives.setDrives(_.values(drives));
}
// start the api and spawn the child process
startApiAndSpawnChild({
withPrivileges: false,
}).then(({ emit, registerHandler }) => {
// start scanning
emit('scan');
function getDrives() {
return _.keyBy(availableDrives.getDrives(), 'device');
}
// make the sourceMetada awaitable to be used on source selection
requestMetadata = async (params: any): Promise<SourceMetadata> => {
emit('sourceMetadata', JSON.stringify(params));
async function addDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
if (!(await driveIsAllowed(preparedDrive))) {
return;
}
const drives = getDrives();
drives[preparedDrive.device] = preparedDrive;
setDrives(drives);
}
return new Promise((resolve) =>
registerHandler('sourceMetadata', (data: any) => {
resolve(JSON.parse(data));
}),
);
};
function removeDrive(drive: Drive) {
if (
drive instanceof sdk.sourceDestination.BlockDevice &&
// @ts-ignore BlockDevice.drive is private
isSourceDrive(drive.drive, getImage())
) {
// Deselect the image if it was on the drive that was removed.
// This will also deselect the image if the drive mountpoints change.
deselectImage();
}
const preparedDrive = prepareDrive(drive);
const drives = getDrives();
delete drives[preparedDrive.device];
setDrives(drives);
}
registerHandler('drives', (data: any) => {
setDrives(JSON.parse(data));
});
function updateDriveProgress(
drive: sdk.sourceDestination.UsbbootDrive,
progress: number,
) {
const drives = getDrives();
// @ts-ignore
const driveInMap = drives[drive.device];
if (driveInMap) {
// @ts-ignore
drives[drive.device] = { ...driveInMap, progress };
setDrives(drives);
}
}
driveScanner.on('attach', addDrive);
driveScanner.on('detach', removeDrive);
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
// otherwise we risk presenting the same error over
// and over again to the user, while also heavily
// spamming our error reporting service.
driveScanner.stop();
return exceptionReporter.report(error);
});
driveScanner.start();
let popupExists = false;
analytics.initAnalytics();


@@ -139,9 +139,8 @@ export function FlashResults({
};
} & FlexProps) {
const [showErrorsInfo, setShowErrorsInfo] = React.useState(false);
const allFailed = !skip && results?.devices?.successful === 0;
const someFailed = results?.devices?.failed !== 0 || errors?.length !== 0;
const allFailed = !skip && results.devices.successful === 0;
const someFailed = results.devices.failed !== 0 || errors.length !== 0;
const effectiveSpeed = bytesToMegabytes(getEffectiveSpeed(results)).toFixed(
1,
);


@@ -95,7 +95,6 @@ export class SafeWebview extends React.PureComponent<
);
this.entryHref = url.href;
// Events steal 'this'
this.handleDomReady = _.bind(this.handleDomReady, this);
this.didFailLoad = _.bind(this.didFailLoad, this);
this.didGetResponseDetails = _.bind(this.didGetResponseDetails, this);
// Make a persistent electron session for the webview
@@ -122,8 +121,6 @@ export class SafeWebview extends React.PureComponent<
ref={this.webviewRef}
partition={ELECTRON_SESSION}
style={style}
// @ts-ignore
allowpopups="true"
/>
);
}
@@ -137,8 +134,8 @@ export class SafeWebview extends React.PureComponent<
this.didFailLoad,
);
this.webviewRef.current.addEventListener(
'dom-ready',
this.handleDomReady,
'new-window',
SafeWebview.newWindow,
);
this.webviewRef.current.addEventListener(
'console-message',
@@ -160,8 +157,8 @@ export class SafeWebview extends React.PureComponent<
this.didFailLoad,
);
this.webviewRef.current.removeEventListener(
'dom-ready',
this.handleDomReady,
'new-window',
SafeWebview.newWindow,
);
this.webviewRef.current.removeEventListener(
'console-message',
@@ -171,15 +168,6 @@ export class SafeWebview extends React.PureComponent<
this.session.webRequest.onCompleted(null);
}
handleDomReady() {
const webview = this.webviewRef.current;
if (webview == null) {
return;
}
const id = webview.getWebContentsId();
electron.ipcRenderer.send('webview-dom-ready', id);
}
// Set the element state to hidden
public didFailLoad() {
this.setState({
@@ -208,4 +196,17 @@ export class SafeWebview extends React.PureComponent<
}
}
}
// Open link in browser if it's opened as a 'foreground-tab'
public static async newWindow(event: electron.NewWindowEvent) {
const url = new window.URL(event.url);
if (
(url.protocol === 'http:' || url.protocol === 'https:') &&
event.disposition === 'foreground-tab' &&
// Don't open links if they're disabled by the env var
!(await settings.get('disableExternalLinks'))
) {
electron.shell.openExternal(url.href);
}
}
}


@@ -20,13 +20,13 @@ import LinkSvg from '@fortawesome/fontawesome-free/svgs/solid/link.svg';
import ExclamationTriangleSvg from '@fortawesome/fontawesome-free/svgs/solid/exclamation-triangle.svg';
import ChevronDownSvg from '@fortawesome/fontawesome-free/svgs/solid/chevron-down.svg';
import ChevronRightSvg from '@fortawesome/fontawesome-free/svgs/solid/chevron-right.svg';
import { sourceDestination } from 'etcher-sdk';
import { ipcRenderer, IpcRendererEvent } from 'electron';
import { uniqBy, isNil } from 'lodash';
import * as _ from 'lodash';
import { GPTPartition, MBRPartition } from 'partitioninfo';
import * as path from 'path';
import * as prettyBytes from 'pretty-bytes';
import * as React from 'react';
import { requestMetadata } from '../../app';
import {
Flex,
ButtonProps,
@@ -47,7 +47,7 @@ import { observe } from '../../models/store';
import * as analytics from '../../modules/analytics';
import * as exceptionReporter from '../../modules/exception-reporter';
import * as osDialog from '../../os/dialog';
import { replaceWindowsNetworkDriveLetter } from '../../os/windows-network-drives';
import {
ChangeButton,
DetailsText,
@@ -64,12 +64,8 @@ import ImageSvg from '../../../assets/image.svg';
import SrcSvg from '../../../assets/src.svg';
import { DriveSelector } from '../drive-selector/drive-selector';
import { DrivelistDrive } from '../../../../shared/drive-constraints';
import axios, { AxiosRequestConfig } from 'axios';
import { isJson } from '../../../../shared/utils';
import {
SourceMetadata,
Authentication,
Source,
} from '../../../../shared/typings/source-selector';
import * as i18next from 'i18next';
const recentUrlImagesKey = 'recentUrlImages';
@@ -87,7 +83,7 @@ function normalizeRecentUrlImages(urls: any[]): URL[] {
}
})
.filter((url) => url !== undefined);
urls = uniqBy(urls, (url) => url.href);
urls = _.uniqBy(urls, (url) => url.href);
return urls.slice(urls.length - 5);
}
@@ -305,6 +301,24 @@ const FlowSelector = styled(
}
`;
export type Source =
| typeof sourceDestination.File
| typeof sourceDestination.BlockDevice
| typeof sourceDestination.Http;
export interface SourceMetadata extends sourceDestination.Metadata {
hasMBR?: boolean;
partitions?: MBRPartition[] | GPTPartition[];
path: string;
displayName: string;
description: string;
SourceType: Source;
drive?: DrivelistDrive;
extension?: string;
archiveExtension?: string;
auth?: Authentication;
}
interface SourceSelectorProps {
flashing: boolean;
}
@@ -322,6 +336,11 @@ interface SourceSelectorState {
imageLoading: boolean;
}
interface Authentication {
username: string;
password: string;
}
export class SourceSelector extends React.Component<
SourceSelectorProps,
SourceSelectorState
@@ -362,11 +381,43 @@ export class SourceSelector extends React.Component<
this.setState({ imageLoading: true });
await this.selectSource(
imagePath,
isURL(this.normalizeImagePath(imagePath)) ? 'Http' : 'File',
isURL(this.normalizeImagePath(imagePath))
? sourceDestination.Http
: sourceDestination.File,
).promise;
this.setState({ imageLoading: false });
}
private async createSource(
selected: string,
SourceType: Source,
auth?: Authentication,
) {
try {
selected = await replaceWindowsNetworkDriveLetter(selected);
} catch (error: any) {
analytics.logException(error);
}
if (isJson(decodeURIComponent(selected))) {
const config: AxiosRequestConfig = JSON.parse(
decodeURIComponent(selected),
);
return new sourceDestination.Http({
url: config.url!,
axiosInstance: axios.create(_.omit(config, ['url'])),
});
}
if (SourceType === sourceDestination.File) {
return new sourceDestination.File({
path: selected,
});
}
return new sourceDestination.Http({ url: selected, auth });
}
public normalizeImagePath(imgPath: string) {
const decodedPath = decodeURIComponent(imgPath);
if (isJson(decodedPath)) {
@@ -395,10 +446,11 @@ export class SourceSelector extends React.Component<
},
promise: (async () => {
const sourcePath = isString(selected) ? selected : selected.device;
let source;
let metadata: SourceMetadata | undefined;
if (isString(selected)) {
if (
SourceType === 'Http' &&
SourceType === sourceDestination.Http &&
!isURL(this.normalizeImagePath(selected))
) {
this.handleError(
@@ -418,14 +470,24 @@ export class SourceSelector extends React.Component<
},
});
}
source = await this.createSource(selected, SourceType, auth);
if (cancelled) {
return;
}
try {
// this will send an event down the ipcMain asking for metadata
// we'll get the response through an event
const innerSource = await source.getInnerSource();
if (cancelled) {
return;
}
metadata = await this.getMetadata(innerSource, selected);
if (cancelled) {
return;
}
metadata.SourceType = SourceType;
metadata = await requestMetadata({ selected, SourceType, auth });
if (!metadata?.hasMBR && this.state.warning === null) {
if (!metadata.hasMBR && this.state.warning === null) {
analytics.logEvent('Missing partition table', { metadata });
this.setState({
warning: {
@@ -441,6 +503,12 @@ export class SourceSelector extends React.Component<
messages.error.openSource(sourcePath, error.message),
error,
);
} finally {
try {
await source.close();
} catch (error: any) {
// Noop
}
}
} else {
if (selected.partitionTableType === null) {
@@ -457,14 +525,13 @@ export class SourceSelector extends React.Component<
displayName: selected.displayName,
description: selected.displayName,
size: selected.size as SourceMetadata['size'],
SourceType: 'BlockDevice',
SourceType: sourceDestination.BlockDevice,
drive: selected,
};
}
if (metadata !== undefined) {
metadata.auth = auth;
metadata.SourceType = SourceType;
selectionState.selectSource(metadata);
analytics.logEvent('Select image', {
// An easy way so we can quickly identify if we're making use of
@@ -498,6 +565,25 @@ export class SourceSelector extends React.Component<
analytics.logEvent(title, { path: sourcePath });
}
private async getMetadata(
source: sourceDestination.SourceDestination,
selected: string | DrivelistDrive,
) {
const metadata = (await source.getMetadata()) as SourceMetadata;
const partitionTable = await source.getPartitionTable();
if (partitionTable) {
metadata.hasMBR = true;
metadata.partitions = partitionTable.partitions;
} else {
metadata.hasMBR = false;
}
if (isString(selected)) {
metadata.extension = path.extname(selected).slice(1);
metadata.path = selected;
}
return metadata;
}
private async openImageSelector() {
analytics.logEvent('Open image selector');
this.setState({ imageSelectorOpen: true });
@@ -510,7 +596,7 @@ export class SourceSelector extends React.Component<
analytics.logEvent('Image selector closed');
return;
}
await this.selectSource(imagePath, 'File').promise;
await this.selectSource(imagePath, sourceDestination.File).promise;
} catch (error: any) {
exceptionReporter.report(error);
} finally {
@@ -521,7 +607,7 @@ export class SourceSelector extends React.Component<
private async onDrop(event: React.DragEvent<HTMLDivElement>) {
const [file] = event.dataTransfer.files;
if (file) {
await this.selectSource(file.path, 'File').promise;
await this.selectSource(file.path, sourceDestination.File).promise;
}
}
@@ -637,7 +723,7 @@ export class SourceSelector extends React.Component<
{i18next.t('cancel')}
</ChangeButton>
)}
{!isNil(imageSize) && !imageLoading && (
{!_.isNil(imageSize) && !imageLoading && (
<DetailsText>{prettyBytes(imageSize)}</DetailsText>
)}
</>
@@ -741,7 +827,7 @@ export class SourceSelector extends React.Component<
let promise;
({ promise, cancel: cancelURLSelection } = this.selectSource(
imageURL,
'Http',
sourceDestination.Http,
auth,
));
await promise;
@@ -764,7 +850,10 @@ export class SourceSelector extends React.Component<
if (originalList.length) {
const originalSource = originalList[0];
if (selectionImage?.drive?.device !== originalSource.device) {
this.selectSource(originalSource, 'BlockDevice');
this.selectSource(
originalSource,
sourceDestination.BlockDevice,
);
}
} else {
selectionState.deselectImage();
@@ -779,7 +868,7 @@ export class SourceSelector extends React.Component<
) {
return selectionState.deselectImage();
}
this.selectSource(drive, 'BlockDevice');
this.selectSource(drive, sourceDestination.BlockDevice);
}
}}
/>


@@ -46,8 +46,7 @@ const translation = {
drive: '磁碟',
missingPartitionTable:
'看起來這不是一個可啟動的{{type}}。\n\n這個{{type}}似乎不包含分割表,因此您的設備可能無法識別或無法正確啟動。',
largeDriveSize:
'這是個很大容量的磁碟!請檢查並確認它不包含對您來說存放很重要的資料',
largeDriveSize: '這是個很大容量的磁碟!請檢查並確認它不包含對您來說存放很重要的資料',
systemDrive: '選擇系統分割區很危險,因為這將會刪除你的系統',
sourceDrive: '來源映像檔位於這個分割區中',
noSpace: '磁碟空間不足。請插入另一個較大的磁碟並重試。',
@@ -131,7 +130,8 @@ const translation = {
autoUpdate: '自動更新',
settings: '軟體設定',
systemInformation: '系統資訊',
trimExtPartitions: '修改原始映像檔上未分配的空間(在 ext 類型分割區中)',
trimExtPartitions:
'修改原始映像檔上未分配的空間(在 ext 類型分割區中)',
},
menu: {
edit: '編輯',


@@ -200,7 +200,7 @@ function storeReducer(
constraints.isDriveValid(drive, image) &&
!drive.isReadOnly &&
constraints.isDriveSizeRecommended(drive, image) &&
// We don't want to auto-select large drives except if autoSelectAllDrives is true
// We don't want to auto-select large drives execpt is autoSelectAllDrives is true
(!constraints.isDriveSizeLarge(drive) || shouldAutoselectAll) &&
// We don't want to auto-select system drives
!constraints.isSystemDrive(drive)


@@ -1,184 +0,0 @@
/** This function will :
* - start the ipc server (api)
* - spawn the child process (privileged or not)
* - wait for the child process to connect to the api
* - return a promise that will resolve with the emit function for the api
*
* //TODO:
* - this should be refactored to reverse the control flow:
* - the child process should be the server
* - this should be the client
* - replace the current node-ipc api with a websocket api
* - centralise the api for both the writer and the scanner instead of having two instances running
*/
import * as ipc from 'node-ipc';
import { spawn } from 'child_process';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from '../../../../package.json';
import * as permissions from '../../../shared/permissions';
import { getAppPath } from '../../../shared/get-app-path';
import * as errors from '../../../shared/errors';
const THREADS_PER_CPU = 16;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
function writerArgv(): string[] {
let entryPoint = path.join(getAppPath(), 'generated', 'etcher-util');
// AppImages run over FUSE, so the files inside the mount point
// can only be accessed by the user that mounted the AppImage.
// This means we can't re-spawn Etcher as root from the same
// mount-point, and as a workaround, we re-mount the original
// AppImage as root.
if (os.platform() === 'linux' && process.env.APPIMAGE && process.env.APPDIR) {
entryPoint = entryPoint.replace(process.env.APPDIR, '');
return [
process.env.APPIMAGE,
'-e',
`require(\`\${process.env.APPDIR}${entryPoint}\`)`,
];
} else {
return [entryPoint];
}
}
function writerEnv(
IPC_CLIENT_ID: string,
IPC_SERVER_ID: string,
IPC_SOCKET_ROOT: string,
) {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT,
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
async function spawnChild({
withPrivileges,
IPC_CLIENT_ID,
IPC_SERVER_ID,
IPC_SOCKET_ROOT,
}: {
withPrivileges: boolean;
IPC_CLIENT_ID: string;
IPC_SERVER_ID: string;
IPC_SOCKET_ROOT: string;
}) {
const argv = writerArgv();
const env = writerEnv(IPC_CLIENT_ID, IPC_SERVER_ID, IPC_SOCKET_ROOT);
if (withPrivileges) {
return await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
} else {
const process = await spawn(argv[0], argv.slice(1), {
env,
});
return { cancelled: false, process };
}
}
function terminateServer(server: any) {
// Turns out we need to destroy all sockets for
// the server to actually close. Otherwise, it
// just stops receiving any further connections,
// but remains open if there are active ones.
// @ts-ignore (no Server.sockets in @types/node-ipc)
for (const socket of server.sockets) {
socket.destroy();
}
server.stop();
}
// TODO: replace the custom ipc events by one generic "message" for all communication with the backend
function startApiAndSpawnChild({
withPrivileges,
}: {
withPrivileges: boolean;
}): Promise<any> {
// There might be multiple Etcher instances running at
// the same time, also we might spawn multiple child and api so we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}-${Date.now()}-${
withPrivileges ? 'privileged' : 'unprivileged'
}}}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}-${Date.now()}-${
withPrivileges ? 'privileged' : 'unprivileged'
}}`;
const IPC_SOCKET_ROOT = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = IPC_SOCKET_ROOT;
return new Promise((resolve, reject) => {
ipc.serve();
// log is special message which brings back the logs from the child process and prints them to the console
ipc.server.on('log', (message: string) => {
console.log(message);
});
// api to register more handlers with callbacks
const registerHandler = (event: string, handler: any) => {
ipc.server.on(event, handler);
};
// once api is ready (means child process is connected) we pass the emit and terminate function to the caller
ipc.server.on('ready', (_: any, socket) => {
const emit = (channel: string, data: any) => {
ipc.server.emit(socket, channel, data);
};
resolve({
emit,
terminateServer: () => terminateServer(ipc.server),
registerHandler,
});
});
// on api error we terminate
ipc.server.on('error', (error: any) => {
terminateServer(ipc.server);
const errorObject = errors.fromJSON(error);
reject(errorObject);
});
// when the api is started we spawn the child process
ipc.server.on('start', async () => {
try {
const results = await spawnChild({
withPrivileges,
IPC_CLIENT_ID,
IPC_SERVER_ID,
IPC_SOCKET_ROOT,
});
// this will happen if the child is spawned withPrivileges and privileges has been rejected
if (results.cancelled) {
reject();
}
} catch (error) {
reject(error);
}
});
// start the server
ipc.server.start();
});
}
export { startApiAndSpawnChild };
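
The header comment of this file describes the control flow the helper exposes (start the IPC server, spawn the child, wait for it to connect, then resolve with `emit`/`registerHandler`/`terminateServer`). A minimal usage sketch mirroring the unprivileged scanner call shown in app.tsx earlier in this diff; the import path assumes the caller sits next to the `modules/` directory, and the `'scan'`/`'drives'` event names are the ones used there:

```ts
// Usage sketch for startApiAndSpawnChild(), mirroring the unprivileged
// drive-scanner call in gui/app/app.tsx shown earlier in this diff.
import { startApiAndSpawnChild } from './modules/api';

async function startScanner() {
  const { emit, registerHandler, terminateServer } = await startApiAndSpawnChild({
    withPrivileges: false, // no elevation needed just to list drives
  });

  // Handlers receive JSON strings from the child process over node-ipc.
  registerHandler('drives', (data: string) => {
    console.log('drives from child:', JSON.parse(data));
  });

  // Ask the child process to start scanning for drives.
  emit('scan');

  return terminateServer; // call this to tear the IPC server down
}
```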

View File

@@ -17,15 +17,38 @@
import { Drive as DrivelistDrive } from 'drivelist';
import * as sdk from 'etcher-sdk';
import { Dictionary } from 'lodash';
import * as ipc from 'node-ipc';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from '../../../../package.json';
import * as errors from '../../../shared/errors';
import { SourceMetadata } from '../../../shared/typings/source-selector';
import * as permissions from '../../../shared/permissions';
import { getAppPath } from '../../../shared/utils';
import { SourceMetadata } from '../components/source-selector/source-selector';
import * as flashState from '../models/flash-state';
import * as selectionState from '../models/selection-state';
import * as settings from '../models/settings';
import * as analytics from '../modules/analytics';
import * as windowProgress from '../os/window-progress';
import { startApiAndSpawnChild } from './api';
import { terminateScanningServer } from '../app';
const THREADS_PER_CPU = 16;
// There might be multiple Etcher instances running at
// the same time, therefore we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
/**
* @summary Handle a flash error and log it to analytics
@@ -57,7 +80,51 @@ function handleErrorLogging(
}
}
let cancelEmitter: (type: string) => void | undefined;
function terminateServer() {
// Turns out we need to destroy all sockets for
// the server to actually close. Otherwise, it
// just stops receiving any further connections,
// but remains open if there are active ones.
// @ts-ignore (no Server.sockets in @types/node-ipc)
for (const socket of ipc.server.sockets) {
socket.destroy();
}
ipc.server.stop();
}
function writerArgv(): string[] {
let entryPoint = path.join(getAppPath(), 'generated', 'child-writer.js');
// AppImages run over FUSE, so the files inside the mount point
// can only be accessed by the user that mounted the AppImage.
// This means we can't re-spawn Etcher as root from the same
// mount-point, and as a workaround, we re-mount the original
// AppImage as root.
if (os.platform() === 'linux' && process.env.APPIMAGE && process.env.APPDIR) {
entryPoint = entryPoint.replace(process.env.APPDIR, '');
return [
process.env.APPIMAGE,
'-e',
`require(\`\${process.env.APPDIR}${entryPoint}\`)`,
];
} else {
return [process.argv[0], entryPoint];
}
}
function writerEnv() {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT: ipc.config.socketRoot,
ELECTRON_RUN_AS_NODE: '1',
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
interface FlashResults {
skip?: boolean;
@@ -77,13 +144,22 @@ async function performWrite(
drives: DrivelistDrive[],
onProgress: sdk.multiWrite.OnProgressFunction,
): Promise<{ cancelled?: boolean }> {
let cancelled = false;
let skip = false;
ipc.serve();
const { autoBlockmapping, decompressFirst } = await settings.getAll();
return await new Promise((resolve, reject) => {
ipc.server.on('error', (error) => {
terminateServer();
const errorObject = errors.fromJSON(error);
reject(errorObject);
});
console.log({ image, drives });
ipc.server.on('log', (message) => {
console.log(message);
});
return await new Promise(async (resolve, reject) => {
const flashResults: FlashResults = {};
const analyticsData = {
image,
drives,
@@ -92,51 +168,75 @@ async function performWrite(
flashInstanceUuid: flashState.getFlashUuid(),
};
const onFail = ({ device, error }) => {
console.log('fail event');
console.log(device);
console.log(error);
ipc.server.on('fail', ({ device, error }) => {
if (device.devicePath) {
flashState.addFailedDeviceError({ device, error });
}
handleErrorLogging(error, analyticsData);
finish();
};
});
const onDone = (event) => {
console.log('done event');
ipc.server.on('done', (event) => {
event.results.errors = event.results.errors.map(
(data: Dictionary<any> & { message: string }) => {
return errors.fromJSON(data);
},
);
flashResults.results = event.results;
finish();
};
});
const onAbort = () => {
console.log('abort event');
flashResults.cancelled = true;
finish();
};
ipc.server.on('abort', () => {
terminateServer();
cancelled = true;
});
const onSkip = () => {
console.log('skip event');
flashResults.skip = true;
finish();
};
ipc.server.on('skip', () => {
terminateServer();
skip = true;
});
const finish = () => {
ipc.server.on('state', onProgress);
ipc.server.on('ready', (_data, socket) => {
ipc.server.emit(socket, 'write', {
image,
destinations: drives,
SourceType: image.SourceType.name,
autoBlockmapping,
decompressFirst,
});
});
const argv = writerArgv();
ipc.server.on('start', async () => {
console.log(`Elevating command: ${argv.join(' ')}`);
const env = writerEnv();
try {
const results = await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
flashResults.cancelled = cancelled || results.cancelled;
flashResults.skip = skip;
} catch (error: any) {
// This happens when the child is killed using SIGKILL
const SIGKILL_EXIT_CODE = 137;
if (error.code === SIGKILL_EXIT_CODE) {
error.code = 'ECHILDDIED';
}
reject(error);
} finally {
console.log('Terminating IPC server');
terminateServer();
}
console.log('Flash results', flashResults);
// The flash wasn't cancelled and we didn't get a 'done' event
// Catch unexepected situation
if (
!flashResults.cancelled &&
!flashResults.skip &&
flashResults.results === undefined
) {
console.log(flashResults);
reject(
errors.createUserError({
title: 'The writer process ended unexpectedly',
@@ -144,40 +244,15 @@ async function performWrite(
'Please try again, and contact the Etcher team if the problem persists',
}),
);
return;
}
console.log('Terminating IPC server');
terminateServer();
resolve(flashResults);
};
});
// Spawn the child process with privileges and wait for the connection to be made
const { emit, registerHandler, terminateServer } =
await startApiAndSpawnChild({
withPrivileges: true,
});
registerHandler('state', onProgress);
registerHandler('fail', onFail);
registerHandler('done', onDone);
registerHandler('abort', onAbort);
registerHandler('skip', onSkip);
cancelEmitter = (cancelStatus: string) => emit(cancelStatus);
// Now that we know we're connected we can instruct the child process to start the write
const paramaters = {
image,
destinations: drives,
SourceType: image.SourceType,
autoBlockmapping,
decompressFirst,
};
console.log('params', paramaters);
emit('write', paramaters);
// Clear the update lock timer to prevent longer
// flashing timing it out, and releasing the lock
ipc.server.start();
});
// The process continue in the event handler
}
/**
@@ -194,7 +269,6 @@ export async function flash(
}
await flashState.setFlashingFlag();
flashState.setDevicePaths(
drives.map((d) => d.devicePath).filter((p) => p != null) as string[],
);
@@ -210,7 +284,6 @@ export async function flash(
analytics.logEvent('Flash', analyticsData);
// start api and call the flasher
try {
const result = await write(image, drives, flashState.setProgressState);
await flashState.unsetFlashingFlag(result);
@@ -219,11 +292,8 @@ export async function flash(
cancelled: false,
errorCode: error.code,
});
windowProgress.clear();
const { results = {} } = flashState.getFlashResults();
const eventData = {
...analyticsData,
errors: results.errors,
@@ -234,9 +304,7 @@ export async function flash(
analytics.logEvent('Write failed', eventData);
throw error;
}
windowProgress.clear();
if (flashState.wasLastFlashCancelled()) {
const eventData = {
...analyticsData,
@@ -259,7 +327,6 @@ export async function flash(
/**
* @summary Cancel write operation
* //TODO: find a better solution to handle cancellation
*/
export async function cancel(type: string) {
const status = type.toLowerCase();
@@ -274,7 +341,15 @@ export async function cancel(type: string) {
};
analytics.logEvent('Cancel', analyticsData);
if (cancelEmitter) {
cancelEmitter(status);
// Re-enable lock release on inactivity
try {
// @ts-ignore (no Server.sockets in @types/node-ipc)
const [socket] = ipc.server.sockets;
if (socket !== undefined) {
ipc.server.emit(socket, status);
}
} catch (error: any) {
analytics.logException(error);
}
}


@@ -27,6 +27,7 @@ import * as availableDrives from '../../models/available-drives';
import * as flashState from '../../models/flash-state';
import * as selection from '../../models/selection-state';
import * as analytics from '../../modules/analytics';
import { scanner as driveScanner } from '../../modules/drive-scanner';
import * as imageWriter from '../../modules/image-writer';
import * as notification from '../../os/notification';
import {
@@ -94,6 +95,10 @@ async function flashImageToDrive(
return '';
}
// Stop scanning drives when flashing
// otherwise Windows throws EPERM
driveScanner.stop();
const iconPath = path.join('media', 'icon.png');
const basename = path.basename(image.path);
try {
@@ -105,7 +110,7 @@ async function flashImageToDrive(
cancelled,
} = flashState.getFlashResults();
if (!skip && !cancelled) {
if (results?.devices?.successful > 0) {
if (results.devices.successful > 0) {
notifySuccess(iconPath, basename, drives, results.devices);
} else {
notifyFailure(iconPath, basename, drives);
@@ -124,6 +129,7 @@ async function flashImageToDrive(
return errorMessage;
} finally {
availableDrives.setDrives([]);
driveScanner.start();
}
return '';


@@ -26,8 +26,10 @@ import styled from 'styled-components';
import FinishPage from '../../components/finish/finish';
import { ReducedFlashingInfos } from '../../components/reduced-flashing-infos/reduced-flashing-infos';
import { SettingsModal } from '../../components/settings/settings';
import { SourceSelector } from '../../components/source-selector/source-selector';
import { SourceMetadata } from '../../../../shared/typings/source-selector';
import {
SourceMetadata,
SourceSelector,
} from '../../components/source-selector/source-selector';
import * as flashState from '../../models/flash-state';
import * as selectionState from '../../models/selection-state';
import * as settings from '../../models/settings';
@@ -311,7 +313,7 @@ export class MainPage extends React.Component<
onClick={() =>
openExternal(
selectionState.getImage()?.supportUrl ||
'https://github.com/balena-io/etcher/blob/master/docs/SUPPORT.md',
'https://github.com/balena-io/etcher/blob/master/SUPPORT.md',
)
}
tabIndex={6}


@@ -21,7 +21,7 @@ import { promises as fs } from 'fs';
import { platform } from 'os';
import * as path from 'path';
import * as semver from 'semver';
import * as lodash from 'lodash';
import * as _ from 'lodash';
import './app/i18n';
@@ -52,7 +52,7 @@ async function checkForUpdates(interval: number) {
const release = await autoUpdater.checkForUpdates();
const isOutdated =
semver.compare(release!.updateInfo.version, version) > 0;
const shouldUpdate = release!.updateInfo.stagingPercentage !== 0; // undefined (default) means 100%
const shouldUpdate = release!.updateInfo.stagingPercentage !== 0; // undefinded (default) means 100%
if (shouldUpdate && isOutdated) {
await autoUpdater.downloadUpdate();
packageUpdated = true;
@@ -107,10 +107,10 @@ async function getCommandLineURL(argv: string[]): Promise<string | undefined> {
}
}
const initSentryMain = lodash.once(() => {
const initSentryMain = _.once(() => {
const dsn =
settings.getSync('analyticsSentryToken') ||
lodash.get(packageJSON, ['analytics', 'sentry', 'token']);
_.get(packageJSON, ['analytics', 'sentry', 'token']);
SentryMain.init({ dsn, beforeSend: anonymizeSentryData });
});
@@ -181,10 +181,10 @@ async function createMainWindow() {
electron.app.setAsDefaultProtocolClient(customProtocol);
// mainWindow.setFullScreen(true);
mainWindow.setFullScreen(true);
// Prevent flash of white when starting the application
mainWindow.once('ready-to-show', () => {
mainWindow.on('ready-to-show', () => {
console.timeEnd('ready-to-show');
// Electron sometimes caches the zoomFactor
// making it obnoxious to switch back-and-forth
@@ -267,24 +267,6 @@ async function main(): Promise<void> {
console.log('Build menu failed. ');
}
});
electron.ipcMain.on('webview-dom-ready', (_, id) => {
const webview = electron.webContents.fromId(id);
// Open link in browser if it's opened as a 'foreground-tab'
webview.setWindowOpenHandler((event) => {
const url = new URL(event.url);
if (
(url.protocol === 'http:' || url.protocol === 'https:') &&
event.disposition === 'foreground-tab' &&
// Don't open links if they're disabled by the env var
!settings.getSync('disableExternalLinks')
) {
electron.shell.openExternal(url.href);
}
return { action: 'deny' };
});
});
}
}
main();


@@ -0,0 +1,333 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Drive as DrivelistDrive } from 'drivelist';
import {
BlockDevice,
File,
Http,
Metadata,
SourceDestination,
} from 'etcher-sdk/build/source-destination';
import {
MultiDestinationProgress,
OnProgressFunction,
OnFailFunction,
decompressThenFlash,
DECOMPRESSED_IMAGE_PREFIX,
} from 'etcher-sdk/build/multi-write';
import { cleanupTmpFiles } from 'etcher-sdk/build/tmp';
import * as ipc from 'node-ipc';
import { totalmem } from 'os';
import { toJSON } from '../../shared/errors';
import { GENERAL_ERROR, SUCCESS } from '../../shared/exit-codes';
import { delay, isJson } from '../../shared/utils';
import { SourceMetadata } from '../app/components/source-selector/source-selector';
import axios from 'axios';
import * as _ from 'lodash';
ipc.config.id = process.env.IPC_CLIENT_ID as string;
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT as string;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
// > If set to 0, the client will NOT try to reconnect.
// See https://github.com/RIAEvangelist/node-ipc/
//
// The purpose behind this change is for this process
// to emit a "disconnect" event as soon as the GUI
// process is closed, so we can kill this process as well.
// @ts-ignore (0 is a valid value for stopRetrying and is not the same as false)
ipc.config.stopRetrying = 0;
const DISCONNECT_DELAY = 100;
const IPC_SERVER_ID = process.env.IPC_SERVER_ID as string;
/**
* @summary Send a log debug message to the IPC server
*/
function log(message: string) {
ipc.of[IPC_SERVER_ID].emit('log', message);
}
/**
* @summary Terminate the child writer process
*/
async function terminate(exitCode: number) {
ipc.disconnect(IPC_SERVER_ID);
await cleanupTmpFiles(Date.now(), DECOMPRESSED_IMAGE_PREFIX);
process.nextTick(() => {
process.exit(exitCode || SUCCESS);
});
}
/**
* @summary Handle a child writer error
*/
async function handleError(error: Error) {
ipc.of[IPC_SERVER_ID].emit('error', toJSON(error));
await delay(DISCONNECT_DELAY);
await terminate(GENERAL_ERROR);
}
export interface FlashError extends Error {
description: string;
device: string;
code: string;
}
export interface WriteResult {
bytesWritten?: number;
devices?: {
failed: number;
successful: number;
};
errors: FlashError[];
sourceMetadata?: Metadata;
}
export interface FlashResults extends WriteResult {
skip?: boolean;
cancelled?: boolean;
}
/**
* @summary writes the source to the destinations and validates the writes
* @param {SourceDestination} source - source
* @param {SourceDestination[]} destinations - destinations
* @param {Boolean} verify - whether to validate the writes or not
* @param {Boolean} autoBlockmapping - whether to trim ext partitions before writing
* @param {Function} onProgress - function to call on progress
* @param {Function} onFail - function to call on fail
* @returns {Promise<{ bytesWritten, devices, errors} >}
*/
async function writeAndValidate({
source,
destinations,
verify,
autoBlockmapping,
decompressFirst,
onProgress,
onFail,
}: {
source: SourceDestination;
destinations: BlockDevice[];
verify: boolean;
autoBlockmapping: boolean;
decompressFirst: boolean;
onProgress: OnProgressFunction;
onFail: OnFailFunction;
}): Promise<WriteResult> {
const { sourceMetadata, failures, bytesWritten } = await decompressThenFlash({
source,
destinations,
onFail,
onProgress,
verify,
trim: autoBlockmapping,
numBuffers: Math.min(
2 + (destinations.length - 1) * 32,
256,
Math.floor(totalmem() / 1024 ** 2 / 8),
),
decompressFirst,
});
const result: WriteResult = {
bytesWritten,
devices: {
failed: failures.size,
successful: destinations.length - failures.size,
},
errors: [],
sourceMetadata,
};
for (const [destination, error] of failures) {
const err = error as FlashError;
const drive = destination as BlockDevice;
err.device = drive.device;
err.description = drive.description;
result.errors.push(err);
}
return result;
}
interface WriteOptions {
image: SourceMetadata;
destinations: DrivelistDrive[];
autoBlockmapping: boolean;
decompressFirst: boolean;
SourceType: string;
httpRequest?: any;
}
ipc.connectTo(IPC_SERVER_ID, () => {
// Remove leftover tmp files older than 1 hour
cleanupTmpFiles(Date.now() - 60 * 60 * 1000);
process.once('uncaughtException', handleError);
// Gracefully exit on the following cases. If the parent
// process detects that child exit successfully but
// no flashing information is available, then it will
// assume that the child died halfway through.
process.once('SIGINT', async () => {
await terminate(SUCCESS);
});
process.once('SIGTERM', async () => {
await terminate(SUCCESS);
});
// The IPC server failed. Abort.
ipc.of[IPC_SERVER_ID].on('error', async () => {
await terminate(SUCCESS);
});
// The IPC server was disconnected. Abort.
ipc.of[IPC_SERVER_ID].on('disconnect', async () => {
await terminate(SUCCESS);
});
ipc.of[IPC_SERVER_ID].on('write', async (options: WriteOptions) => {
/**
* @summary Progress handler
* @param {Object} state - progress state
* @example
* writer.on('progress', onProgress)
*/
const onProgress = (state: MultiDestinationProgress) => {
ipc.of[IPC_SERVER_ID].emit('state', state);
};
let exitCode = SUCCESS;
/**
* @summary Abort handler
* @example
* writer.on('abort', onAbort)
*/
const onAbort = async () => {
log('Abort');
ipc.of[IPC_SERVER_ID].emit('abort');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
const onSkip = async () => {
log('Skip validation');
ipc.of[IPC_SERVER_ID].emit('skip');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
ipc.of[IPC_SERVER_ID].on('cancel', onAbort);
ipc.of[IPC_SERVER_ID].on('skip', onSkip);
/**
* @summary Failure handler (non-fatal errors)
* @param {SourceDestination} destination - destination
* @param {Error} error - error
* @example
* writer.on('fail', onFail)
*/
const onFail = (destination: SourceDestination, error: Error) => {
ipc.of[IPC_SERVER_ID].emit('fail', {
// TODO: device should be destination
// @ts-ignore (destination.drive is private)
device: destination.drive,
error: toJSON(error),
});
};
const destinations = options.destinations.map((d) => d.device);
const imagePath = options.image.path;
log(`Image: ${imagePath}`);
log(`Devices: ${destinations.join(', ')}`);
log(`Auto blockmapping: ${options.autoBlockmapping}`);
log(`Decompress first: ${options.decompressFirst}`);
const dests = options.destinations.map((destination) => {
return new BlockDevice({
drive: destination,
unmountOnSuccess: true,
write: true,
direct: true,
});
});
const { SourceType } = options;
try {
let source;
if (options.image.drive) {
source = new BlockDevice({
drive: options.image.drive,
direct: !options.autoBlockmapping,
});
} else {
if (SourceType === File.name) {
source = new File({
path: imagePath,
});
} else {
const decodedImagePath = decodeURIComponent(imagePath);
if (isJson(decodedImagePath)) {
const imagePathObject = JSON.parse(decodedImagePath);
source = new Http({
url: imagePathObject.url,
avoidRandomAccess: true,
axiosInstance: axios.create(_.omit(imagePathObject, ['url'])),
auth: options.image.auth,
});
} else {
source = new Http({
url: imagePath,
avoidRandomAccess: true,
auth: options.image.auth,
});
}
}
}
const results = await writeAndValidate({
source,
destinations: dests,
verify: true,
autoBlockmapping: options.autoBlockmapping,
decompressFirst: options.decompressFirst,
onProgress,
onFail,
});
log(`Finish: ${results.bytesWritten}`);
results.errors = results.errors.map((error) => {
return toJSON(error);
});
ipc.of[IPC_SERVER_ID].emit('done', { results });
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
} catch (error: any) {
exitCode = GENERAL_ERROR;
ipc.of[IPC_SERVER_ID].emit('error', toJSON(error));
}
});
ipc.of[IPC_SERVER_ID].on('connect', () => {
log(
`Successfully connected to IPC server: ${IPC_SERVER_ID}, socket root ${ipc.config.socketRoot}`,
);
ipc.of[IPC_SERVER_ID].emit('ready', {});
});
});
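
The numBuffers value handed to decompressThenFlash() above grows with the number of destinations but is capped at 256 and at roughly one eighth of system memory counted in MiB. A worked sketch of that arithmetic, with the memory figures as hypothetical examples:

    import { totalmem } from 'os';

    // Same heuristic as in writeAndValidate() above, extracted for illustration.
    function numBuffersFor(destinationCount: number): number {
      return Math.min(
        2 + (destinationCount - 1) * 32, // 2 buffers for one drive, +32 per extra drive
        256, // hard cap
        Math.floor(totalmem() / 1024 ** 2 / 8), // ~1/8 of RAM, counted in MiB
      );
    }

    // On a hypothetical 8 GiB machine (8192 MiB / 8 = 1024):
    //   1 destination  -> min(2, 256, 1024)   = 2
    //   3 destinations -> min(66, 256, 1024)  = 66
    //   9 destinations -> min(258, 256, 1024) = 256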

View File

@@ -1,10 +0,0 @@
{
"bin": "build/util/child-writer.js",
"pkg": {
"assets": [
"node_modules/usb/prebuilds/darwin-x64+arm64/node.napi.node",
"node_modules/lzma-native/prebuilds/darwin-arm64/node.napi.node",
"node_modules/drivelist/build/Release/drivelist.node"
]
}
}

View File

@@ -19,7 +19,7 @@ import { join } from 'path';
import { env } from 'process';
import { promisify } from 'util';
import { getAppPath } from '../get-app-path';
import { getAppPath } from '../utils';
import { supportedLocales } from '../../gui/app/i18n';
const execFileAsync = promisify(execFile);

View File

@@ -15,11 +15,11 @@
*/
import { Drive } from 'drivelist';
import { isNil } from 'lodash';
import * as _ from 'lodash';
import * as pathIsInside from 'path-is-inside';
import * as messages from './messages';
import { SourceMetadata } from './typings/source-selector';
import { SourceMetadata } from '../gui/app/components/source-selector/source-selector';
/**
* @summary The default unknown size for things such as images and drives
@@ -210,8 +210,8 @@ export function getDriveImageCompatibilityStatuses(
});
}
if (
!isNil(drive) &&
!isNil(drive.size) &&
!_.isNil(drive) &&
!_.isNil(drive.size) &&
!isDriveLargeEnough(drive, image)
) {
statusList.push(statuses.small);
@@ -229,7 +229,7 @@ export function getDriveImageCompatibilityStatuses(
if (
image !== undefined &&
!isNil(drive) &&
!_.isNil(drive) &&
!isDriveSizeRecommended(drive, image)
) {
statusList.push(statuses.sizeNotRecommended);

View File

@@ -1,12 +0,0 @@
export function getAppPath(): string {
return (
(require('electron').app || require('@electron/remote').app)
.getAppPath()
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
.replace(/\.asar$/, () =>
process.platform === 'darwin' ? '-' + process.arch : '',
)
);
}

View File

@@ -1,23 +0,0 @@
import { GPTPartition, MBRPartition } from 'partitioninfo';
import { sourceDestination } from 'etcher-sdk';
import { DrivelistDrive } from '../drive-constraints';
export type Source = 'File' | 'BlockDevice' | 'Http';
export interface SourceMetadata extends sourceDestination.Metadata {
hasMBR?: boolean;
partitions?: MBRPartition[] | GPTPartition[];
path: string;
displayName: string;
description: string;
SourceType: Source;
drive?: DrivelistDrive;
extension?: string;
archiveExtension?: string;
auth?: Authentication;
}
export interface Authentication {
username: string;
password: string;
}

View File

@@ -14,6 +14,9 @@
* limitations under the License.
*/
import axios from 'axios';
import { Dictionary } from 'lodash';
import * as errors from './errors';
export function isValidPercentage(percentage: any): boolean {
@@ -35,6 +38,19 @@ export async function delay(duration: number): Promise<void> {
});
}
export function getAppPath(): string {
return (
(require('electron').app || require('@electron/remote').app)
.getAppPath()
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
.replace(/\.asar$/, () =>
process.platform === 'darwin' ? '-' + process.arch : '',
)
);
}
export function isJson(jsonString: string) {
try {
JSON.parse(jsonString);
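
The regular-expression replace in getAppPath() above is what turns the macOS universal build's app.asar path into the per-architecture folder, and simply drops the .asar suffix elsewhere. A small sketch of that substitution on hypothetical paths (platform and arch are passed in here instead of read from process):

    // Same replacement as in getAppPath() above, shown on hypothetical inputs.
    function stripAsar(appPath: string, platform: string, arch: string): string {
      return appPath.replace(/\.asar$/, () =>
        platform === 'darwin' ? '-' + arch : '',
      );
    }

    // '/Applications/balenaEtcher.app/Contents/Resources/app.asar'
    //   on darwin/arm64 -> '.../Resources/app-arm64'
    //   on win32/x64    -> '.../Resources/app'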

View File

@@ -1,201 +0,0 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as ipc from 'node-ipc';
import { toJSON } from '../shared/errors';
import { GENERAL_ERROR, SUCCESS } from '../shared/exit-codes';
import { delay } from '../shared/utils';
import { WriteOptions } from './types/types';
import { MultiDestinationProgress } from 'etcher-sdk/build/multi-write';
import { write, cleanup } from './child-writer';
import { startScanning } from './scanner';
import { getSourceMetadata } from './source-metadata';
import { DrivelistDrive } from '../shared/drive-constraints';
import { Dictionary, values } from 'lodash';
ipc.config.id = process.env.IPC_CLIENT_ID as string;
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT as string;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
// > If set to 0, the client will NOT try to reconnect.
// See https://github.com/RIAEvangelist/node-ipc/
//
// The purpose behind this change is for this process
// to emit a "disconnect" event as soon as the GUI
// process is closed, so we can kill this process as well.
// @ts-ignore (0 is a valid value for stopRetrying and is not the same as false)
ipc.config.stopRetrying = 0;
const DISCONNECT_DELAY = 100;
const IPC_SERVER_ID = process.env.IPC_SERVER_ID as string;
/**
* @summary Send a message to the IPC server
*/
function emit(channel: string, message?: any) {
ipc.of[IPC_SERVER_ID].emit(channel, message);
}
/**
* @summary Send a log debug message to the IPC server
*/
function log(message: string) {
if (console?.log) {
console.log(message);
}
emit('log', message);
}
/**
* @summary Terminate the child process
*/
async function terminate(exitCode: number) {
ipc.disconnect(IPC_SERVER_ID);
await cleanup(Date.now());
process.nextTick(() => {
process.exit(exitCode || SUCCESS);
});
}
/**
* @summary Handle errors
*/
async function handleError(error: Error) {
emit('error', toJSON(error));
await delay(DISCONNECT_DELAY);
await terminate(GENERAL_ERROR);
}
/**
* @summary Abort handler
* @example
*/
const onAbort = async (exitCode: number) => {
log('Abort');
emit('abort');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
const onSkip = async (exitCode: number) => {
log('Skip validation');
emit('skip');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
ipc.connectTo(IPC_SERVER_ID, () => {
// Gracefully exit on the following cases. If the parent
// process detects that child exit successfully but
// no flashing information is available, then it will
// assume that the child died halfway through.
process.once('uncaughtException', handleError);
process.once('SIGINT', async () => {
await terminate(SUCCESS);
});
process.once('SIGTERM', async () => {
await terminate(SUCCESS);
});
// The IPC server failed. Abort.
ipc.of[IPC_SERVER_ID].on('error', async () => {
await terminate(SUCCESS);
});
// The IPC server was disconnected. Abort.
ipc.of[IPC_SERVER_ID].on('disconnect', async () => {
await terminate(SUCCESS);
});
ipc.of[IPC_SERVER_ID].on('sourceMetadata', async (params) => {
const { selected, SourceType, auth } = JSON.parse(params);
try {
const sourceMetadata = await getSourceMetadata(
selected,
SourceType,
auth,
);
emitSourceMetadata(sourceMetadata);
} catch (error: any) {
emitFail(error);
}
});
ipc.of[IPC_SERVER_ID].on('scan', async () => {
startScanning();
});
// write handler
ipc.of[IPC_SERVER_ID].on('write', async (options: WriteOptions) => {
// Remove leftover tmp files older than 1 hour
cleanup(Date.now() - 60 * 60 * 1000);
let exitCode = SUCCESS;
ipc.of[IPC_SERVER_ID].on('cancel', () => onAbort(exitCode));
ipc.of[IPC_SERVER_ID].on('skip', () => onSkip(exitCode));
const results = await write(options);
if (results.errors.length > 0) {
results.errors = results.errors.map((error: any) => {
return toJSON(error);
});
exitCode = GENERAL_ERROR;
}
emit('done', { results });
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
});
ipc.of[IPC_SERVER_ID].on('connect', () => {
log(
`Successfully connected to IPC server: ${IPC_SERVER_ID}, socket root ${ipc.config.socketRoot}`,
);
emit('ready', {});
});
});
function emitLog(message: string) {
log(message);
}
function emitState(state: MultiDestinationProgress) {
emit('state', state);
}
function emitFail(data: any) {
emit('fail', data);
}
function emitDrives(drives: Dictionary<DrivelistDrive>) {
emit('drives', JSON.stringify(values(drives)));
}
function emitSourceMetadata(sourceMetadata: any) {
emit('sourceMetadata', JSON.stringify(sourceMetadata));
}
export { emitLog, emitState, emitFail, emitDrives, emitSourceMetadata };
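
This client configures itself entirely from environment variables, so the parent process has to export IPC_SERVER_ID, IPC_CLIENT_ID and IPC_SOCKET_ROOT before launching the util binary and serve on the same socket root. A minimal sketch of that parent-side wiring, assuming a plain spawn of the compiled binary (Etcher proper elevates it with permissions.elevateCommand(), as the wrapper further down shows):

    import { spawn } from 'child_process';
    import * as ipc from 'node-ipc';
    import * as os from 'os';
    import * as path from 'path';

    const IPC_SERVER_ID = `etcher-server-${process.pid}`;
    const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
    const socketRoot = path.join(os.tmpdir(), path.sep);

    ipc.config.id = IPC_SERVER_ID;
    ipc.config.socketRoot = socketRoot;
    ipc.config.silent = true;

    ipc.serve();
    ipc.server.on('ready', (_data, socket) => {
      // Ask the child to start the drive scanner once it has connected.
      ipc.server.emit(socket, 'scan', {});
    });
    ipc.server.on('drives', (drives) => {
      // emitDrives() above sends the drive list as a JSON string.
      console.log('drives', JSON.parse(drives));
    });
    ipc.server.on('log', (message) => console.log(message));
    ipc.server.start();

    // Binary path is a placeholder; see the test wrapper below for the elevated variant.
    spawn('./generated/etcher-util', [], {
      env: { ...process.env, IPC_SERVER_ID, IPC_CLIENT_ID, IPC_SOCKET_ROOT: socketRoot },
      stdio: 'inherit',
    });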

View File

@@ -1,200 +0,0 @@
/*
* Copyright 2023 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file handles the writer process.
*/
import {
OnProgressFunction,
OnFailFunction,
decompressThenFlash,
DECOMPRESSED_IMAGE_PREFIX,
MultiDestinationProgress,
} from 'etcher-sdk/build/multi-write';
import { totalmem } from 'os';
import { cleanupTmpFiles } from 'etcher-sdk/build/tmp';
import {
File,
Http,
BlockDevice,
SourceDestination,
} from 'etcher-sdk/build/source-destination';
import { WriteResult, FlashError, WriteOptions } from './types/types';
import { isJson } from '../shared/utils';
import { toJSON } from '../shared/errors';
import axios from 'axios';
import { omit } from 'lodash';
import { emitLog, emitState, emitFail } from './api';
async function write(options: WriteOptions) {
/**
* @summary Failure handler (non-fatal errors)
* @param {SourceDestination} destination - destination
* @param {Error} error - error
*/
const onFail = (destination: SourceDestination, error: Error) => {
emitFail({
// TODO: device should be destination
// @ts-ignore (destination.drive is private)
device: destination.drive,
error: toJSON(error),
});
};
/**
* @summary Progress handler
* @param {Object} state - progress state
* @example
* writer.on('progress', onProgress)
*/
const onProgress = (state: MultiDestinationProgress) => {
emitState(state);
};
// Write the image to the destinations
const destinations = options.destinations.map((d) => d.device);
const imagePath = options.image.path;
emitLog(`Image: ${imagePath}`);
emitLog(`Devices: ${destinations.join(', ')}`);
emitLog(`Auto blockmapping: ${options.autoBlockmapping}`);
emitLog(`Decompress first: ${options.decompressFirst}`);
const dests = options.destinations.map((destination) => {
return new BlockDevice({
drive: destination,
unmountOnSuccess: true,
write: true,
direct: true,
});
});
const { SourceType } = options;
try {
let source;
if (options.image.drive) {
source = new BlockDevice({
drive: options.image.drive,
direct: !options.autoBlockmapping,
});
} else {
if (SourceType === File.name) {
source = new File({
path: imagePath,
});
} else {
const decodedImagePath = decodeURIComponent(imagePath);
if (isJson(decodedImagePath)) {
const imagePathObject = JSON.parse(decodedImagePath);
source = new Http({
url: imagePathObject.url,
avoidRandomAccess: true,
axiosInstance: axios.create(omit(imagePathObject, ['url'])),
auth: options.image.auth,
});
} else {
source = new Http({
url: imagePath,
avoidRandomAccess: true,
auth: options.image.auth,
});
}
}
}
const results = await writeAndValidate({
source,
destinations: dests,
verify: true,
autoBlockmapping: options.autoBlockmapping,
decompressFirst: options.decompressFirst,
onProgress,
onFail,
});
return results;
} catch (error: any) {
return { errors: [error] };
}
}
/** @summary clean up tmp files */
export async function cleanup(until: number) {
await cleanupTmpFiles(until, DECOMPRESSED_IMAGE_PREFIX);
}
/**
* @summary writes the source to the destinations and validates the writes
* @param {SourceDestination} source - source
* @param {SourceDestination[]} destinations - destinations
* @param {Boolean} verify - whether to validate the writes or not
* @param {Boolean} autoBlockmapping - whether to trim ext partitions before writing
* @param {Function} onProgress - function to call on progress
* @param {Function} onFail - function to call on fail
* @returns {Promise<{ bytesWritten, devices, errors} >}
*/
async function writeAndValidate({
source,
destinations,
verify,
autoBlockmapping,
decompressFirst,
onProgress,
onFail,
}: {
source: SourceDestination;
destinations: BlockDevice[];
verify: boolean;
autoBlockmapping: boolean;
decompressFirst: boolean;
onProgress: OnProgressFunction;
onFail: OnFailFunction;
}): Promise<WriteResult> {
const { sourceMetadata, failures, bytesWritten } = await decompressThenFlash({
source,
destinations,
onFail,
onProgress,
verify,
trim: autoBlockmapping,
numBuffers: Math.min(
2 + (destinations.length - 1) * 32,
256,
Math.floor(totalmem() / 1024 ** 2 / 8),
),
decompressFirst,
});
const result: WriteResult = {
bytesWritten,
devices: {
failed: failures.size,
successful: destinations.length - failures.size,
},
errors: [],
sourceMetadata,
};
for (const [destination, error] of failures) {
const err = error as FlashError;
const drive = destination as BlockDevice;
err.device = drive.device;
err.description = drive.description;
result.errors.push(err);
}
return result;
}
export { write };
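
Every entry in the errors array of a WriteResult is the original flash error annotated with the failing drive's device and description, so a caller can report failures per device. A small sketch of consuming such a result, assuming it sits next to the types module:

    import { WriteResult } from './types/types';

    function reportWriteResult(results: WriteResult) {
      const successful = results.devices?.successful ?? 0;
      const failed = results.devices?.failed ?? 0;
      console.log(
        `${successful} device(s) flashed, ${failed} failed, ${results.bytesWritten ?? 0} bytes written`,
      );
      for (const error of results.errors) {
        // device and description were attached per failing drive in writeAndValidate()
        console.error(`${error.device} (${error.description}): ${error.message}`);
      }
    }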

View File

@@ -1,180 +0,0 @@
import { scanner as driveScanner } from './drive-scanner';
import * as sdk from 'etcher-sdk';
import { DrivelistDrive } from '../shared/drive-constraints';
import outdent from 'outdent';
import { Dictionary, values, keyBy, padStart } from 'lodash';
import { emitDrives } from './api';
let availableDrives: DrivelistDrive[] = [];
export function hasAvailableDrives() {
return availableDrives.length > 0;
}
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
// otherwise we risk presenting the same error over
// and over again to the user, while also heavily
// spamming our error reporting service.
driveScanner.stop();
console.log('scanner error', error);
});
function setDrives(drives: Dictionary<DrivelistDrive>) {
availableDrives = values(drives);
emitDrives(drives);
}
function getDrives() {
return keyBy(availableDrives, 'device');
}
async function addDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
if (!(await driveIsAllowed(preparedDrive))) {
return;
}
const drives = getDrives();
drives[preparedDrive.device] = preparedDrive;
setDrives(drives);
}
function removeDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
const drives = getDrives();
delete drives[preparedDrive.device];
setDrives(drives);
}
async function driveIsAllowed(drive: {
devicePath: string;
device: string;
raw: string;
}) {
// const driveBlacklist = (await settings.get("driveBlacklist")) || [];
const driveBlacklist: any[] = [];
return !(
driveBlacklist.includes(drive.devicePath) ||
driveBlacklist.includes(drive.device) ||
driveBlacklist.includes(drive.raw)
);
}
type Drive =
| sdk.sourceDestination.BlockDevice
| sdk.sourceDestination.UsbbootDrive
| sdk.sourceDestination.DriverlessDevice;
function prepareDrive(drive: Drive) {
if (drive instanceof sdk.sourceDestination.BlockDevice) {
// @ts-ignore (BlockDevice.drive is private)
return drive.drive;
} else if (drive instanceof sdk.sourceDestination.UsbbootDrive) {
// This is a workaround for etcher expecting a device string and a size
// @ts-ignore
drive.device = drive.usbDevice.portId;
drive.size = null;
// @ts-ignore
drive.progress = 0;
drive.disabled = true;
drive.on('progress', (progress) => {
updateDriveProgress(drive, progress);
});
return drive;
} else if (drive instanceof sdk.sourceDestination.DriverlessDevice) {
const description =
COMPUTE_MODULE_DESCRIPTIONS[
drive.deviceDescriptor.idProduct.toString()
] || 'Compute Module';
return {
device: `${usbIdToString(
drive.deviceDescriptor.idVendor,
)}:${usbIdToString(drive.deviceDescriptor.idProduct)}`,
displayName: 'Missing drivers',
description,
mountpoints: [],
isReadOnly: false,
isSystem: false,
disabled: true,
icon: 'warning',
size: null,
link: 'https://www.raspberrypi.com/documentation/computers/compute-module.html#flashing-the-compute-module-emmc',
linkCTA: 'Install',
linkTitle: 'Install missing drivers',
linkMessage: outdent`
Would you like to download the necessary drivers from the Raspberry Pi Foundation?
This will open your browser.
Once opened, download and run the installer from the "Windows Installer" section to install the drivers
`,
};
}
}
/**
* @summary The radix used by USB ID numbers
*/
const USB_ID_RADIX = 16;
/**
* @summary The expected length of a USB ID number
*/
const USB_ID_LENGTH = 4;
/**
* @summary Convert a USB id (e.g. product/vendor) to a string
*
* @example
* console.log(usbIdToString(2652))
* > '0x0a5c'
*/
function usbIdToString(id: number): string {
return `0x${padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`;
}
function updateDriveProgress(
drive: sdk.sourceDestination.UsbbootDrive,
progress: number,
) {
const drives = getDrives();
// @ts-ignore
const driveInMap = drives[drive.device];
if (driveInMap) {
// @ts-ignore
drives[drive.device] = { ...driveInMap, progress };
setDrives(drives);
}
}
/**
* @summary Product ID of BCM2708
*/
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763;
/**
* @summary Product ID of BCM2710
*/
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764;
/**
* @summary Compute module descriptions
*/
const COMPUTE_MODULE_DESCRIPTIONS: Dictionary<string> = {
[USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
[USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3',
};
const startScanning = () => {
driveScanner.on('attach', (drive) => addDrive(drive));
driveScanner.on('detach', (drive) => removeDrive(drive));
driveScanner.start();
};
const stopScanning = () => {
driveScanner.stop();
};
export { startScanning, stopScanning };
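
For a driverless compute module the scanner synthesises the device id from the USB vendor and product ids and looks the description up by decimal product id. A worked example of that mapping, using the vendor id from the usbIdToString() JSDoc and the BCM2710 boot product id from the table above:

    import { padStart } from 'lodash';

    // Same conversion as usbIdToString() above.
    const usbIdToString = (id: number) => `0x${padStart(id.toString(16), 4, '0')}`;

    // Hypothetical descriptor: vendor 0x0a5c, product 0x2764 (BCM2710 boot).
    const deviceDescriptor = { idVendor: 0x0a5c, idProduct: 0x2764 };

    const device = `${usbIdToString(deviceDescriptor.idVendor)}:${usbIdToString(
      deviceDescriptor.idProduct,
    )}`; // -> '0x0a5c:0x2764'

    const COMPUTE_MODULE_DESCRIPTIONS: Record<string, string> = {
      [0x2763]: 'Compute Module 1',
      [0x2764]: 'Compute Module 3',
    };
    const description =
      COMPUTE_MODULE_DESCRIPTIONS[deviceDescriptor.idProduct.toString()] ||
      'Compute Module'; // -> 'Compute Module 3'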

View File

@@ -1,93 +0,0 @@
/** Get metadata for a source */
import { sourceDestination } from 'etcher-sdk';
import { replaceWindowsNetworkDriveLetter } from '../gui/app/os/windows-network-drives';
import axios, { AxiosRequestConfig } from 'axios';
import { isJson } from '../shared/utils';
import * as path from 'path';
import {
SourceMetadata,
Authentication,
Source,
} from '../shared/typings/source-selector';
import { DrivelistDrive } from '../shared/drive-constraints';
import { omit } from 'lodash';
function isString(value: any): value is string {
return typeof value === 'string';
}
async function createSource(
selected: string,
SourceType: Source,
auth?: Authentication,
) {
try {
selected = await replaceWindowsNetworkDriveLetter(selected);
} catch (error: any) {
// TODO: analytics.logException(error);
}
if (isJson(decodeURIComponent(selected))) {
const config: AxiosRequestConfig = JSON.parse(decodeURIComponent(selected));
return new sourceDestination.Http({
url: config.url!,
axiosInstance: axios.create(omit(config, ['url'])),
});
}
if (SourceType === 'File') {
return new sourceDestination.File({
path: selected,
});
}
return new sourceDestination.Http({ url: selected, auth });
}
async function getMetadata(
source: sourceDestination.SourceDestination,
selected: string | DrivelistDrive,
) {
const metadata = (await source.getMetadata()) as SourceMetadata;
const partitionTable = await source.getPartitionTable();
if (partitionTable) {
metadata.hasMBR = true;
metadata.partitions = partitionTable.partitions;
} else {
metadata.hasMBR = false;
}
if (isString(selected)) {
metadata.extension = path.extname(selected).slice(1);
metadata.path = selected;
}
return metadata;
}
async function getSourceMetadata(
selected: string | DrivelistDrive,
SourceType: Source,
auth?: Authentication,
) {
if (isString(selected)) {
const source = await createSource(selected, SourceType, auth);
try {
const innerSource = await source.getInnerSource();
const metadata = await getMetadata(innerSource, selected);
return metadata;
} catch (error: any) {
// TODO: handle error
} finally {
try {
await source.close();
} catch (error: any) {
// Noop
}
}
}
}
export { getSourceMetadata };
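
getSourceMetadata() takes a path or drive, a SourceType ('File', 'BlockDevice' or 'Http') and optional Authentication, and resolves to the image's extension, partition table and MBR flag. A minimal usage sketch, assuming it is imported from an adjacent module and that the path, URL and credentials are placeholders:

    import { getSourceMetadata } from './source-metadata';

    async function example() {
      // Local image file (path is a placeholder).
      const fileMeta = await getSourceMetadata('/tmp/image.img.xz', 'File');
      console.log(fileMeta?.extension, fileMeta?.hasMBR, fileMeta?.partitions?.length);

      // Remote image behind basic auth (URL and credentials are placeholders).
      const httpMeta = await getSourceMetadata('https://example.com/image.img', 'Http', {
        username: 'user',
        password: 'secret',
      });
      console.log(httpMeta?.path, httpMeta?.hasMBR);
    }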

View File

@@ -1,33 +0,0 @@
import { Metadata } from 'etcher-sdk/build/source-destination';
import { SourceMetadata } from '../../shared/typings/source-selector';
import { Drive as DrivelistDrive } from 'drivelist';
export interface WriteResult {
bytesWritten?: number;
devices?: {
failed: number;
successful: number;
};
errors: FlashError[];
sourceMetadata?: Metadata;
}
export interface FlashError extends Error {
description: string;
device: string;
code: string;
}
export interface FlashResults extends WriteResult {
skip?: boolean;
cancelled?: boolean;
}
interface WriteOptions {
image: SourceMetadata;
destinations: DrivelistDrive[];
autoBlockmapping: boolean;
decompressFirst: boolean;
SourceType: string;
httpRequest?: any;
}

npm-shrinkwrap.json (generated, 32837 lines changed)

File diff suppressed because it is too large

package-lock.json (generated, 36371 lines changed)

File diff suppressed because it is too large

View File

@@ -2,7 +2,7 @@
"name": "balena-etcher",
"private": true,
"displayName": "balenaEtcher",
"version": "1.18.13",
"version": "1.18.6",
"packageType": "local",
"main": "generated/etcher.js",
"description": "Flash OS images to SD cards and USB drives, safely and easily.",
@@ -13,9 +13,7 @@
"url": "git@github.com:balena-io/etcher.git"
},
"scripts": {
"build": "npm run webpack && npm run build:sidecar",
"build:rebuild-mountutils": "cd node_modules/mountutils && npm rebuild",
"build:sidecar": "npm run build:rebuild-mountutils && tsc --project tsconfig.sidecar.json && pkg build/util/api.js -c pkg-sidecar.json --target node18 --output generated/etcher-util",
"build": "npm run webpack",
"flowzone-preinstall-linux": "sudo apt-get update && sudo apt-get install -y xvfb libudev-dev && cat < electron-builder.yml | yq e .deb.depends[] - | xargs -L1 echo | sed 's/|//g' | xargs -L1 sudo apt-get --ignore-missing install || true",
"flowzone-preinstall-macos": "true",
"flowzone-preinstall-windows": "npx node-gyp install",
@@ -51,9 +49,9 @@
"author": "Balena Ltd. <hello@balena.io>",
"license": "Apache-2.0",
"devDependencies": {
"@babel/register": "^7.22.15",
"@balena/lint": "5.4.2",
"@balena/sudo-prompt": "9.2.1-workaround-windows-amperstand-in-username-0849e215b947987a643fe5763902aea201255534",
"@electron/fuses": "^1.6.1",
"@electron/remote": "^2.0.9",
"@fortawesome/fontawesome-free": "5.15.4",
"@sentry/electron": "^4.1.2",
@@ -79,7 +77,7 @@
"css-loader": "5.2.7",
"d3": "4.13.0",
"debug": "4.3.4",
"electron": "^25.8.2",
"electron": "^19.1.9",
"electron-builder": "^23.6.0",
"electron-mocha": "^11.0.2",
"electron-notarize": "1.2.2",
@@ -100,7 +98,6 @@
"omit-deep-lodash": "1.1.7",
"outdent": "0.8.0",
"path-is-inside": "1.0.2",
"pkg": "^5.8.1",
"pnp-webpack-plugin": "1.7.0",
"pretty-bytes": "5.6.0",
"react": "16.8.5",
@@ -127,9 +124,9 @@
"webpack-dev-server": "4.11.1"
},
"engines": {
"node": ">=18 <20"
"node": ">=16"
},
"versionist": {
"publishedAt": "2023-10-16T13:32:27.552Z"
"publishedAt": "2023-03-21T13:24:18.905Z"
}
}

View File

@@ -1,10 +0,0 @@
{
"assets": [
"node_modules/usb/**",
"node_modules/lzma-native/**",
"node_modules/drivelist/**",
"node_modules/mountutils/**",
"node_modules/winusb-driver-generator/**",
"node_modules/node-raspberrypi-usbboot/**"
]
}

View File

@@ -1,182 +0,0 @@
/*
* This is a test wrapper for etcher-utils.
* The only use for this file is debugging while developing etcher-utils.
* It will create an IPC server, spawn the CLI version of etcher-writer, and wait for it to connect.
* Requires elevated privileges to work (launch with sudo).
* Note that you'll need to edit the `ipc.server.on('ready', ...)` handler based on what you want to test.
*/
import * as ipc from 'node-ipc';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from './package.json';
import * as permissions from './lib/shared/permissions';
// if (process.argv.length !== 3) {
// console.error('Expects an image to flash as only arg!');
// process.exit(1);
// }
const THREADS_PER_CPU = 16;
// There might be multiple Etcher instances running at
// the same time, therefore we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
function writerArgv(): string[] {
const entryPoint = path.join('./generated/etcher-util');
return [entryPoint];
}
function writerEnv() {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT: ipc.config.socketRoot,
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
async function start(): Promise<any> {
ipc.serve();
return await new Promise((resolve, reject) => {
ipc.server.on('error', (message) => {
console.log('IPC server error', message);
});
ipc.server.on('log', (message) => {
console.log('log', message);
});
ipc.server.on('fail', ({ device, error }) => {
console.log('failure', error, device);
});
ipc.server.on('done', (event) => {
console.log('done', event);
});
ipc.server.on('abort', () => {
console.log('abort');
});
ipc.server.on('skip', () => {
console.log('skip');
});
ipc.server.on('state', (progress) => {
console.log('progress', progress);
});
ipc.server.on('drives', (drives) => {
console.log('drives', drives);
});
ipc.server.on('ready', (_data, socket) => {
console.log('ready');
ipc.server.emit(socket, 'scan', {});
// ipc.server.emit(socket, "hello", { message: "world" });
// ipc.server.emit(socket, "write", {
// image: {
// path: process.argv[2],
// displayName: "Random image for test",
// description: "Random image for test",
// SourceType: "File",
// },
// destinations: [
// {
// size: 15938355200,
// isVirtual: false,
// enumerator: "DiskArbitration",
// logicalBlockSize: 512,
// raw: "/dev/rdisk4",
// error: null,
// isReadOnly: false,
// displayName: "/dev/disk4",
// blockSize: 512,
// isSCSI: false,
// isRemovable: true,
// device: "/dev/disk4",
// busVersion: null,
// isSystem: false,
// busType: "USB",
// isCard: false,
// isUSB: true,
// devicePath:
// "IODeviceTree:/arm-io@10F00000/usb-drd1@2280000/usb-drd1-port-hs@01100000",
// mountpoints: [
// {
// path: "/Volumes/flash-rootB",
// label: "flash-rootB",
// },
// {
// path: "/Volumes/flash-rootA",
// label: "flash-rootA",
// },
// {
// path: "/Volumes/flash-boot",
// label: "flash-boot",
// },
// ],
// description: "Generic Flash Disk Media",
// isUAS: null,
// partitionTableType: "mbr",
// },
// ],
// SourceType: "File",
// autoBlockmapping: true,
// decompressFirst: true,
// });
});
const argv = writerArgv();
ipc.server.on('start', async () => {
console.log(`Elevating command: ${argv.join(' ')}`);
const env = writerEnv();
try {
await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
} catch (error: any) {
console.log('error', error);
// This happens when the child is killed using SIGKILL
const SIGKILL_EXIT_CODE = 137;
if (error.code === SIGKILL_EXIT_CODE) {
error.code = 'ECHILDDIED';
}
reject(error);
} finally {
console.log('Terminating IPC server');
}
resolve(true);
});
// Clear the update lock timer to prevent a longer flash
// from timing it out and releasing the lock
ipc.server.start();
});
}
start();
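
Besides 'scan' and 'write', the child also answers 'sourceMetadata' requests; note that both the request and the reply travel as JSON strings (see the handler in lib/util/api.ts above). A stripped-down variant of this wrapper that only exercises that channel might look like the sketch below, with the image path as a placeholder and the util binary started separately with matching IPC_* variables:

    import * as ipc from 'node-ipc';
    import * as os from 'os';
    import * as path from 'path';

    ipc.config.id = `etcher-server-${process.pid}`;
    ipc.config.socketRoot = path.join(process.env.XDG_RUNTIME_DIR || os.tmpdir(), path.sep);
    ipc.config.silent = true;

    ipc.serve();
    ipc.server.on('ready', (_data, socket) => {
      // The payload is a JSON string, matching the destructuring in lib/util/api.ts.
      ipc.server.emit(
        socket,
        'sourceMetadata',
        JSON.stringify({ selected: '/tmp/image.img', SourceType: 'File' }),
      );
    });
    ipc.server.on('sourceMetadata', (metadata) => {
      console.log('sourceMetadata', JSON.parse(metadata));
    });
    ipc.server.on('fail', (error) => console.error('fail', error));
    ipc.server.start();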

View File

@@ -1,18 +0,0 @@
{
"compilerOptions": {
"target": "ES2019",
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"typeRoots": ["./node_modules/@types", "./typings"],
"module": "CommonJS",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"outDir": "build"
},
"include": ["lib/util"]
}

View File

@@ -15,8 +15,12 @@
*/
import * as CopyPlugin from 'copy-webpack-plugin';
import { readdirSync } from 'fs';
import * as _ from 'lodash';
import * as os from 'os';
import outdent from 'outdent';
import * as path from 'path';
import { env } from 'process';
import * as SimpleProgressWebpackPlugin from 'simple-progress-webpack-plugin';
import * as TerserPlugin from 'terser-webpack-plugin';
import {
@@ -44,6 +48,24 @@ function externalPackageJson(packageJsonPath: string) {
};
}
function platformSpecificModule(
platform: string,
module: string,
replacement = '{}',
) {
// Resolves module on platform, otherwise resolves the replacement
return (
{ request }: { context: string; request: string },
callback: (error?: Error, result?: string, type?: string) => void,
) => {
if (request === module && os.platform() !== platform) {
callback(undefined, replacement);
return;
}
callback();
};
}
function renameNodeModules(resourcePath: string) {
// electron-builder excludes the node_modules folder even if you specifically include it
// Work around by renaming it to "modules"
@@ -52,11 +74,78 @@ function renameNodeModules(resourcePath: string) {
path
.relative(__dirname, resourcePath)
.replace('node_modules', 'modules')
// use the same name on all architectures so electron-builder can build a universal dmg on mac
.replace(LZMA_BINDINGS_FOLDER, LZMA_BINDINGS_FOLDER_RENAMED)
// file-loader expects posix paths, even on Windows
.replace(/\\/g, '/')
);
}
function findUsbPrebuild(): string[] {
const usbPrebuildsFolder = path.join('node_modules', 'usb', 'prebuilds');
const prebuildFolders = readdirSync(usbPrebuildsFolder);
let bindingFile: string | undefined = 'node.napi.node';
const platformFolder = prebuildFolders.find(
(f) => f.startsWith(os.platform()) && f.indexOf(os.arch()) > -1,
);
if (platformFolder === undefined) {
throw new Error(
'Could not find usb prebuild. Should try fallback to node-gyp and use /build/Release instead of /prebuilds',
);
}
const bindingFiles = readdirSync(
path.join(usbPrebuildsFolder, platformFolder),
);
if (!bindingFiles.length) {
throw new Error('Could not find usb prebuild for platform');
}
if (bindingFiles.length === 1) {
bindingFile = bindingFiles[0];
}
// armv6 vs v7 in linux-arm and
// glibc vs musl in linux-x64
if (bindingFiles.length > 1) {
bindingFile = bindingFiles.find((file) => {
if (bindingFiles.indexOf('arm') > -1) {
const process = require('process');
return file.indexOf(process.config.variables.arm_version) > -1;
} else {
return file.indexOf('glibc') > -1;
}
});
}
if (bindingFile === undefined) {
throw new Error('Could not find usb prebuild for platform');
}
return [platformFolder, bindingFile];
}
const [USB_BINDINGS_FOLDER, USB_BINDINGS_FILE] = findUsbPrebuild();
function findLzmaNativeBindingsFolder(): string {
const files = readdirSync(
path.join('node_modules', 'lzma-native', 'prebuilds'),
);
const bindingsFolder = files.find(
(f) =>
f.startsWith(os.platform()) &&
f.endsWith(env.npm_config_target_arch || os.arch()),
);
if (bindingsFolder === undefined) {
throw new Error('Could not find lzma_native binding');
}
return bindingsFolder;
}
const LZMA_BINDINGS_FOLDER = findLzmaNativeBindingsFolder();
const LZMA_BINDINGS_FOLDER_RENAMED = 'binding';
interface ReplacementRule {
search: string;
replace: string | (() => string);
@@ -133,10 +222,94 @@ const commonConfig = {
search: './adapters/xhr',
replace: './adapters/http',
}),
// remove bindings magic from drivelist
replace(
/node_modules\/drivelist\/js\/index\.js$/,
{
search: 'require("bindings");',
replace: "require('../build/Release/drivelist.node')",
},
{
search: "bindings('drivelist')",
replace: 'bindings',
},
),
replace(
/node_modules\/lzma-native\/index\.js$/,
// remove node-pre-gyp magic from lzma-native
{
search: `require('node-gyp-build')(__dirname);`,
replace: `require('./prebuilds/${LZMA_BINDINGS_FOLDER}/electron.napi.node')`,
},
// use regular stream module instead of readable-stream
{
search: "var stream = require('readable-stream');",
replace: "var stream = require('stream');",
},
),
// remove node-pre-gyp magic from usb
replace(/node_modules\/usb\/dist\/usb\/bindings\.js$/, {
search: `require('node-gyp-build')(path_1.join(__dirname, '..', '..'));`,
replace: `require('../../prebuilds/${USB_BINDINGS_FOLDER}/${USB_BINDINGS_FILE}')`,
}),
// remove bindings magic from mountutils
replace(/node_modules\/mountutils\/index\.js$/, {
search: outdent`
require('bindings')({
bindings: 'MountUtils',
/* eslint-disable camelcase */
module_root: __dirname
/* eslint-enable camelcase */
})
`,
replace: "require('./build/Release/MountUtils.node')",
}),
// remove bindings magic from winusb-driver-generator
replace(/node_modules\/winusb-driver-generator\/index\.js$/, {
search: outdent`
require('bindings')({
bindings: 'Generator',
/* eslint-disable camelcase */
module_root: __dirname
/* eslint-enable camelcase */
});
`,
replace: "require('./build/Release/Generator.node')",
}),
replace(/node_modules\/node-raspberrypi-usbboot\/build\/index\.js$/, {
search:
"return await readFile(Path.join(__dirname, '..', 'blobs', filename));",
replace: outdent`
const remote = require('@electron/remote');
return await readFile(
Path.join(
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
remote.app.getAppPath().replace(/\\.asar$/, () => process.platform === 'darwin' ? '-' + process.arch : ''),
'generated',
__dirname.replace('node_modules', 'modules'),
'..',
'blobs',
filename
)
);
`,
}),
// Copy native modules to generated folder
{
test: /\.node$/,
use: [
{
loader: 'native-addon-loader',
options: { name: renameNodeModules },
},
],
},
],
},
resolve: {
extensions: ['.js', '.json', '.ts', '.tsx'],
extensions: ['.node', '.js', '.json', '.ts', '.tsx'],
},
plugins: [
PnpWebpackPlugin,
@@ -168,9 +341,32 @@ const commonConfig = {
externals: [
// '../package.json' because we are in 'generated'
externalPackageJson('../package.json'),
// Only exists on windows
platformSpecificModule('win32', 'winusb-driver-generator'),
// Not needed but required by resin-corvus > os-locale > execa > cross-spawn
platformSpecificModule('none', 'spawn-sync'),
// Not needed as we replace all requires for it
platformSpecificModule('none', 'node-pre-gyp', '{ find: () => {} }'),
// Not needed as we replace all requires for it
platformSpecificModule('none', 'bindings'),
],
};
const guiConfigCopyPatterns = [
{
from: 'node_modules/node-raspberrypi-usbboot/blobs',
to: 'modules/node-raspberrypi-usbboot/blobs',
},
];
if (os.platform() === 'win32') {
// liblzma.dll is required on Windows for lzma-native
guiConfigCopyPatterns.push({
from: `node_modules/lzma-native/prebuilds/${LZMA_BINDINGS_FOLDER}/liblzma.dll`,
to: `modules/lzma-native/prebuilds/${LZMA_BINDINGS_FOLDER_RENAMED}/liblzma.dll`,
});
}
const guiConfig = {
...commonConfig,
target: 'electron-renderer',
@@ -181,6 +377,7 @@ const guiConfig = {
entry: {
gui: path.join(__dirname, 'lib', 'gui', 'app', 'renderer.ts'),
},
// entry: path.join(__dirname, 'lib', 'gui', 'app', 'renderer.ts'),
plugins: [
...commonConfig.plugins,
new CopyPlugin({
@@ -196,6 +393,7 @@ const guiConfig = {
banner: '__REACT_DEVTOOLS_GLOBAL_HOOK__ = { isDisabled: true };',
raw: true,
}),
new CopyPlugin({ patterns: guiConfigCopyPatterns }),
],
};
@@ -215,4 +413,17 @@ const etcherConfig = {
},
};
export default [guiConfig, etcherConfig];
const childWriterConfig = {
...mainConfig,
entry: {
'child-writer': path.join(
__dirname,
'lib',
'gui',
'modules',
'child-writer.ts',
),
},
};
export default [guiConfig, etcherConfig, childWriterConfig];
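
renameNodeModules() above rewrites emitted native-addon paths so that electron-builder does not strip them (node_modules becomes modules) and so the lzma-native binding folder carries one architecture-independent name. Worked on a hypothetical macOS arm64 path, with the project root passed in instead of __dirname:

    import * as path from 'path';

    const LZMA_BINDINGS_FOLDER = 'darwin-arm64'; // what findLzmaNativeBindingsFolder() would return here
    const LZMA_BINDINGS_FOLDER_RENAMED = 'binding';

    // Same replacement chain as renameNodeModules() above.
    function renameNodeModules(resourcePath: string, projectRoot: string): string {
      return path
        .relative(projectRoot, resourcePath)
        .replace('node_modules', 'modules')
        .replace(LZMA_BINDINGS_FOLDER, LZMA_BINDINGS_FOLDER_RENAMED)
        .replace(/\\/g, '/'); // file-loader expects posix paths, even on Windows
    }

    // renameNodeModules(
    //   '/repo/node_modules/lzma-native/prebuilds/darwin-arm64/electron.napi.node',
    //   '/repo',
    // ) -> 'modules/lzma-native/prebuilds/binding/electron.napi.node'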