Compare commits


1 commit

Author: Edwin Joassart
SHA1: ec7e0b745e
Message: patch: send sourcemap to sentry at build
Date: 2023-01-12 17:46:00 +01:00
61 changed files with 19730 additions and 35339 deletions


@@ -15,7 +15,7 @@ inputs:
default: "accounts+apple@balena.io"
NODE_VERSION:
type: string
default: "18.x"
default: "14.x"
VERBOSE:
type: string
default: "true"
@@ -31,17 +31,10 @@ runs:
path: ${{ runner.temp }}
- name: Extract custom source artifact
if: runner.os != 'Windows'
shell: pwsh
working-directory: .
run: tar -xf ${{ runner.temp }}/custom.tgz
- name: Extract custom source artifact
if: runner.os == 'Windows'
shell: pwsh
working-directory: .
run: C:\"Program Files"\Git\usr\bin\tar.exe --force-local -xf ${{ runner.temp }}\custom.tgz
- name: Setup Node.js
uses: actions/setup-node@v3
with:
@@ -53,6 +46,55 @@ runs:
run: choco install yq
if: runner.os == 'Windows'
# FIXME: resinci-deploy is not actively maintained
# https://github.com/product-os/resinci-deploy
- name: Checkout resinci-deploy
uses: actions/checkout@v3
with:
repository: product-os/resinci-deploy
token: ${{ fromJSON(inputs.secrets).FLOWZONE_TOKEN }}
path: resinci-deploy
- name: Build and install resinci-deploy
shell: bash --noprofile --norc -eo pipefail -x {0}
run: |
set -ea
[[ '${{ inputs.VERBOSE }}' =~ on|On|Yes|yes|true|True ]] && set -x
runner_os="$(echo "${RUNNER_OS}" | tr '[:upper:]' '[:lower:]')"
rm -rf ../resinci-deploy && mv resinci-deploy ..
pushd ../resinci-deploy && npm ci && npm link && popd
if [[ $runner_os =~ linux|macos ]]; then
chmod +x "$(dirname "$(which node)")/resinci-deploy" && which resinci-deploy
fi
# Upload sourcemaps to sentry
- name: Generate Sentry DSN
id: sentry
shell: bash --noprofile --norc -eo pipefail -x {0}
run: |
set -ea
[[ '${{ inputs.VERBOSE }}' =~ on|On|Yes|yes|true|True ]] && set -x
branch="$(echo '${{ github.event.pull_request.head.ref }}' | sed 's/[^[:alnum:]]/-/g')"
stdout="$(resinci-deploy store sentry \
--branch="${branch}" \
--name="$(jq -r '.name' package.json)" \
--team="$(yq e '.sentry.team' repo.yml)" \
--org="$(yq e '.sentry.org' repo.yml)" \
--type="$(yq e '.sentry.type' repo.yml)")"
echo "dsn=$(echo "${stdout}" | tail -n 1)" >> $GITHUB_OUTPUT
env:
SENTRY_TOKEN: ${{ fromJSON(inputs.secrets).SENTRY_AUTH_TOKEN }}
# https://www.electron.build/code-signing.html
# https://github.com/Apple-Actions/import-codesign-certs
- name: Import Apple code signing certificate
@@ -126,8 +168,8 @@ runs:
for target in ${TARGETS}; do
electron-builder ${ELECTRON_BUILDER_OS} ${target} ${ARCHITECTURE_FLAGS} \
--c.extraMetadata.analytics.sentry.token='https://739bbcfc0ba4481481138d3fc831136d@o95242.ingest.sentry.io/4504451487301632' \
--c.extraMetadata.analytics.amplitude.token='balena-etcher' \
--c.extraMetadata.analytics.sentry.token='${{ steps.sentry.outputs.dsn }}' \
--c.extraMetadata.analytics.mixpanel.token='balena-etcher' \
--c.extraMetadata.packageType="${target}"
find dist -type f -maxdepth 1
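The electron-builder flags above inject the Sentry DSN generated in the earlier step into the packaged `package.json` under `analytics.sentry.token` (via `--c.extraMetadata`). The analytics module that consumes this value is not part of this excerpt; a minimal sketch of how the injected token might be read at runtime, assuming `@sentry/electron` is used for reporting, could look like this:

```typescript
// Hypothetical sketch (not the repository's actual analytics module):
// read the DSN that electron-builder injected into package.json via
// --c.extraMetadata.analytics.sentry.token and initialise Sentry with it.
import { init } from '@sentry/electron';
import * as packageJSON from '../../../package.json';

export function initAnalytics() {
	// extraMetadata ends up in the packaged package.json, so the token is a
	// plain property at runtime; the field path mirrors the builder flag.
	const dsn = (packageJSON as any)?.analytics?.sentry?.token;
	if (dsn) {
		init({ dsn, release: (packageJSON as any).version });
	}
}
```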
@@ -161,6 +203,16 @@ runs:
-name "latest*.yml" \
-exec yq -i e .stagingPercentage=\"$percentage\" {} \;
- name: Upload sourcemap to Sentry
shell: bash --noprofile --norc -eo pipefail -x {0}
run: |
VERSION=${{ steps.package_release.outputs.version }} npm run uploadSourcemap
env:
SENTRY_AUTH_TOKEN: ${{ fromJSON(inputs.secrets).SENTRY_AUTH_TOKEN }}
npm_config_SENTRY_ORG: balenaEtcher
npm_config_SENTRY_PROJECT: balenaetcher
npm_config_SENTRY_VERSION: ${{ steps.package_release.outputs.version }}
- name: Upload artifacts
uses: actions/upload-artifact@v3
with:

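The new `Upload sourcemap to Sentry` step runs `npm run uploadSourcemap` with the release version and the Sentry org/project passed through `npm_config_*` variables. The script itself is not included in this diff; a minimal sketch of what it could look like, assuming it uses the `@sentry/cli` Node API and that webpack writes the bundles and `.js.map` files to `generated/`, is:

```typescript
// Hypothetical uploadSourcemap script (the real one is not shown in this diff).
// Assumes @sentry/cli is available and that sourcemaps live under ./generated.
import SentryCli = require('@sentry/cli');

async function main() {
	const release =
		process.env.npm_config_SENTRY_VERSION ?? process.env.VERSION;
	if (!release) {
		throw new Error('VERSION / npm_config_SENTRY_VERSION must be set');
	}
	// SENTRY_AUTH_TOKEN, npm_config_SENTRY_ORG and npm_config_SENTRY_PROJECT
	// are provided by the workflow step's env block; sentry-cli reads the
	// SENTRY_ORG / SENTRY_PROJECT variables.
	process.env.SENTRY_ORG = process.env.npm_config_SENTRY_ORG;
	process.env.SENTRY_PROJECT = process.env.npm_config_SENTRY_PROJECT;

	const cli = new SentryCli();
	await cli.releases.new(release);
	await cli.releases.uploadSourceMaps(release, {
		include: ['generated'],
		urlPrefix: '~/generated',
	});
	await cli.releases.finalize(release);
}

main().catch((error) => {
	console.error(error);
	process.exit(1);
});
```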

@@ -12,7 +12,7 @@ inputs:
# --- custom environment
NODE_VERSION:
type: string
default: "16.x"
default: "14.x"
VERBOSE:
type: string
default: "true"
@@ -47,15 +47,9 @@ runs:
ELECTRON_NO_ATTACH_CONSOLE: true
- name: Compress custom source
if: runner.os != 'Windows'
shell: pwsh
run: tar -acf ${{ runner.temp }}/custom.tgz .
- name: Compress custom source
if: runner.os == 'Windows'
shell: pwsh
run: C:\"Program Files"\Git\usr\bin\tar.exe --force-local -acf ${{ runner.temp }}\custom.tgz .
- name: Upload custom artifact
uses: actions/upload-artifact@v3
with:


@@ -1,4 +1,5 @@
name: Flowzone
on:
pull_request:
types: [opened, synchronize, closed]
@@ -7,6 +8,7 @@ on:
pull_request_target:
types: [opened, synchronize, closed]
branches: [main, master]
jobs:
flowzone:
name: Flowzone
@@ -21,4 +23,7 @@ jobs:
tests_run_on: '["ubuntu-20.04","macos-latest","windows-2019"]'
restrict_custom_actions: false
github_prerelease: true
cloudflare_website: "etcher"
repo_config: true
repo_description: "Flash OS images to SD cards & USB drives, safely and easily."
repo_homepage: https://etcher.io/
repo_enable_wiki: true

.gitignore

@@ -28,7 +28,6 @@ pids
# Generated files
/generated
/binaries
# Dependency directory
# https://docs.npmjs.com/misc/faq#should-i-check-my-node-modules-folder-into-git

.nvmrc

@@ -1 +1 @@
16
14


@@ -1,485 +1,3 @@
- commits:
- subject: "patch: upgrade to electron 25"
hash: f38bca290fe26121bed58d1131265e1aa350ddb5
body: ""
footer: {}
author: Edwin Joassart
nested: []
- subject: "patch: refactor scanner, loader and flasher out of gui + upgrade to
electron 25"
hash: fb8ed5b529e22bc9e766bfe99c2b6955ed695b58
body: ""
footer: {}
author: Edwin Joassart
nested: []
version: 1.18.13
title: ""
date: 2023-10-16T13:32:26.738Z
- commits:
- subject: Update instructions for installing deb file
hash: acab03ad77a1c1901d0c8a65999e93c1d27169a0
body: ""
footer:
Change-type: patch
change-type: patch
author: Jorge Capona
nested: []
version: 1.18.12
title: ""
date: 2023-07-19T10:24:22.407Z
- commits:
- subject: "fix: prevent stealing window focus from auth dialog"
hash: f716c74ef7cb164b4d825828e4e46033484ad9af
body: ""
footer:
Change-type: patch
change-type: patch
author: leadpogrommer
nested: []
version: 1.18.11
title: ""
date: 2023-07-13T14:31:40.021Z
- commits:
- subject: "spelling: validates"
hash: 06d246e3fd1c573b9e04d23ab3bc3c4036fb9859
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
author: Josh Soref
nested: []
- subject: "spelling: undefined"
hash: 67b26a5b69f819066c6419d3d915846b63fdbcf0
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
author: Josh Soref
nested: []
- subject: "spelling: except if"
hash: b4b9db7ffa2104c19e7bd079e4f394a817f40bc0
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
signed-off-by: Josh Soref <jsoref@users.noreply.github.com>
author: Josh Soref
nested: []
version: 1.18.10
title: ""
date: 2023-07-12T11:21:59.231Z
- commits:
- subject: Fix opening links from within SafeWebView
hash: 497bb0e2cbefad3e9a1188ee5df49cf61f6bd6e4
body: ""
footer:
Change-type: patch
change-type: patch
author: Akis Kesoglou
nested: []
version: 1.18.9
title: ""
date: 2023-07-12T09:07:17.666Z
- commits:
- subject: "Patch: Fix Support link"
hash: 882b385c88111a192e5f37e20c1c8aeca9950b21
body: ""
footer: {}
author: Oliver Plummer
nested: []
version: 1.18.8
title: ""
date: 2023-04-26T09:57:46.155Z
- commits:
- subject: "patch: update docs to remove cloudsmith install instructions for linux"
hash: 02a406711852cf237e41da4cd39350d8acc1f0b0
body: ""
footer: {}
author: Edwin Joassart
nested: []
version: 1.18.7
title: ""
date: 2023-04-25T15:25:35.584Z
- commits:
- subject: add-flash-with-etcher-to-docs
hash: 856b426dc98925f5e339976a5cac144f4bb4ea59
body: ""
footer:
Change-type: patch
change-type: patch
author: Lizzie Epton
nested: []
version: 1.18.6
title: ""
date: 2023-03-21T13:24:18.265Z
- commits:
- subject: "patch: add apt-get update in flowzone preinstall"
hash: 0d9ac710880e6b9413b09e4c35a505034d1e9d51
body: libudev package has changed and cannot be installed if we not update apt
cache
footer: {}
author: Edwin Joassart
nested: []
version: 1.18.5
title: ""
date: 2023-03-09T11:30:34.540Z
- commits:
- subject: "patch: bump etcher-sdk to 8.3.1"
hash: bf0360e7f46ac620f95021e0c48a3a04d302e725
body: ""
footer: {}
author: JOASSART Edwin
nested: []
version: 1.18.4
title: ""
date: 2023-03-02T17:31:31.788Z
- commits:
- subject: fix-typo
hash: 496f131c4b024dfcd17fde5173016f70c0d0599c
body: ""
footer:
Change-type: patch
change-type: patch
author: Lizzie Epton
nested: []
- subject: edits-to-info-about-efp
hash: f582b0215c2cf66acf652afdaa47353e1a7eac07
body: ""
footer:
Change-type: patch
change-type: patch
author: Lizzie Epton
nested: []
- subject: Add reference to etcher-efp in publishing.md
hash: 4c3c4babea5efdadbed7ba0df85f08b68a7b6f20
body: |
Add reference to etcher-efp in publishing.md
footer:
Change-type: patch
change-type: patch
author: Edwin Joassart
nested: []
version: 1.18.3
title: ""
date: 2023-02-22T12:12:40.270Z
- commits:
- subject: "patch: organize docs"
hash: e479b95d72bed6a50ae6a971598a18d8a7562f0d
body: ""
footer: {}
author: mcraa
nested: []
- subject: "patch: actualized develop guide"
hash: 926ff2b7549d8b187b18ee452ce48c62f6cd3531
body: ""
footer: {}
author: mcraa
nested: []
- subject: "patch: updated commit message guide"
hash: 394b64319de11b1010b8acfe160de13a6f3851cd
body: ""
footer: {}
author: mcraa
nested: []
- subject: add-item-from-FAQs
hash: 96fa53b6ee4ec7a29522df488b927074c0f301ca
body: ""
footer:
Change-type: patch
change-type: patch
author: Lizzie Epton
nested: []
- subject: "patch: removed gt characters from contributing guide"
hash: 9b54e2af0b9356bb73e197cccbcc2ff89673361f
body: ""
footer: {}
author: mcraa
nested: []
- subject: "patch: added docosaurus site name"
hash: b01cf3c2e1c3a7a234c8b957bd570ecdca81e0c1
body: ""
footer: {}
author: mcraa
nested: []
version: 1.18.2
title: ""
date: 2023-02-21T13:17:09.606Z
- commits:
- subject: "patch: use @electron/remote for locating rpiboot files"
hash: 04fa3dcd8c619dce927221cef5799b5210354d2e
body: ""
footer: {}
author: mcraa
nested: []
version: 1.18.1
title: ""
date: 2023-02-15T14:54:45.951Z
- commits:
- subject: Update to Electron 19
hash: c11db0a2797a6b1093dd3fa6f55bee5f100c6da4
body: ""
footer:
Change-type: minor
change-type: minor
author: Akis Kesoglou
nested: []
- subject: Remove Spectron and related (low-value) tests
hash: 6f7570d265e4b457afe832d00e5f45e0bf5a8a53
body: >
Spectron is long deprecated and abandoned and the browser tests are so
rudimentary that its no longer worth having them around. We will
introduce a proper browser-based test suite in the short term — its a
project in progress.
footer:
Change-type: minor
change-type: minor
author: Akis Kesoglou
nested: []
version: 1.18.0
title: ""
date: 2023-02-14T18:07:05.870Z
- commits:
- subject: Update to Electron 17 and Node 16
hash: 3c1dd6ce29ddf43ef35e58236d25713fa2026c10
body: |
This is the latest Electron version officially supported by Spectron.
footer:
Change-type: minor
change-type: minor
author: Akis Kesoglou
nested: []
version: 1.17.0
title: ""
date: 2023-02-14T16:18:54.834Z
- commits:
- subject: Update to Electron 14
hash: df7854111a901b620e3284edf10768d308ce7755
body: ""
footer:
Change-type: minor
change-type: minor
author: Akis Kesoglou
nested: []
version: 1.16.0
title: ""
date: 2023-02-14T12:40:40.820Z
- commits:
- subject: "patch: app: i18n: Translation: Update zh-TW strings * Improve
translate. * Sync layout with English strings ts file."
hash: b51418814f5ef48d09e3157c92bda5eab173dbd5
body: ""
footer:
Signed-off-by: Edward Wu <bluehome.wu@gmail.com>
signed-off-by: Edward Wu <bluehome.wu@gmail.com>
author: Edward Wu
nested: []
version: 1.15.6
title: ""
date: 2023-02-13T11:23:13.079Z
- commits:
- subject: revert auto-update feature
hash: e6d33eda2b8f767679a43f8056e20098b0f2f6d9
body: ""
footer:
Change-type: patch
change-type: patch
author: JOASSART Edwin
nested: []
version: 1.15.5
title: ""
date: 2023-02-03T14:25:19.611Z
- commits:
- subject: Switch to `@electron/remote`
hash: 7ee174edcecbfc2d7370db6d4185b3ee4eedbe28
body: >
Electron 12 deprecated `electron.remote` and the functionality was
removed in Electron 14, but became available as a separate
`@electron/remote` module. This commit makes the transition to the
external module as an intermediary step to enable updating to a newer
Electron version.
footer:
Change-type: patch
change-type: patch
author: Akis Kesoglou
nested: []
version: 1.15.4
title: ""
date: 2023-02-02T18:26:45.877Z
- commits:
- subject: move EFP & success-banner to efp.balena.io
hash: a140faaebe087a96387604f12c3510ee22374d92
body: ""
footer:
Change-type: patch
change-type: patch
author: Edwin Joassart
nested: []
version: 1.15.3
title: ""
date: 2023-02-02T17:23:16.454Z
- commits:
- subject: Remove configuration remote update
hash: 85a49a221fa7fc9b1943dc8ed43b29995f9d8260
body: ""
footer:
Change-type: patch
change-type: patch
author: Edwin Joassart
nested: []
version: 1.15.2
title: ""
date: 2023-02-02T13:05:01.310Z
- commits:
- subject: Remove redundant resinci-deploy build step
hash: 48ddafd120cc9cd4fb94c0d6f7530a14be46f28d
body: ""
footer:
Change-type: patch
change-type: patch
author: Akis Kesoglou
nested: []
- subject: Lazily import Electron from child-writer process
hash: 851219f835ed037d9fd970f538095e4b339c5342
body: >
No idea how this *used* to work, but it doesnt since 887ec428 and this
is fixing it properly.
footer:
Change-type: patch
change-type: patch
author: Akis Kesoglou
nested: []
version: 1.15.1
title: ""
date: 2023-02-01T12:18:55.922Z
- commits:
- subject: Add support for Node 18
hash: 887ec42847acbd4a935b4e9ed6abb2b8d87058ce
body: >
The Electron version were currently using is on Node 14 but this is a
step forward to upgrading to a newer Electron and Node version.
Updates etcher-sdk and switches the redundant aws4-axios dependency to just axios.
Also changed bundler to stop trying to bundle wasm files — they must be included inline with JS code as data — and removed some now redundant code.
The crucial changes that enable support are:
1. The update to etcher-sdk@8 where some dependency fixes and updates took place
2. The downgrade and pinning of "electron-rebuild" to v3.2.3 until were able to update to Electron >= 14.2. The patch we need to avoid is https://github.com/electron/rebuild/pull/907. Also see: https://github.com/nodejs/node-gyp/issues/2673 and https://github.com/electron/rebuild/issues/913
3. A rule in webpack.config to ignore `aws-crt` which is a dependency of (ultimately) `aws4-axios` which is used by etcher-sdk and does a runtime check to its availability. Were not currently using the “assume role” functionality (AFAIU) of aws4-axios and we dont care that its not found, so force webpack to ignore the import. See https://github.com/aws/aws-sdk-js-v3/issues/3025
footer:
Change-type: minor
change-type: minor
author: Akis Kesoglou
nested: []
version: 1.15.0
title: ""
date: 2023-01-27T11:36:32.980Z
- commits:
- subject: "patch: fixed mac sudo on other languages"
hash: 19d1e093fc2b1588492c9868f7604ee15ab3fd5b
body: ""
footer: {}
author: Peter Makra
nested: []
version: 1.14.3
title: ""
date: 2023-01-19T12:21:02.651Z
- commits:
- subject: "patch: revert to lockfile v1"
hash: 72af77860bee3685635c9f4db602c2a07e825037
body: ""
footer: {}
author: Peter Makra
nested: []
- subject: "patch: update etcher-sdk for cm4v5"
hash: 8e63be2efecada2ad6abd9d9d7728859e2c30ebc
body: ""
footer:
Change-type: patch
change-type: patch
author: builder555
nested: []
version: 1.14.2
title: ""
date: 2023-01-17T14:37:41.555Z
- commits:
- subject: fix disabled-screensaver unhandled exception outside balena-electron env
hash: 46c406e8c1e3b5e41890aff7f65b1711e4426782
body: ""
footer:
Change-type: patch
change-type: patch
author: Edwin Joassart
nested: []
version: 1.14.1
title: ""
date: 2023-01-16T13:22:36.972Z
- commits:
- subject: Anonymizes all paths before sending
hash: 86d43a536f7c9aa6b450a9f2f90341e07364208e
body: ""
footer:
Change-type: patch
change-type: patch
author: Otávio Jacobi
nested: []
- subject: "patch: Sentry fix path"
hash: 6c417e35a13873cd95d25f42a819de3750cdf65d
body: ""
footer: {}
author: Edwin Joassart
nested: []
- subject: Remove personal path on etcher
hash: 2b728d3c521b76177a2c019b4891627272f35aac
body: ""
footer:
Change-type: minor
change-type: minor
author: Otávio Jacobi
nested: []
- subject: Unifying sentry reports in a single project
hash: f3f7ecb852503d4d97dbe6a78bf920ca177bddd1
body: ""
footer:
Change-type: patch
change-type: patch
author: Edwin Joassart
nested: []
- subject: Removes corvus in favor of sentry and analytics client
hash: 41fca03c98d4a72bd8c3842d7e6b9d41f65336f9
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Otavio Jacobi
signed-off-by: Otavio Jacobi
author: Otávio Jacobi
nested: []
- subject: Removes corvus in favor of sentry and analytics client
hash: 10caf8f1b6a174762192b13ce7bb4eaa71e90fcc
body: ""
footer:
Change-type: patch
change-type: patch
Signed-off-by: Otavio Jacobi
signed-off-by: Otavio Jacobi
author: Otávio Jacobi
nested: []
version: 1.14.0
title: ""
date: 2023-01-16T11:23:54.866Z
- commits:
- subject: Adding EtcherPro device serial number to the Settings modal
hash: d25eda9a7d6bf89284b630b2d55cbb0a7e3a9432
@@ -1292,8 +810,7 @@
- subject: Fixing call to electron block screensaver methods invocation
hash: 1b5b64713505dfb69448bc2184839b4c23bd677b
body: >
Replacing `send` calls to `invoke` for `enable/disable-screensaver`
calls.
Replacing `send` calls to `invoke` for `enable/disable-screensaver` calls.
footer:
Change-type: patch
change-type: patch
@@ -2045,11 +1562,11 @@
- subject: Add support for basic auth when downloading images from URL.
hash: b2d0c1c9ddbbfe87d5a905d420d615821610e825
body: >
When selecting "Flash from URL" the user can optionally provide a
username and password for basic authentication. The authentication input
fields are collapsed by default. When the authentication input fields
are collapsed after entering values the values are cleared to ensure
that the user sees all parameter passed to the server.
When selecting "Flash from URL" the user can optionally provide a username
and password for basic authentication. The authentication input fields
are collapsed by default. When the authentication input fields are
collapsed after entering values the values are cleared to ensure that
the user sees all parameter passed to the server.
footer:
Change-Type: minor
change-type: minor
@@ -2606,8 +2123,7 @@
- subject: Ignore ENOENT errors on unlink in withTmpFile
hash: 7bb2a23c4e94dcda6a7b494fe0435c0b59b56b06
body: >
The temporary file might have been already deleted by
cleanupTmpFiles
The temporary file might have been already deleted by cleanupTmpFiles
footer:
Change-type: patch
change-type: patch
@@ -2677,8 +2193,7 @@
- subject: Pass strings between methods as std::string instead of char *
hash: 1ec6a8ffc4c9e138b78210f0db84a9ebd6c9182b
body: >
- Fixes "basic_string::_M_construct null not valid"
exception
- Fixes "basic_string::_M_construct null not valid" exception
aborting program, because WCharToUtf8() returned NULL
in some cases, and NULL was being fed to string constructor.
- Fixes memory leak because memory allocated with calloc()
@@ -4766,8 +4281,8 @@
change-type: patch
subject: Fixes the Command for macOS drive recovery
body: >-
Changes the documentation to update the disktutil command which didn't
fix my case, cause the boot partition was broken.
Changes the documentation to update the disktutil command which didn't fix
my case, cause the boot partition was broken.
This way it rewrites the drive into a FAT32 partition editable in Unix/Windows.
- hash: b3f25c176b1bdb487d1a7bf111d7f170fe008842
@@ -7696,8 +7211,7 @@
changelog-entry: Add support for configuration files
subject: "feat(gui): Add ability to read settings from a config file"
body: >-
This adds the capability to configure settings via a `.etcher.json`
file,
This adds the capability to configure settings via a `.etcher.json` file,
either in the user's home directory, or the current working directory.
@@ -7815,8 +7329,7 @@
change-type: patch
subject: "doc: Update MAINTAINERS.md with Symantec Whitelisting"
body: >-
This adds instructions for submitting Etcher for false positive
detection
This adds instructions for submitting Etcher for false positive detection
to Symantec Endpoint Protection.
- hash: bb2dac75040554c0ba2c7e50ff9ecd61608e7d38
@@ -7992,8 +7505,7 @@
change-type: patch
subject: "fix(image-writer): Remove use of _.isError"
body: >-
`_.isError()` returns `true` for anything that has a `name` and
`message`
`_.isError()` returns `true` for anything that has a `name` and `message`
property, causing the check here to always keep the plain object as error.
- hash: 355373f24df6be0989fad9429c2230166b33a3bf
@@ -8009,8 +7521,7 @@
change-type: patch
subject: "upgrade(package): Update drivelist 6.1.5 -> 6.1.7"
body: >-
This fixes a ReferenceError that could occur when the DeviceNode was
null,
This fixes a ReferenceError that could occur when the DeviceNode was null,
as well as devices being null when run after the system recovers from sleep / standby.
- hash: 6e7484d3dabc2aeaa7cd471822d7019860cc4a5c
@@ -8846,8 +8357,7 @@
changelog-entry: Remove stale `invalidKey` check in store.
subject: "refactor: remove stale invalid key check in store"
body: >-
We remove a piece of code checking whether `_.keys` returns any
non-string
We remove a piece of code checking whether `_.keys` returns any non-string
values in its array, but per the Lodash documentation `_.keys` always returns an
@@ -8875,8 +8385,7 @@
changelog-entry: Make the drive-selector button orange on warnings.
subject: "feat(GUI): warning makes drive-selector button orange"
body: >-
We make the drive-selector button orange when there is a warning
attached
We make the drive-selector button orange when there is a warning attached
to the image-drive pair.
- hash: 4ce89f97fe02d714ce7f247a6a03ad6d326c3a8a


@@ -3,159 +3,6 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
# v1.18.13
## (2023-10-16)
* patch: upgrade to electron 25 [Edwin Joassart]
* patch: refactor scanner, loader and flasher out of gui + upgrade to electron 25 [Edwin Joassart]
# v1.18.12
## (2023-07-19)
* Update instructions for installing deb file [Jorge Capona]
# v1.18.11
## (2023-07-13)
* fix: prevent stealing window focus from auth dialog [leadpogrommer]
# v1.18.10
## (2023-07-12)
* spelling: validates [Josh Soref]
* spelling: undefined [Josh Soref]
* spelling: except if [Josh Soref]
# v1.18.9
## (2023-07-12)
* Fix opening links from within SafeWebView [Akis Kesoglou]
# v1.18.8
## (2023-04-26)
* Patch: Fix Support link [Oliver Plummer]
# v1.18.7
## (2023-04-25)
* patch: update docs to remove cloudsmith install instructions for linux [Edwin Joassart]
# v1.18.6
## (2023-03-21)
* add-flash-with-etcher-to-docs [Lizzie Epton]
# v1.18.5
## (2023-03-09)
* patch: add apt-get update in flowzone preinstall [Edwin Joassart]
# v1.18.4
## (2023-03-02)
* patch: bump etcher-sdk to 8.3.1 [JOASSART Edwin]
# v1.18.3
## (2023-02-22)
* fix-typo [Lizzie Epton]
* edits-to-info-about-efp [Lizzie Epton]
* Add reference to etcher-efp in publishing.md [Edwin Joassart]
# v1.18.2
## (2023-02-21)
* patch: organize docs [mcraa]
* patch: actualized develop guide [mcraa]
* patch: updated commit message guide [mcraa]
* add-item-from-FAQs [Lizzie Epton]
* patch: removed gt characters from contributing guide [mcraa]
* patch: added docosaurus site name [mcraa]
# v1.18.1
## (2023-02-15)
* patch: use @electron/remote for locating rpiboot files [mcraa]
# v1.18.0
## (2023-02-14)
* Update to Electron 19 [Akis Kesoglou]
* Remove Spectron and related (low-value) tests [Akis Kesoglou]
# v1.17.0
## (2023-02-14)
* Update to Electron 17 and Node 16 [Akis Kesoglou]
# v1.16.0
## (2023-02-14)
* Update to Electron 14 [Akis Kesoglou]
# v1.15.6
## (2023-02-13)
* patch: app: i18n: Translation: Update zh-TW strings * Improve translate. * Sync layout with English strings ts file. [Edward Wu]
# v1.15.5
## (2023-02-03)
* revert auto-update feature [JOASSART Edwin]
# v1.15.4
## (2023-02-02)
* Switch to `@electron/remote` [Akis Kesoglou]
# v1.15.3
## (2023-02-02)
* move EFP & success-banner to efp.balena.io [Edwin Joassart]
# v1.15.2
## (2023-02-02)
* Remove configuration remote update [Edwin Joassart]
# v1.15.1
## (2023-02-01)
* Remove redundant resinci-deploy build step [Akis Kesoglou]
* Lazily import Electron from child-writer process [Akis Kesoglou]
# v1.15.0
## (2023-01-27)
* Add support for Node 18 [Akis Kesoglou]
# v1.14.3
## (2023-01-19)
* patch: fixed mac sudo on other languages [Peter Makra]
# v1.14.2
## (2023-01-17)
* patch: revert to lockfile v1 [Peter Makra]
* patch: update etcher-sdk for cm4v5 [builder555]
# v1.14.1
## (2023-01-16)
* fix disabled-screensaver unhandled exception outside balena-electron env [Edwin Joassart]
# v1.14.0
## (2023-01-16)
* Anonymizes all paths before sending [Otávio Jacobi]
* patch: Sentry fix path [Edwin Joassart]
* Remove personal path on etcher [Otávio Jacobi]
* Unifying sentry reports in a single project [Edwin Joassart]
* Removes corvus in favor of sentry and analytics client [Otávio Jacobi]
* Removes corvus in favor of sentry and analytics client [Otávio Jacobi]
# v1.13.4
## (2023-01-12)


@@ -44,9 +44,3 @@ Etcher requires an available [polkit authentication agent](https://wiki.archlinu
## May I run Etcher in older macOS versions?
Etcher GUI is based on the [Electron](http://electron.atom.io/) framework, [which only supports macOS 10.10 and newer versions](https://github.com/electron/electron/blob/master/docs/tutorial/support.md#supported-platforms).
## Can I use the Flash With Etcher button on my site?
You can use the Flash with Etcher button on your site or blog. If you have an OS that you want your users to be able to easily flash using Etcher, add the following code where you want the button to be:
`<a href="https://efp.balena.io/open-image-url?imageUrl=<your image URL>"><img src="http://balena.io/flash-with-etcher.png" /></a>`

README.md

@@ -32,32 +32,128 @@ installers for all supported operating systems.
## Packages
> [![Hosted By: Cloudsmith](https://img.shields.io/badge/OSS%20hosting%20by-cloudsmith-blue?logo=cloudsmith&style=for-the-badge)](https://cloudsmith.com) \
Package repository hosting is graciously provided by [Cloudsmith](https://cloudsmith.com).
Cloudsmith is the only fully hosted, cloud-native, universal package management solution, that
enables your organization to create, store and share packages in any format, to any place, with total
confidence.
#### Debian and Ubuntu based Package Repository (GNU/Linux x86/x64)
Package for Debian and Ubuntu can be downloaded from the [Github release page](https://github.com/balena-io/etcher/releases/)
> Detailed or alternative steps in the [instructions by Cloudsmith](https://cloudsmith.io/~balena/repos/etcher/setup/#formats-deb)
##### Install .deb file using apt
1. Add Etcher Debian repository:
```sh
sudo apt install ./balena-etcher_******_amd64.deb
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.deb.sh' \
| sudo -E bash
```
2. Update and install:
```sh
sudo apt-get update
sudo apt-get install balena-etcher-electron
```
##### Uninstall
```sh
sudo apt remove balena-etcher
```
```sh
sudo apt-get remove balena-etcher-electron
rm /etc/apt/sources.list.d/balena-etcher.list
apt-get clean
rm -rf /var/lib/apt/lists/*
apt-get update
```
#### Redhat (RHEL) and Fedora-based Package Repository (GNU/Linux x86/x64)
##### Yum
> Detailed or alternative steps in the [instructions by Cloudsmith](https://cloudsmith.io/~balena/repos/etcher/setup/#formats-rpm)
Package for Fedora-based and Redhat can be downloaded from the [Github release page](https://github.com/balena-io/etcher/releases/)
1. Install using yum
##### DNF
1. Add Etcher rpm repository:
```sh
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.rpm.sh' \
| sudo -E bash
```
2. Update and install:
```sh
sudo dnf install -y balena-etcher-electron
```
###### Uninstall
```sh
sudo yum localinstall balena-etcher-***.x86_64.rpm
rm /etc/yum.repos.d/balena-etcher.repo
rm /etc/yum.repos.d/balena-etcher-source.repo
```
##### Yum
1. Add Etcher rpm repository:
```sh
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.rpm.sh' \
| sudo -E bash
```
2. Update and install:
```sh
sudo yum install -y balena-etcher-electron
```
###### Uninstall
```sh
sudo yum remove -y balena-etcher-electron
rm /etc/yum.repos.d/balena-etcher.repo
rm /etc/yum.repos.d/balena-etcher-source.repo
```
#### OpenSUSE LEAP & Tumbleweed install (zypper)
1. Add the repo
```sh
curl -1sLf \
'https://dl.cloudsmith.io/public/balena/etcher/setup.rpm.sh' \
| sudo -E bash
```
2. Update and install
```sh
sudo zypper up
sudo zypper install balena-etcher-electron
```
##### Uninstall
```sh
sudo zypper rm balena-etcher-electron
# remove the repo
sudo zypper rr balena-etcher
sudo zypper rr balena-etcher-source
```
#### Solus (GNU/Linux x64)
```sh
sudo eopkg it etcher
```
##### Uninstall
```sh
sudo eopkg rm etcher
```
#### Arch/Manjaro Linux (GNU/Linux x64)
@@ -74,20 +170,6 @@ yay -S balena-etcher
yay -R balena-etcher
```
#### WinGet (Windows)
This package is updated by [gh-action](https://github.com/vedantmgoyal2009/winget-releaser), and is kept up to date automatically.
```sh
winget install balenaEtcher #or Balena.Etcher
```
##### Uninstall
```sh
winget uninstall balenaEtcher
```
#### Chocolatey (Windows)
This package is maintained by [@majkinetor](https://github.com/majkinetor), and
@@ -116,9 +198,11 @@ the [license].
[etcher]: https://balena.io/etcher
[electron]: https://electronjs.org/
[electron-supported-platforms]: https://electronjs.org/docs/tutorial/support#supported-platforms
[support]: https://github.com/balena-io/etcher/blob/master/docs/SUPPORT.md
[support]: https://github.com/balena-io/etcher/blob/master/SUPPORT.md
[contributing]: https://github.com/balena-io/etcher/blob/master/docs/CONTRIBUTING.md
[user-documentation]: https://github.com/balena-io/etcher/blob/master/docs/USER-DOCUMENTATION.md
[milestones]: https://github.com/balena-io/etcher/milestones
[newissue]: https://github.com/balena-io/etcher/issues/new
[license]: https://github.com/balena-io/etcher/blob/master/LICENSE


@@ -12,29 +12,67 @@ over the commit history.
- Be able to automatically reference relevant changes from a dependency
upgrade.
The guidelines are inspired by the [AngularJS git commit
guidelines][angular-commit-guidelines].
Commit structure
----------------
Each commit message needs to specify the semver-type, which can be `patch|minor|major`.
See the [Semantic Versioning][semver] specification for a more detailed explanation of the meaning of these types.
See balena commit guidelines for more info about the whole commit structure.
Each commit message consists of a header, a body and a footer. The header has a
special format that includes a type, a scope and a subject.
```
<semver-type>: <subject>
```
or
```
<subject>
<type>(<scope>): <subject>
<BLANK LINE>
<details>
<body>
<BLANK LINE>
Change-Type: <semver-type>
<footer>
```
The subject should not contain more than 70 characters, including the type and
scope, and the body should be wrapped at 72 characters.
Type
----
Must be one of the following:
- `feat`: A new feature.
- `fix`: A bug fix.
- `minifix`: A minimal fix that doesn't warrant an entry in the CHANGELOG.
- `docs`: Documentation only changes.
- `style`: Changes that do not affect the meaning of the code (white-space,
formatting, missing semi-colons, JSDoc annotations, comments, etc).
- `refactor`: A code change that neither fixes a bug nor adds a feature.
- `perf`: A code change that improves performance.
- `test`: Adding missing tests.
- `chore`: Changes to the build process or auxiliary tools and libraries.
- `upgrade`: A version upgrade of a project dependency.
Scope
-----
The scope is required for types that make sense, such as `feat`, `fix`,
`test`, etc. Certain commit types, such as `chore`, might not have a clearly
defined scope, in which case it's better to omit it.
Subject
-------
The subject should contain a short description of the change:
- Use the imperative, present tense.
- Don't capitalize the first letter.
- No dot (.) at the end.
Footer
------
The footer contains extra information about the commit, such as tags.
**Breaking Changes** should start with the word BREAKING CHANGE: with a space
or two newlines. The rest of the commit message is then used for this.
Tags
----
@@ -83,4 +121,125 @@ Closes: https://github.com/balena-io/etcher/issues/XXX
Fixes: https://github.com/balena-io/etcher/issues/XXX
```
### `Change-Type: <type>`
This tag is used to determine the change type that a commit introduces. The
following types are supported:
- `major`
- `minor`
- `patch`
This tag can be omitted for commits that don't change the application from the
user's point of view, such as for refactoring commits.
Examples:
```
Change-Type: major
Change-Type: minor
Change-Type: patch
```
See the [Semantic Versioning][semver] specification for a more detailed
explanation of the meaning of these types.
### `Changelog-Entry: <message>`
This tag is used to describe the changes introduced by the commit in a more
human style that would fit the `CHANGELOG.md` better.
If the commit type is either `fix` or `feat`, the commit will take part in the
CHANGELOG. If this tag is not defined, then the commit subject will be used
instead.
You can explicitly use this tag to make a commit whose type is neither `fix` nor
`feat` appear in the `CHANGELOG.md`.
Since whatever you write here will be shown *as it is* in the `CHANGELOG.md`,
take some time to write a decent entry. Consider the following guidelines:
- Use the imperative, present tense.
- Capitalize the first letter.
There is no fixed length limit for the contents of this tag, but always strive
to make it as short as possible without compromising its quality.
Examples:
```
Changelog-Entry: Fix EPERM errors when flashing to a GPT drive.
```
Complete examples
-----------------
```
fix(GUI): ignore extensions before the first non-compressed extension
Currently, we extract all the extensions from an image path and report back
that the image is invalid if *any* of the extensions is not valid , however
this can cause trouble with images including information between dots that are
not strictly extensions, for example:
elementaryos-0.3.2-stable-i386.20151209.iso
Etcher will consider `20151209` to be an invalid extension and therefore
will prevent such image from being selected at all.
As a way to allow these corner cases but still make use of our enforced check
controls, the validation routine now only consider extensions starting from the
first non compressed extension.
Change-Type: patch
Changelog-Entry: Don't interpret image file name information between dots as image extensions.
Fixes: https://github.com/balena-io/etcher/issues/492
```
***
```
upgrade: etcher-image-write to v5.0.2
This version contains a fix to an `EPERM` issue happening to some Windows user,
triggered by the `write` system call during the first ~5% of a flash given that
the operating system still thinks the drive has a file system.
Change-Type: patch
Changelog-Entry: Upgrade `etcher-image-write` to v5.0.2.
Link: https://github.com/balena-io-modules/etcher-image-write/blob/master/CHANGELOG.md#502---2016-06-27
Fixes: https://github.com/balena-io/etcher/issues/531
```
***
```
feat(GUI): implement update notifier functionality
Auto-update functionality is not ready for usage. As a workaround to
prevent users staying with older versions, we now check for updates at
startup, and if the user is not running the latest version, we present a
modal informing the user of the availability of a new version, and
provide a call to action to open the Etcher website in his web browser.
Extra features:
- The user can skip the update, and tell the program to delay the
notification for 7 days.
Misc changes:
- Center modal with flexbox, to allow more flexibility on the modal height.
interacting with the S3 server.
- Implement `ManifestBindService`, which now serves as a backend for the
`manifest-bind` directive to allow the directive's functionality to be
re-used by other services.
- Namespace checkbox styles that are specific to the settings page.
Change-Type: minor
Changelog-Entry: Check for updates and show a modal prompting the user to download the latest version.
Closes: https://github.com/balena-io/etcher/issues/396
```
[angular-commit-guidelines]: https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#commit
[semver]: http://semver.org


@@ -17,11 +17,11 @@ Developing
#### Common
- [NodeJS](https://nodejs.org) (at least v16.11)
- [Python 3](https://www.python.org)
- [NodeJS](https://nodejs.org) (at least v6.11)
- [Python 2.7](https://www.python.org)
- [jq](https://stedolan.github.io/jq/)
- [curl](https://curl.haxx.se/)
- [npm](https://www.npmjs.com/)
- [npm](https://www.npmjs.com/) (version 6.7)
```sh
pip install -r requirements.txt
@@ -33,16 +33,16 @@ You might need to run this with `sudo` or administrator permissions.
- [NSIS v2.51](http://nsis.sourceforge.net/Main_Page) (v3.x won't work)
- Either one of the following:
- [Visual C++ 2019 Build Tools](https://visualstudio.microsoft.com/vs/features/cplusplus/) containing standalone compilers, libraries and scripts
- The [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools#windows-build-tools) should be installed along with NodeJS
- [Visual Studio Community 2019](https://visualstudio.microsoft.com/vs/) (free) (other editions, like Professional and Enterprise, should work too)
**NOTE:** Visual Studio doesn't install C++ by default. You have to rerun the
- [Visual C++ 2015 Build Tools](http://landinghub.visualstudio.com/visual-cpp-build-tools) containing standalone compilers, libraries and scripts
- Install the [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools) via npm with `npm install --global windows-build-tools`
- [Visual Studio Community 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48146) (free) (other editions, like Professional and Enterprise, should work too)
**NOTE:** Visual Studio 2015 doesn't install C++ by default. You have to rerun the
setup, select "Modify" and then check `Visual C++ -> Common Tools for Visual
C++` (see http://stackoverflow.com/a/31955339)
C++ 2015` (see http://stackoverflow.com/a/31955339)
- [MinGW](http://www.mingw.org)
You might need to `npm config set msvs_version 2019` for node-gyp to correctly detect
the version of Visual Studio you're using (in this example VS2019).
You might need to `npm config set msvs_version 2015` for node-gyp to correctly detect
the version of Visual Studio you're using (in this example VS2015).
The following MinGW packages are required:
@@ -61,7 +61,7 @@ as well.
#### Linux
- `libudev-dev` for libusb (for example install with `sudo apt install libudev-dev`, or on fedora `systemd-devel` contains the required package)
- `libudev-dev` for libusb (install with `sudo apt install libudev-dev` for example)
### Cloning the project
@@ -70,13 +70,28 @@ git clone --recursive https://github.com/balena-io/etcher
cd etcher
```
### Installing npm dependencies
**NOTE:** Please make use of the following command to install npm dependencies rather
than simply running `npm install` given that we need to do extra configuration
to make sure native dependencies are correctly compiled for Electron, otherwise
the application might not run successfully.
If you're on Windows, **run the command from the _Developer Command Prompt for
VS2015_**, to ensure all Visual Studio command utilities are available in the
`%PATH%`.
```sh
make electron-develop
```
### Running the application
#### GUI
```sh
# Build the GUI
npm run webpack #or npm run build
npm run webpack
# Start Electron
npm start
```
@@ -104,6 +119,10 @@ systems as they can before sending a pull request.
*The test suite is run automatically by CI servers when you send a pull
request.*
We also rely on various `make` targets to perform some common tasks:
- `make lint`: Run the linter.
- `make sass`: Compile SCSS files.
We make use of [EditorConfig] to communicate indentation, line endings and
other text editing defaults. We encourage you to install the relevant plugin in
@@ -113,7 +132,20 @@ process.
Updating a dependency
---------------------
- Commit *both* `package.json` and `package-lock.json`.
Given we use [npm shrinkwrap][shrinkwrap], we have to take extra steps to make
sure the `npm-shrinkwrap.json` file gets updated correctly when we update a
dependency.
Use the following steps to ensure everything goes flawlessly:
- Run `make electron-develop` to ensure you don't have extraneous dependencies
you might have brought during development, or you are running older
dependencies because you come from another branch or reference.
- Install the new version of the dependency. For example: `npm install --save
<package>@<version>`. This will update the `npm-shrinkwrap.json` file.
- Commit *both* `package.json` and `npm-shrinkwrap.json`.
Diffing Binaries
----------------


@@ -8,14 +8,10 @@ Releasing
### Release Types
- **draft**: A continuous snapshot of current master, made by the CI services
- **pre-release** (default): A continuous snapshot of current master, made by the CI services
- **release**: Full releases
A draft release is created from each PR, tagged with the branch name.
Every merged PR generates a new tag/version as a *pre-release*.
Mark the pre-release as final when necessary, then distribute the packages through alternative channels as needed.
- **snapshot** (default): A continuous snapshot of current master, made by the CI services
- **production**: Full releases
### Flight Plan
#### Preparation
@@ -35,10 +31,11 @@ Mark the pre-release as final when it is necessary, then distribute the packages
- [Post release note to forums](https://forums.balena.io/c/etcher)
- [Submit Windows binaries to Symantec for whitelisting](#submitting-binaries-to-symantec)
- [Update the website](https://github.com/balena-io/etcher-homepage)
- Wait 2-3 hours for analytics (Sentry, Amplitude) to trickle in and check for elevated error rates, or regressions
- Wait 2-3 hours for analytics (Sentry, Mixpanel) to trickle in and check for elevated error rates, or regressions
- If regressions arise; pull the release, and release a patched version, else:
- [Upload deb & rpm packages to Cloudfront](#uploading-packages-to-cloudfront)
- Post changelog with `#release-notes` tag on internal chat
- [Upload deb & rpm packages to Bintray](#uploading-packages-to-bintray)
- [Upload build artifacts to Amazon S3](#uploading-binaries-to-amazon-s3)
- Post changelog with `#release-notes` tag on Flowdock
- If this release packs noteworthy major changes:
- Write a blog post about it, and / or
- Write about it to the Etcher mailing list
@@ -51,7 +48,7 @@ Make sure to set the analytics tokens when generating production release binarie
```bash
export ANALYTICS_SENTRY_TOKEN="xxxxxx"
export ANALYTICS_AMPLITUDE_TOKEN="xxxxxx"
export ANALYTICS_MIXPANEL_TOKEN="xxxxxx"
```
#### Linux
@@ -60,16 +57,46 @@ export ANALYTICS_AMPLITUDE_TOKEN="xxxxxx"
**NOTE:** Make sure to adjust the path as necessary (here the Etcher repository has been cloned to `/home/$USER/code/etcher`)
```bash
./scripts/build/docker/run-command.sh -r x64 -s . -c "make distclean"
```
##### Generating artifacts
The artifacts are generated by the CI and published as draft-release or pre-release.
`electron-builder` is used to create the packaged application.
```bash
# x64
# Build Debian packages
./scripts/build/docker/run-command.sh -r x64 -s . -c "make electron-develop && make RELEASE_TYPE=production electron-installer-debian"
# Build RPM packages
./scripts/build/docker/run-command.sh -r x64 -s . -c "make electron-develop && make RELEASE_TYPE=production electron-installer-redhat"
# Build AppImages
./scripts/build/docker/run-command.sh -r x64 -s . -c "make electron-develop && make RELEASE_TYPE=production electron-installer-appimage"
# x86
# Build Debian packages
./scripts/build/docker/run-command.sh -r x86 -s . -c "make electron-develop && make RELEASE_TYPE=production electron-installer-debian"
# Build RPM packages
./scripts/build/docker/run-command.sh -r x86 -s . -c "make electron-develop && make RELEASE_TYPE=production electron-installer-redhat"
# Build AppImages
./scripts/build/docker/run-command.sh -r x86 -s . -c "make electron-develop && make RELEASE_TYPE=production electron-installer-appimage"
```
#### Mac OS
**ATTENTION:** For production releases you'll need the code-signing key,
and set `CSC_NAME` to generate signed binaries on Mac OS.
```bash
make electron-develop
# Build the zip
make RELEASE_TYPE=production electron-installer-app-zip
# Build the dmg
make RELEASE_TYPE=production electron-installer-dmg
```
#### Windows
**ATTENTION:** For production releases you'll need the code-signing key,
@@ -78,10 +105,38 @@ and set `CSC_LINK`, and `CSC_KEY_PASSWORD` to generate signed binaries on Window
**NOTE:**
- Keep in mind to also generate artifacts for x86, with `TARGET_ARCH=x86`.
```bash
make electron-develop
### Uploading packages to Cloudfront
# Build the Portable version
make RELEASE_TYPE=production electron-installer-portable
# Build the Installer
make RELEASE_TYPE=production electron-installer-nsis
```
Log in to cloudfront and upload the `rpm` and `deb` files.
### Uploading packages to Bintray
```bash
export BINTRAY_USER="username@account"
export BINTRAY_API_KEY="youruserapikey"
```
```bash
./scripts/publish/bintray.sh -c "etcher" -t "production" -v "1.2.1" -o "etcher" -p "debian" -y "debian" -r "x64" -f "dist/etcher-electron_1.2.1_amd64.deb"
./scripts/publish/bintray.sh -c "etcher" -t "production" -v "1.2.1" -o "etcher" -p "debian" -y "debian" -r "x86" -f "dist/etcher-electron_1.2.1_i386.deb"
./scripts/publish/bintray.sh -c "etcher" -t "production" -v "1.2.1" -o "etcher" -p "redhat" -y "redhat" -r "x64" -f "dist/etcher-electron-1.2.1.x86_64.rpm"
./scripts/publish/bintray.sh -c "etcher" -t "production" -v "1.2.1" -o "etcher" -p "redhat" -y "redhat" -r "x86" -f "dist/etcher-electron-1.2.1.i686.rpm"
```
### Uploading binaries to Amazon S3
```bash
export S3_KEY="..."
```
```bash
./scripts/publish/aws-s3.sh -b "balena-production-downloads" -v "1.2.1" -p "etcher" -f "dist/<filename>"
```
### Dealing with a Problematic Release


@@ -112,4 +112,4 @@ Analytics
- [ ] Disable analytics, open DevTools Network pane or a packet sniffer, and
check that no request is sent
- [ ] **Disable analytics, refresh application from DevTools (using Cmd-R or
F5), and check that initial events are not sent to Amplitude**
F5), and check that initial events are not sent to Mixpanel**


@@ -7,9 +7,44 @@ systems.
Release Types
-------------
Etcher supports **pre-release** and **final** release types, as GitHub does. Each is
published to GitHub releases.
The release version is generated automatically from the commit messages.
Etcher supports **production** and **snapshot** release types. Each is
published to a different S3 bucket, and production release types are code
signed, while snapshot release types aren't and include a short git commit-hash
as a build number. For example, `1.0.0-beta.19` is a production release type,
while `1.0.0-beta.19+531ab82` is a snapshot release type.
In terms of comparison: `1.0.0-beta.19` (production) < `1.0.0-beta.19+531ab82`
(snapshot) < `1.0.0-rc.1` (production) < `1.0.0-rc.1+7fde24a` (snapshot) <
`1.0.0` (production) < `1.0.0+2201e5f` (snapshot). Keep in mind that if you're
running a production release type, you'll only be prompted to update to
production release types, and if you're running a snapshot release type, you'll
only be prompted to update to other snapshot release types.
The build system creates (and publishes) snapshot release types by default, but
you can build a specific release type by setting the `RELEASE_TYPE` make
variable. For example:
```sh
make <target> RELEASE_TYPE=snapshot
make <target> RELEASE_TYPE=production
```
We can control the version range a specific Etcher version will consider when
showing the update notification dialog by tweaking the `updates.semverRange`
property of `package.json`.
Update Channels
---------------
Etcher has a setting to include the unstable update channel. If this option is
set, Etcher will consider both stable and unstable versions when showing the
update notifier dialog. Unstable versions are the ones that contain a `beta`
pre-release tag. For example:
- Production unstable version: `1.4.0-beta.1`
- Snapshot unstable version: `1.4.0-beta.1+7fde24a`
- Production stable version: `1.4.0`
- Snapshot stable version: `1.4.0+7fde24a`
Signing
-------
@@ -38,19 +73,63 @@ Packaging
The resulting installers will be saved to `dist/out`.
Run the following commands on all platforms with the right arguments:
Run the following commands:
### OS X
```sh
./node_modules/electron-builder build <...>
make electron-installer-dmg
make electron-installer-app-zip
```
### GNU/Linux
Publishing to Cloudfront
```sh
make electron-installer-appimage
make electron-installer-debian
```
### Windows
```sh
make electron-installer-zip
make electron-installer-nsis
```
Publishing to Bintray
---------------------
We publish GNU/Linux Debian packages to [Cloudfront][cloudfront].
We publish GNU/Linux Debian packages to [Bintray][bintray].
Log in to cloudfront and upload the `rpm` and `deb` files.
Make sure you set the following environment variables:
- `BINTRAY_USER`
- `BINTRAY_API_KEY`
Run the following command:
```sh
make publish-bintray-debian
```
Publishing to S3
----------------
- [AWS CLI][aws-cli]
Make sure you have the [AWS CLI tool][aws-cli] installed and configured to
access balena.io's production or snapshot S3 bucket.
Run the following command to publish all files for the current combination of
_platform_ and _arch_ (building them if necessary):
```sh
make publish-aws-s3
```
Also add links to each AWS S3 file in [GitHub Releases][github-releases]. See
[`v1.0.0-beta.17`](https://github.com/balena-io/etcher/releases/tag/v1.0.0-beta.17)
as an example.
Publishing to Homebrew Cask
---------------------------
@@ -68,12 +147,8 @@ Post messages to the [Etcher forum][balena-forum-etcher] announcing the new vers
of Etcher, and including the relevant section of the Changelog.
[aws-cli]: https://aws.amazon.com/cli
[cloudfront]: https://cloudfront.com
[bintray]: https://bintray.com
[etcher-cask-file]: https://github.com/caskroom/homebrew-cask/blob/master/Casks/balenaetcher.rb
[homebrew-cask]: https://github.com/caskroom/homebrew-cask
[balena-forum-etcher]: https://forums.balena.io/c/etcher
[github-releases]: https://github.com/balena-io/etcher/releases
Updating EFP / Success-Banner
-----------------------------
Etcher Featured Project runs automatically, based on an algorithm that promotes community-contributed projects from the balena marketplace and prioritises projects that give users the best experience. Editing both EFP and the Etcher Success-Banner can only be done by someone from balena; instructions are on the [Etcher-EFP repo (private)](https://github.com/balena-io/etcher-efp)


@@ -3,11 +3,6 @@ Etcher User Documentation
This document contains how-tos and FAQs oriented to Etcher users.
Config
------
Etcher's configuration is saved to the `config.json` file in the apps folder.
Not all the options are surfaced to the UI. You may edit this file to tweak settings even before launching the app.
Why is my drive not bootable?
-----------------------------
@@ -223,5 +218,3 @@ macOS 10.10 (Yosemite) and newer versions][electron-supported-platforms].
[unetbootin]: https://unetbootin.github.io
[windows-iot-dashboard]: https://developer.microsoft.com/en-us/windows/iot/downloads
[woeusb]: https://github.com/slacka/WoeUSB
See [PUBLISHING](/docs/PUBLISHING.md) for more details about release types.


@@ -14,11 +14,5 @@
<true/>
<key>com.apple.security.network.client</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.get-task-allow</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
</dict>
</plist>


@@ -15,31 +15,30 @@
*/
import * as electron from 'electron';
import * as remote from '@electron/remote';
import { debounce, capitalize, Dictionary, values } from 'lodash';
import * as sdk from 'etcher-sdk';
import * as _ from 'lodash';
import outdent from 'outdent';
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { v4 as uuidV4 } from 'uuid';
import * as packageJSON from '../../../package.json';
import { DrivelistDrive } from '../../shared/drive-constraints';
import { DrivelistDrive, isSourceDrive } from '../../shared/drive-constraints';
import * as EXIT_CODES from '../../shared/exit-codes';
import * as messages from '../../shared/messages';
import * as availableDrives from './models/available-drives';
import * as flashState from './models/flash-state';
import { deselectImage, getImage } from './models/selection-state';
import * as settings from './models/settings';
import { Actions, observe, store } from './models/store';
import * as analytics from './modules/analytics';
import { startApiAndSpawnChild } from './modules/api';
import { scanner as driveScanner } from './modules/drive-scanner';
import * as exceptionReporter from './modules/exception-reporter';
import * as osDialog from './os/dialog';
import * as windowProgress from './os/window-progress';
import MainPage from './pages/main/MainPage';
import './css/main.css';
import * as i18next from 'i18next';
import { promises } from 'dns';
import { SourceMetadata } from '../../shared/typings/source-selector';
window.addEventListener(
'unhandledrejection',
@@ -89,7 +88,7 @@ analytics.logEvent('Application start', {
version: currentVersion,
});
const debouncedLog = debounce(console.log, 1000, { maxWait: 1000 });
const debouncedLog = _.debounce(console.log, 1000, { maxWait: 1000 });
function pluralize(word: string, quantity: number) {
return `${quantity} ${word}${quantity === 1 ? '' : 's'}`;
@@ -115,7 +114,7 @@ observe(() => {
// might cause some non-sense flashing state logs including
// `undefined` values.
debouncedLog(outdent({ newline: ' ' })`
${capitalize(currentFlashState.type)}
${_.capitalize(currentFlashState.type)}
${active},
${currentFlashState.percentage}%
at
@@ -128,43 +127,174 @@ observe(() => {
`);
});
function setDrives(drives: Dictionary<DrivelistDrive>) {
// prevent setting drives while flashing otherwise we might lose some while we unmount them
if (!flashState.isFlashing()) {
availableDrives.setDrives(values(drives));
/**
* @summary The radix used by USB ID numbers
*/
const USB_ID_RADIX = 16;
/**
* @summary The expected length of a USB ID number
*/
const USB_ID_LENGTH = 4;
/**
* @summary Convert a USB id (e.g. product/vendor) to a string
*
* @example
* console.log(usbIdToString(2652))
* > '0x0a5c'
*/
function usbIdToString(id: number): string {
return `0x${_.padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`;
}
/**
* @summary Product ID of BCM2708
*/
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763;
/**
* @summary Product ID of BCM2710
*/
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764;
/**
* @summary Compute module descriptions
*/
const COMPUTE_MODULE_DESCRIPTIONS: _.Dictionary<string> = {
[USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
[USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3',
};
async function driveIsAllowed(drive: {
devicePath: string;
device: string;
raw: string;
}) {
const driveBlacklist = (await settings.get('driveBlacklist')) || [];
return !(
driveBlacklist.includes(drive.devicePath) ||
driveBlacklist.includes(drive.device) ||
driveBlacklist.includes(drive.raw)
);
}
type Drive =
| sdk.sourceDestination.BlockDevice
| sdk.sourceDestination.UsbbootDrive
| sdk.sourceDestination.DriverlessDevice;
function prepareDrive(drive: Drive) {
if (drive instanceof sdk.sourceDestination.BlockDevice) {
// @ts-ignore (BlockDevice.drive is private)
return drive.drive;
} else if (drive instanceof sdk.sourceDestination.UsbbootDrive) {
// This is a workaround for etcher expecting a device string and a size
// @ts-ignore
drive.device = drive.usbDevice.portId;
drive.size = null;
// @ts-ignore
drive.progress = 0;
drive.disabled = true;
drive.on('progress', (progress) => {
updateDriveProgress(drive, progress);
});
return drive;
} else if (drive instanceof sdk.sourceDestination.DriverlessDevice) {
const description =
COMPUTE_MODULE_DESCRIPTIONS[
drive.deviceDescriptor.idProduct.toString()
] || 'Compute Module';
return {
device: `${usbIdToString(
drive.deviceDescriptor.idVendor,
)}:${usbIdToString(drive.deviceDescriptor.idProduct)}`,
displayName: 'Missing drivers',
description,
mountpoints: [],
isReadOnly: false,
isSystem: false,
disabled: true,
icon: 'warning',
size: null,
link: 'https://www.raspberrypi.com/documentation/computers/compute-module.html#flashing-the-compute-module-emmc',
linkCTA: 'Install',
linkTitle: 'Install missing drivers',
linkMessage: outdent`
Would you like to download the necessary drivers from the Raspberry Pi Foundation?
This will open your browser.
Once opened, download and run the installer from the "Windows Installer" section to install the drivers
`,
};
}
}
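// In short: a BlockDevice is unwrapped to its drivelist drive, a UsbbootDrive is shown as a
// disabled placeholder whose progress is updated while usbboot runs, and a DriverlessDevice
// becomes a disabled "Missing drivers" entry linking to the Raspberry Pi documentation.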
// Spawning the child process without privileges to get the drives list
// TODO: clean up this mess of exports
export let requestMetadata: any;
function setDrives(drives: _.Dictionary<DrivelistDrive>) {
availableDrives.setDrives(_.values(drives));
}
// start the api and spawn the child process
startApiAndSpawnChild({
withPrivileges: false,
}).then(({ emit, registerHandler }) => {
// start scanning
emit('scan');
function getDrives() {
return _.keyBy(availableDrives.getDrives(), 'device');
}
// make the sourceMetadata awaitable to be used on source selection
requestMetadata = async (params: any): Promise<SourceMetadata> => {
emit('sourceMetadata', JSON.stringify(params));
async function addDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
if (!(await driveIsAllowed(preparedDrive))) {
return;
}
const drives = getDrives();
drives[preparedDrive.device] = preparedDrive;
setDrives(drives);
}
return new Promise((resolve) =>
registerHandler('sourceMetadata', (data: any) => {
resolve(JSON.parse(data));
}),
);
};
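// A one-shot request/response over IPC: the 'sourceMetadata' request is emitted to the child
// process and the promise resolves with the next 'sourceMetadata' reply. A sketch of the
// expected call site (mirroring the source selector elsewhere in this diff):
//
//   const metadata = await requestMetadata({ selected, SourceType, auth });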
function removeDrive(drive: Drive) {
if (
drive instanceof sdk.sourceDestination.BlockDevice &&
// @ts-ignore BlockDevice.drive is private
isSourceDrive(drive.drive, getImage())
) {
// Deselect the image if it was on the drive that was removed.
// This will also deselect the image if the drive mountpoints change.
deselectImage();
}
const preparedDrive = prepareDrive(drive);
const drives = getDrives();
delete drives[preparedDrive.device];
setDrives(drives);
}
registerHandler('drives', (data: any) => {
setDrives(JSON.parse(data));
});
function updateDriveProgress(
drive: sdk.sourceDestination.UsbbootDrive,
progress: number,
) {
const drives = getDrives();
// @ts-ignore
const driveInMap = drives[drive.device];
if (driveInMap) {
// @ts-ignore
drives[drive.device] = { ...driveInMap, progress };
setDrives(drives);
}
}
driveScanner.on('attach', addDrive);
driveScanner.on('detach', removeDrive);
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
// otherwise we risk presenting the same error over
// and over again to the user, while also heavily
// spamming our error reporting service.
driveScanner.stop();
return exceptionReporter.report(error);
});
let popupExists = false;
driveScanner.start();
analytics.initAnalytics();
let popupExists = false;
window.addEventListener('beforeunload', async (event) => {
if (!flashState.isFlashing() || popupExists) {
@@ -195,8 +325,8 @@ window.addEventListener('beforeunload', async (event) => {
});
// This circumvents the 'beforeunload' event unlike
// remote.app.quit() which does not.
remote.process.exit(EXIT_CODES.SUCCESS);
// electron.remote.app.quit() which does not.
electron.remote.process.exit(EXIT_CODES.SUCCESS);
}
analytics.logEvent('Close rejected while flashing', {

View File

@@ -43,7 +43,7 @@ function restart(goToMain: () => void) {
async function getSuccessBannerURL() {
return (
(await settings.get('successBannerURL')) ??
'https://efp.balena.io/success-banner?borderTop=false&darkBackground=true'
'https://www.balena.io/etcher/success-banner?borderTop=false&darkBackground=true'
);
}

View File

@@ -139,9 +139,8 @@ export function FlashResults({
};
} & FlexProps) {
const [showErrorsInfo, setShowErrorsInfo] = React.useState(false);
const allFailed = !skip && results?.devices?.successful === 0;
const someFailed = results?.devices?.failed !== 0 || errors?.length !== 0;
const allFailed = !skip && results.devices.successful === 0;
const someFailed = results.devices.failed !== 0 || errors.length !== 0;
const effectiveSpeed = bytesToMegabytes(getEffectiveSpeed(results)).toFixed(
1,
);

View File

@@ -15,7 +15,6 @@
*/
import * as electron from 'electron';
import * as remote from '@electron/remote';
import * as _ from 'lodash';
import * as React from 'react';
@@ -95,11 +94,10 @@ export class SafeWebview extends React.PureComponent<
);
this.entryHref = url.href;
// Events steal 'this'
this.handleDomReady = _.bind(this.handleDomReady, this);
this.didFailLoad = _.bind(this.didFailLoad, this);
this.didGetResponseDetails = _.bind(this.didGetResponseDetails, this);
// Make a persistent electron session for the webview
this.session = remote.session.fromPartition(ELECTRON_SESSION, {
this.session = electron.remote.session.fromPartition(ELECTRON_SESSION, {
// Disable the cache for the session such that new content shows up when refreshing
cache: false,
});
@@ -122,8 +120,6 @@ export class SafeWebview extends React.PureComponent<
ref={this.webviewRef}
partition={ELECTRON_SESSION}
style={style}
// @ts-ignore
allowpopups="true"
/>
);
}
@@ -137,8 +133,8 @@ export class SafeWebview extends React.PureComponent<
this.didFailLoad,
);
this.webviewRef.current.addEventListener(
'dom-ready',
this.handleDomReady,
'new-window',
SafeWebview.newWindow,
);
this.webviewRef.current.addEventListener(
'console-message',
@@ -160,8 +156,8 @@ export class SafeWebview extends React.PureComponent<
this.didFailLoad,
);
this.webviewRef.current.removeEventListener(
'dom-ready',
this.handleDomReady,
'new-window',
SafeWebview.newWindow,
);
this.webviewRef.current.removeEventListener(
'console-message',
@@ -171,15 +167,6 @@ export class SafeWebview extends React.PureComponent<
this.session.webRequest.onCompleted(null);
}
handleDomReady() {
const webview = this.webviewRef.current;
if (webview == null) {
return;
}
const id = webview.getWebContentsId();
electron.ipcRenderer.send('webview-dom-ready', id);
}
// Set the element state to hidden
public didFailLoad() {
this.setState({
@@ -196,10 +183,7 @@ export class SafeWebview extends React.PureComponent<
// only care about this event if it's a request for the main frame
if (event.resourceType === 'mainFrame') {
const HTTP_OK = 200;
const { webContents, ...webviewEvent } = event;
analytics.logEvent('SafeWebview loaded', {
...webviewEvent,
});
analytics.logEvent('SafeWebview loaded', { event });
this.setState({
shouldShow: event.statusCode === HTTP_OK,
});
@@ -208,4 +192,17 @@ export class SafeWebview extends React.PureComponent<
}
}
}
// Open link in browser if it's opened as a 'foreground-tab'
public static async newWindow(event: electron.NewWindowEvent) {
const url = new window.URL(event.url);
if (
(url.protocol === 'http:' || url.protocol === 'https:') &&
event.disposition === 'foreground-tab' &&
// Don't open links if they're disabled by the env var
!(await settings.get('disableExternalLinks'))
) {
electron.shell.openExternal(url.href);
}
}
}

View File

@@ -20,13 +20,13 @@ import LinkSvg from '@fortawesome/fontawesome-free/svgs/solid/link.svg';
import ExclamationTriangleSvg from '@fortawesome/fontawesome-free/svgs/solid/exclamation-triangle.svg';
import ChevronDownSvg from '@fortawesome/fontawesome-free/svgs/solid/chevron-down.svg';
import ChevronRightSvg from '@fortawesome/fontawesome-free/svgs/solid/chevron-right.svg';
import { sourceDestination } from 'etcher-sdk';
import { ipcRenderer, IpcRendererEvent } from 'electron';
import { uniqBy, isNil } from 'lodash';
import * as _ from 'lodash';
import { GPTPartition, MBRPartition } from 'partitioninfo';
import * as path from 'path';
import * as prettyBytes from 'pretty-bytes';
import * as React from 'react';
import { requestMetadata } from '../../app';
import {
Flex,
ButtonProps,
@@ -47,7 +47,7 @@ import { observe } from '../../models/store';
import * as analytics from '../../modules/analytics';
import * as exceptionReporter from '../../modules/exception-reporter';
import * as osDialog from '../../os/dialog';
import { replaceWindowsNetworkDriveLetter } from '../../os/windows-network-drives';
import {
ChangeButton,
DetailsText,
@@ -64,12 +64,8 @@ import ImageSvg from '../../../assets/image.svg';
import SrcSvg from '../../../assets/src.svg';
import { DriveSelector } from '../drive-selector/drive-selector';
import { DrivelistDrive } from '../../../../shared/drive-constraints';
import axios, { AxiosRequestConfig } from 'axios';
import { isJson } from '../../../../shared/utils';
import {
SourceMetadata,
Authentication,
Source,
} from '../../../../shared/typings/source-selector';
import * as i18next from 'i18next';
const recentUrlImagesKey = 'recentUrlImages';
@@ -87,7 +83,7 @@ function normalizeRecentUrlImages(urls: any[]): URL[] {
}
})
.filter((url) => url !== undefined);
urls = uniqBy(urls, (url) => url.href);
urls = _.uniqBy(urls, (url) => url.href);
return urls.slice(urls.length - 5);
}
@@ -305,6 +301,24 @@ const FlowSelector = styled(
}
`;
export type Source =
| typeof sourceDestination.File
| typeof sourceDestination.BlockDevice
| typeof sourceDestination.Http;
export interface SourceMetadata extends sourceDestination.Metadata {
hasMBR?: boolean;
partitions?: MBRPartition[] | GPTPartition[];
path: string;
displayName: string;
description: string;
SourceType: Source;
drive?: DrivelistDrive;
extension?: string;
archiveExtension?: string;
auth?: Authentication;
}
interface SourceSelectorProps {
flashing: boolean;
}
@@ -322,6 +336,11 @@ interface SourceSelectorState {
imageLoading: boolean;
}
interface Authentication {
username: string;
password: string;
}
export class SourceSelector extends React.Component<
SourceSelectorProps,
SourceSelectorState
@@ -362,11 +381,43 @@ export class SourceSelector extends React.Component<
this.setState({ imageLoading: true });
await this.selectSource(
imagePath,
isURL(this.normalizeImagePath(imagePath)) ? 'Http' : 'File',
isURL(this.normalizeImagePath(imagePath))
? sourceDestination.Http
: sourceDestination.File,
).promise;
this.setState({ imageLoading: false });
}
private async createSource(
selected: string,
SourceType: Source,
auth?: Authentication,
) {
try {
selected = await replaceWindowsNetworkDriveLetter(selected);
} catch (error: any) {
analytics.logException(error);
}
if (isJson(decodeURIComponent(selected))) {
const config: AxiosRequestConfig = JSON.parse(
decodeURIComponent(selected),
);
return new sourceDestination.Http({
url: config.url!,
axiosInstance: axios.create(_.omit(config, ['url'])),
});
}
if (SourceType === sourceDestination.File) {
return new sourceDestination.File({
path: selected,
});
}
return new sourceDestination.Http({ url: selected, auth });
}
public normalizeImagePath(imgPath: string) {
const decodedPath = decodeURIComponent(imgPath);
if (isJson(decodedPath)) {
@@ -395,10 +446,11 @@ export class SourceSelector extends React.Component<
},
promise: (async () => {
const sourcePath = isString(selected) ? selected : selected.device;
let source;
let metadata: SourceMetadata | undefined;
if (isString(selected)) {
if (
SourceType === 'Http' &&
SourceType === sourceDestination.Http &&
!isURL(this.normalizeImagePath(selected))
) {
this.handleError(
@@ -418,14 +470,24 @@ export class SourceSelector extends React.Component<
},
});
}
source = await this.createSource(selected, SourceType, auth);
if (cancelled) {
return;
}
try {
// this will send an event down the ipcMain asking for metadata
// we'll get the response through an event
const innerSource = await source.getInnerSource();
if (cancelled) {
return;
}
metadata = await this.getMetadata(innerSource, selected);
if (cancelled) {
return;
}
metadata.SourceType = SourceType;
metadata = await requestMetadata({ selected, SourceType, auth });
if (!metadata?.hasMBR && this.state.warning === null) {
if (!metadata.hasMBR && this.state.warning === null) {
analytics.logEvent('Missing partition table', { metadata });
this.setState({
warning: {
@@ -441,6 +503,12 @@ export class SourceSelector extends React.Component<
messages.error.openSource(sourcePath, error.message),
error,
);
} finally {
try {
await source.close();
} catch (error: any) {
// Noop
}
}
} else {
if (selected.partitionTableType === null) {
@@ -457,14 +525,13 @@ export class SourceSelector extends React.Component<
displayName: selected.displayName,
description: selected.displayName,
size: selected.size as SourceMetadata['size'],
SourceType: 'BlockDevice',
SourceType: sourceDestination.BlockDevice,
drive: selected,
};
}
if (metadata !== undefined) {
metadata.auth = auth;
metadata.SourceType = SourceType;
selectionState.selectSource(metadata);
analytics.logEvent('Select image', {
// An easy way so we can quickly identify if we're making use of
@@ -498,6 +565,25 @@ export class SourceSelector extends React.Component<
analytics.logEvent(title, { path: sourcePath });
}
private async getMetadata(
source: sourceDestination.SourceDestination,
selected: string | DrivelistDrive,
) {
const metadata = (await source.getMetadata()) as SourceMetadata;
const partitionTable = await source.getPartitionTable();
if (partitionTable) {
metadata.hasMBR = true;
metadata.partitions = partitionTable.partitions;
} else {
metadata.hasMBR = false;
}
if (isString(selected)) {
metadata.extension = path.extname(selected).slice(1);
metadata.path = selected;
}
return metadata;
}
private async openImageSelector() {
analytics.logEvent('Open image selector');
this.setState({ imageSelectorOpen: true });
@@ -510,7 +596,7 @@ export class SourceSelector extends React.Component<
analytics.logEvent('Image selector closed');
return;
}
await this.selectSource(imagePath, 'File').promise;
await this.selectSource(imagePath, sourceDestination.File).promise;
} catch (error: any) {
exceptionReporter.report(error);
} finally {
@@ -521,7 +607,7 @@ export class SourceSelector extends React.Component<
private async onDrop(event: React.DragEvent<HTMLDivElement>) {
const [file] = event.dataTransfer.files;
if (file) {
await this.selectSource(file.path, 'File').promise;
await this.selectSource(file.path, sourceDestination.File).promise;
}
}
@@ -637,7 +723,7 @@ export class SourceSelector extends React.Component<
{i18next.t('cancel')}
</ChangeButton>
)}
{!isNil(imageSize) && !imageLoading && (
{!_.isNil(imageSize) && !imageLoading && (
<DetailsText>{prettyBytes(imageSize)}</DetailsText>
)}
</>
@@ -741,7 +827,7 @@ export class SourceSelector extends React.Component<
let promise;
({ promise, cancel: cancelURLSelection } = this.selectSource(
imageURL,
'Http',
sourceDestination.Http,
auth,
));
await promise;
@@ -764,7 +850,10 @@ export class SourceSelector extends React.Component<
if (originalList.length) {
const originalSource = originalList[0];
if (selectionImage?.drive?.device !== originalSource.device) {
this.selectSource(originalSource, 'BlockDevice');
this.selectSource(
originalSource,
sourceDestination.BlockDevice,
);
}
} else {
selectionState.deselectImage();
@@ -779,7 +868,7 @@ export class SourceSelector extends React.Component<
) {
return selectionState.deselectImage();
}
this.selectSource(drive, 'BlockDevice');
this.selectSource(drive, sourceDestination.BlockDevice);
}
}}
/>

View File

@@ -39,6 +39,4 @@ i18next.use(initReactI18next).init({
},
});
export const supportedLocales = ['en', 'zh'];
export default i18next;

View File

@@ -138,8 +138,7 @@ const translation = {
autoUpdate: 'Auto-updates enabled',
settings: 'Settings',
systemInformation: 'System Information',
trimExtPartitions:
'Trim unallocated space on raw images (in ext-type partitions)',
trimExtPartitions: 'Trim unallocated space on raw images (in ext-type partitions)',
},
menu: {
edit: 'Edit',

View File

@@ -1,33 +1,33 @@
const translation = {
translation: {
continue: '繼續',
ok: '好',
cancel: '取消',
continue: '繼續',
skip: '跳過',
sure: '我確定',
warning: '請注意!',
attention: '請注意',
failed: '失敗',
completed: '完',
completed: '完',
yesExit: '是的,可以退出',
reallyExit: '真的要現在退出 Etcher 嗎?',
yesContinue: '是的,繼續',
reallyExit: '真的要現在結束 Etcher 嗎?',
yesExit: '是的,可以結束',
progress: {
starting: '正在動……',
decompressing: '正在解壓……',
starting: '正在動……',
decompressing: '正在解壓……',
flashing: '正在燒錄……',
finishing: '正在結束……',
verifying: '正在驗證……',
failing: '失敗……',
},
message: {
sizeNotRecommended: '大小不建議',
sizeNotRecommended: '大小不推薦',
tooSmall: '空間太小',
locked: '被鎖定',
system: '系統',
containsImage: '存放來源映像檔',
largeDrive: '很大的磁',
sourceLarger: '所選的映像檔比目標磁碟大了 {{byte}} 位元組。',
system: '系統',
containsImage: '存放源鏡像',
largeDrive: '很大的磁',
sourceLarger: '所選的鏡像比目標大了 {{byte}} 比特。',
flashSucceed_one: '燒錄成功',
flashSucceed_other: '燒錄成功',
flashFail_one: '燒錄失敗',
@@ -39,73 +39,87 @@ const translation = {
andFailTarget_other: '並燒錄失敗了 {{num}} 個目標',
succeedTo: '{{name}} 被成功燒錄 {{target}}',
exitWhileFlashing:
'您前正在刷寫。關閉 Etcher 可能會導致您的磁無法使用。',
'您前正在刷機。 關閉 Etcher 可能會導致您的磁無法使用。',
looksLikeWindowsImage:
'看起來您正在嘗試錄 Windows 映像檔。\n\n與其他映像檔不同Windows 映像檔需要特殊處理才能使其可動。我們建議您使用專門此目的設計的工具,例如 <a href="https://rufus.akeo.ie">Rufus</a> (Windows)、<a href="https://github. com/slacka/WoeUSB">WoeUSB</a> (Linux) 或 Boot Camp 助理 (macOS)。',
image: '映像檔',
drive: '磁',
'看起來您正在嘗試錄 Windows 鏡像。\n\n與其他鏡像不同Windows 鏡像需要特殊處理才能使其可動。 我們建議您使用專門此目的設計的工具,例如 <a href="https://rufus.akeo.ie">Rufus</a> (Windows)、<a href="https://github. com/slacka/WoeUSB">WoeUSB</a> (Linux) 或 Boot Camp 助理 (macOS)。',
image: '鏡像',
drive: '磁',
missingPartitionTable:
'看起來這不是一個可動的{{type}}。\n\n這個{{type}}似乎不包含分表,因此您的設備可能無法識別或無法正確動。',
largeDriveSize:
'這是個很大容量的磁碟!請檢查並確認它不包含對您來說存放很重要的資料',
systemDrive: '選擇系統分割區很危險,因為這將會刪除你的系統',
sourceDrive: '來源映像檔位於這個分割區中',
noSpace: '磁碟空間不足。請插入另一個較大的磁碟並重試。',
'看起來這不是一個可動的{{type}}。\n\n這個{{type}}似乎不包含分表,因此您的設備可能無法識別或無法正確動。',
largeDriveSize: '這是個很大的磁盤!請檢查並確認它不包含對您很重要的信息',
systemDrive: '選擇系統盤很危險,因爲這將會刪除你的系統',
sourceDrive: '源鏡像位於這個分區中',
noSpace: '磁盤空間不足。 請插入另一個較大的磁盤並重試。',
genericFlashError:
'出了點問題。如果來源映像檔曾被壓縮過,請檢查它是否已損壞。\n{{error}}',
'出了點問題。如果源鏡像曾被壓縮過,請檢查它是否已損壞。\n{{error}}',
validation:
'寫入已成功完成,但 Etcher 在從磁讀取映像檔時檢測到潛在的損壞問題。\n\n請考慮將映像檔寫入其他磁。',
openError: '打開 {{source}} 時發生錯誤。\n\n錯誤息: {{error}}',
'寫入已成功完成,但 Etcher 在從磁讀取鏡像時檢測到潛在的損壞問題。 \n\n請考慮將鏡像寫入其他磁。',
openError: '打開 {{source}} 時出錯。\n\n錯誤息: {{error}}',
flashError: '燒錄 {{image}} {{targets}} 失敗。',
unplug:
'看起來 Etcher 失去了對磁的連接。是不是被意外拔掉了?\n\n有時這個錯誤是因讀卡器出了故障。',
'看起來 Etcher 失去了對磁的連接。是不是被意外拔掉了?\n\n有時這個錯誤是因讀卡器出了故障。',
cannotWrite:
'看起來 Etcher 無法寫入磁的這個位置。此錯誤通常是由故障的磁、讀取器或連接埠引起的。\n\n請使用其他磁、讀卡器或連接埠重試。',
'看起來 Etcher 無法寫入磁的這個位置。 此錯誤通常是由故障的磁、讀取器或端口引起的。 \n\n請使用其他磁、讀卡器或端口重試。',
childWriterDied:
'寫入處理程序意外崩潰。請再試一次,如果問題仍然存在,請聯 Etcher 團隊。',
badProtocol: '僅支 http:// 和 https:// 開頭的網址。',
'寫入進程意外崩潰。請再試一次,如果問題仍然存在,請聯 Etcher 團隊。',
badProtocol: '僅支 http:// 和 https:// 開頭的網址。',
},
target: {
selectTarget: '選擇目標磁',
plugTarget: '請插入目標磁',
selectTarget: '選擇目標磁',
plugTarget: '請插入目標磁',
targets: '個目標',
change: '更改',
},
menu: {
edit: '編輯',
view: '視圖',
devTool: '打開開發者工具',
window: '窗口',
help: '幫助',
pro: 'Etcher 專業版',
website: 'Etcher 的官網',
issue: '提交一個 issue',
about: '關於 Etcher',
hide: '隱藏 Etcher',
hideOthers: '隱藏其它窗口',
unhide: '取消隱藏',
quit: '退出 Etcher',
},
source: {
useSourceURL: '使用映像檔網址',
useSourceURL: '使用鏡像網絡地址',
auth: '驗證',
username: '輸入使用者名稱',
username: '輸入用戶名',
password: '輸入密碼',
unsupportedProtocol: '不支持的通訊協定',
windowsImage: '這可能是 Windows 系統映像檔',
partitionTable: '找不到分表',
errorOpen: '打開來源映像檔時出錯',
fromFile: '從檔案燒錄',
fromURL: '從址燒錄',
clone: '再製磁碟',
image: '映像檔訊息',
unsupportedProtocol: '不支持的協議',
windowsImage: '這可能是 Windows 系統鏡像',
partitionTable: '找不到分表',
errorOpen: '打開源鏡像時出錯',
fromFile: '從文件燒錄',
fromURL: '從在線地址燒錄',
clone: '克隆磁盤',
image: '鏡像信息',
name: '名稱:',
path: '路徑:',
selectSource: '選擇源',
plugSource: '請插入源磁',
osImages: '系統映像檔格式',
allFiles: '任何檔案格式',
enterValidURL: '請輸入正確的址',
selectSource: '選擇源',
plugSource: '請插入源磁',
osImages: '系統鏡像格式',
allFiles: '任何文件格式',
enterValidURL: '請輸入一個正確的址',
},
drives: {
name: '名稱',
size: '大小',
location: '位置',
find: '找到 {{length}} 個',
select: '選 {{select}}',
showHidden: '顯示 {{num}} 個隱藏的磁',
systemDriveDanger: '選擇系統分割區很危險,因這將會刪除你的系統!',
select: '選 {{select}}',
showHidden: '顯示 {{num}} 個隱藏的磁',
systemDriveDanger: '選擇系統很危險,因這將會刪除你的系統!',
openInBrowser: 'Etcher 會在瀏覽器中打開 {{link}}',
changeTarget: '改目標',
largeDriveWarning: '您即將格式化一個非常大的磁',
largeDriveWarningMsg: '您確定所選磁不是儲存資料的磁碟嗎?',
systemDriveWarning: '您將要格式化系統分割區',
systemDriveWarningMsg: '您確定要燒錄到系統分割區嗎?',
changeTarget: '改目標',
largeDriveWarning: '您即將擦除一個非常大的磁',
largeDriveWarningMsg: '您確定所選磁不是存儲磁盤嗎?',
systemDriveWarning: '您將要擦除系統盤',
systemDriveWarningMsg: '您確定要燒錄到系統嗎?',
},
flash: {
another: '燒錄另一目標',
@@ -115,38 +129,22 @@ const translation = {
flash: '燒錄',
flashNow: '現在燒錄!',
skip: '跳過了驗證',
moreInfo: '更多資訊',
moreInfo: '更多信息',
speedTip:
'過將映像檔大小除以燒錄時間來計算速度。\n由於我們能夠跳過未使用的部分因此具有 ext 分割區的磁碟映像檔燒錄速度更快。',
'過將鏡像大小除以燒錄時間來計算速度。\n由於我們能夠跳過未使用的部分因此具有EXT分區的磁盤鏡像燒錄速度更快。',
speed: '速度:{{speed}} MB/秒',
speedShort: '{{speed}} MB/秒',
eta: '預計還需要:{{eta}}',
failedTarget: '目標燒錄失敗',
failedRetry: '重試燒錄失敗目標',
failedTarget: '失敗的燒錄目標',
failedRetry: '重試燒錄失敗目標',
flashFailed: '燒錄失敗。',
flashCompleted: '燒錄成功!',
},
settings: {
errorReporting: '匿名向 balena.io 回報程式錯誤和使用統計資料',
errorReporting: '匿名向 balena.io 報告運行錯誤和使用統計',
autoUpdate: '自動更新',
settings: '軟體設定',
systemInformation: '系統資訊',
trimExtPartitions: '修改原始映像檔上未分配的空間(在 ext 類型分割區中)',
},
menu: {
edit: '編輯',
view: '預覽',
devTool: '打開開發者工具',
window: '視窗',
help: '協助',
pro: 'Etcher 專業版',
website: 'Etcher 的官網',
issue: '提交 issue',
about: '關於 Etcher',
hide: '隱藏 Etcher',
hideOthers: '隱藏其它視窗',
unhide: '取消隱藏',
quit: '結束 Etcher',
settings: '軟件設置',
systemInformation: '系統信息',
},
},
};

View File

@@ -47,13 +47,7 @@ export function isFlashing(): boolean {
*/
export function setFlashingFlag() {
// see https://github.com/balenablocks/balena-electron-env/blob/4fce9c461f294d4a768db8f247eea6f75d7b08b0/README.md#remote-methods
try {
electron.ipcRenderer.invoke('disable-screensaver');
} catch (error) {
console.log(
"Can't disable-screensaver, we're probably not running on a balena-electron env",
);
}
electron.ipcRenderer.invoke('disable-screensaver');
store.dispatch({
type: Actions.SET_FLASHING_FLAG,
data: {},

View File

@@ -41,10 +41,11 @@ export const DEFAULT_HEIGHT = 480;
* NOTE: We use the remote property when this module
* is loaded in the Electron's renderer process
*/
function getConfigPath() {
const app = electron.app || require('@electron/remote').app;
return join(app.getPath('userData'), 'config.json');
}
const app = electron.app || electron.remote.app;
const USER_DATA_DIR = app.getPath('userData');
const CONFIG_PATH = join(USER_DATA_DIR, 'config.json');
async function readConfigFile(filename: string): Promise<_.Dictionary<any>> {
let contents = '{}';
@@ -63,7 +64,7 @@ async function readConfigFile(filename: string): Promise<_.Dictionary<any>> {
// exported for tests
export async function readAll() {
return await readConfigFile(getConfigPath());
return await readConfigFile(CONFIG_PATH);
}
// exported for tests
@@ -102,7 +103,7 @@ export async function set(
const previousValue = settings[key];
settings[key] = value;
try {
await writeConfigFileFn(getConfigPath(), settings);
await writeConfigFileFn(CONFIG_PATH, settings);
} catch (error: any) {
// Revert to previous value if persisting settings failed
settings[key] = previousValue;

View File

@@ -200,7 +200,7 @@ function storeReducer(
constraints.isDriveValid(drive, image) &&
!drive.isReadOnly &&
constraints.isDriveSizeRecommended(drive, image) &&
// We don't want to auto-select large drives except if autoSelectAllDrives is true
// We don't want to auto-select large drives execpt is autoSelectAllDrives is true
(!constraints.isDriveSizeLarge(drive) || shouldAutoselectAll) &&
// We don't want to auto-select system drives
!constraints.isSystemDrive(drive)

View File

@@ -15,188 +15,84 @@
*/
import * as _ from 'lodash';
import { Client, createClient, createNoopClient } from 'analytics-client';
import * as SentryRenderer from '@sentry/electron/renderer';
import * as resinCorvus from 'resin-corvus/browser';
import * as packageJSON from '../../../../package.json';
import { getConfig } from '../../../shared/utils';
import * as settings from '../models/settings';
import { store } from '../models/store';
import * as packageJSON from '../../../../package.json';
type AnalyticsPayload = _.Dictionary<any>;
const DEFAULT_PROBABILITY = 0.1;
const clearUserPath = (filename: string): string => {
const generatedFile = filename.split('generated').reverse()[0];
return generatedFile !== filename ? `generated${generatedFile}` : filename;
};
export const anonymizeSentryData = (
event: SentryRenderer.Event,
): SentryRenderer.Event => {
event.exception?.values?.forEach((exception) => {
exception.stacktrace?.frames?.forEach((frame) => {
if (frame.filename) {
frame.filename = clearUserPath(frame.filename);
}
});
async function installCorvus(): Promise<void> {
const sentryToken =
(await settings.get('analyticsSentryToken')) ||
_.get(packageJSON, ['analytics', 'sentry', 'token']);
const mixpanelToken =
(await settings.get('analyticsMixpanelToken')) ||
_.get(packageJSON, ['analytics', 'mixpanel', 'token']);
resinCorvus.install({
services: {
sentry: sentryToken,
mixpanel: mixpanelToken,
},
options: {
release: packageJSON.version,
shouldReport: () => {
return settings.getSync('errorReporting');
},
mixpanelDeferred: true,
},
});
}
event.breadcrumbs?.forEach((breadcrumb) => {
if (breadcrumb.data?.url) {
breadcrumb.data.url = clearUserPath(breadcrumb.data.url);
}
});
let mixpanelSample = DEFAULT_PROBABILITY;
if (event.request?.url) {
event.request.url = clearUserPath(event.request.url);
}
return event;
};
const extractPathRegex = /(.*)(^|\s)(file\:\/\/)?(\w\:)?([\\\/].+)/;
const etcherSegmentMarkers = ['app.asar', 'Resources'];
export const anonymizePath = (input: string) => {
// First, extract a part of the value that matches a path pattern.
const match = extractPathRegex.exec(input);
if (match === null) {
return input;
}
const mainPart = match[5];
const space = match[2];
const beginning = match[1];
const uriPrefix = match[3] || '';
// We have to deal with both Windows and POSIX here.
// The path starts with its separator (we work with absolute paths).
const sep = mainPart[0];
const segments = mainPart.split(sep);
// Moving from the end, find the first marker and cut the path from there.
const startCutIndex = _.findLastIndex(segments, (segment) =>
etcherSegmentMarkers.includes(segment),
);
return (
beginning +
space +
uriPrefix +
'[PERSONAL PATH]' +
sep +
segments.splice(startCutIndex).join(sep)
);
};
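// Worked example (hypothetical path): anonymizePath('/home/alice/etcher/resources/app.asar/generated/gui.js')
// keeps only the segments from the last 'app.asar' or 'Resources' marker onwards, giving
// '[PERSONAL PATH]/app.asar/generated/gui.js'; a path with no marker keeps just its last segment.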
const safeAnonymizePath = (input: string) => {
try {
return anonymizePath(input);
} catch (e) {
return '[ANONYMIZE PATH FAILED]';
}
};
const sensitiveEtcherProperties = [
'error.description',
'error.message',
'error.stack',
'image',
'image.path',
'path',
];
export const anonymizeAnalyticsPayload = (
data: AnalyticsPayload,
): AnalyticsPayload => {
for (const prop of sensitiveEtcherProperties) {
const value = data[prop];
if (value != null) {
data[prop] = safeAnonymizePath(value.toString());
}
}
return data;
};
let analyticsClient: Client;
/**
* @summary Init analytics configurations
*/
export const initAnalytics = _.once(() => {
const dsn =
settings.getSync('analyticsSentryToken') ||
_.get(packageJSON, ['analytics', 'sentry', 'token']);
SentryRenderer.init({ dsn, beforeSend: anonymizeSentryData });
const projectName =
settings.getSync('analyticsAmplitudeToken') ||
_.get(packageJSON, ['analytics', 'amplitude', 'token']);
const clientConfig = {
projectName,
endpoint: 'data.balena-cloud.com',
componentName: 'etcher',
componentVersion: packageJSON.version,
};
analyticsClient = projectName
? createClient(clientConfig)
: createNoopClient();
});
const getCircularReplacer = () => {
const seen = new WeakSet();
return (key: any, value: any) => {
if (typeof value === 'object' && value !== null) {
if (seen.has(value)) {
return;
}
seen.add(value);
}
return value;
};
};
function flattenObject(obj: any) {
const toReturn: AnalyticsPayload = {};
for (const i in obj) {
if (!obj.hasOwnProperty(i)) {
continue;
}
if (Array.isArray(obj[i])) {
toReturn[i] = obj[i];
continue;
}
if (typeof obj[i] === 'object' && obj[i] !== null) {
const flatObject = flattenObject(obj[i]);
for (const x in flatObject) {
if (!flatObject.hasOwnProperty(x)) {
continue;
}
toReturn[i.toLowerCase() + '.' + x.toLowerCase()] = flatObject[x];
}
} else {
toReturn[i] = obj[i];
async function initConfig() {
await installCorvus();
let validatedConfig = null;
try {
const configUrl = await settings.get('configUrl');
const config = await getConfig(configUrl);
const mixpanel = _.get(config, ['analytics', 'mixpanel'], {});
mixpanelSample = mixpanel.probability || DEFAULT_PROBABILITY;
if (isClientEligible(mixpanelSample)) {
validatedConfig = validateMixpanelConfig(mixpanel);
}
} catch (err) {
resinCorvus.logException(err);
}
return toReturn;
}
function formatEvent(data: any): AnalyticsPayload {
const event = JSON.parse(JSON.stringify(data, getCircularReplacer()));
return anonymizeAnalyticsPayload(flattenObject(event));
}
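// Worked example (hypothetical payload): formatEvent({ image: { path: '/home/me/ubuntu.iso' }, flashInstanceUuid: 'abc' })
// first flattens the nested object to { 'image.path': '/home/me/ubuntu.iso', flashInstanceUuid: 'abc' },
// then anonymizes the sensitive 'image.path' entry to '[PERSONAL PATH]/ubuntu.iso'.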
function reportAnalytics(message: string, data: AnalyticsPayload = {}) {
const { applicationSessionUuid, flashingWorkflowUuid } = store
.getState()
.toJS();
const event = formatEvent({
...data,
applicationSessionUuid,
flashingWorkflowUuid,
resinCorvus.setConfigs({
mixpanel: validatedConfig,
});
analyticsClient.track(message, event);
}
initConfig();
/**
* @summary Check that the client is eligible for analytics
*/
function isClientEligible(probability: number) {
return Math.random() < probability;
}
/**
* @summary Check that config has at least HTTP_PROTOCOL and api_host
*/
function validateMixpanelConfig(config: {
api_host?: string;
HTTP_PROTOCOL?: string;
}) {
const mixpanelConfig = {
api_host: 'https://api.mixpanel.com',
};
if (config.HTTP_PROTOCOL !== undefined && config.api_host !== undefined) {
mixpanelConfig.api_host = `${config.HTTP_PROTOCOL}://${config.api_host}`;
}
return mixpanelConfig;
}
/**
@@ -205,12 +101,16 @@ function reportAnalytics(message: string, data: AnalyticsPayload = {}) {
* @description
* This function sends the debug message to product analytics services.
*/
export async function logEvent(message: string, data: AnalyticsPayload = {}) {
const shouldReportAnalytics = await settings.get('errorReporting');
if (shouldReportAnalytics) {
initAnalytics();
reportAnalytics(message, data);
}
export function logEvent(message: string, data: _.Dictionary<any> = {}) {
const { applicationSessionUuid, flashingWorkflowUuid } = store
.getState()
.toJS();
resinCorvus.logEvent(message, {
...data,
sample: mixpanelSample,
applicationSessionUuid,
flashingWorkflowUuid,
});
}
/**
@@ -219,11 +119,4 @@ export async function logEvent(message: string, data: AnalyticsPayload = {}) {
* @description
* This function logs an exception to error reporting services.
*/
export function logException(error: any) {
const shouldReportErrors = settings.getSync('errorReporting');
console.error(error);
if (shouldReportErrors) {
initAnalytics();
SentryRenderer.captureException(error);
}
}
export const logException = resinCorvus.logException;

View File

@@ -1,184 +0,0 @@
/** This function will:
* - start the ipc server (api)
* - spawn the child process (privileged or not)
* - wait for the child process to connect to the api
* - return a promise that will resolve with the emit function for the api
*
* //TODO:
* - this should be refactored to reverse the control flow:
* - the child process should be the server
* - this should be the client
* - replace the current node-ipc api with a websocket api
* - centralise the api for both the writer and the scanner instead of having two instances running
*/
import * as ipc from 'node-ipc';
import { spawn } from 'child_process';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from '../../../../package.json';
import * as permissions from '../../../shared/permissions';
import { getAppPath } from '../../../shared/get-app-path';
import * as errors from '../../../shared/errors';
const THREADS_PER_CPU = 16;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
function writerArgv(): string[] {
let entryPoint = path.join(getAppPath(), 'generated', 'etcher-util');
// AppImages run over FUSE, so the files inside the mount point
// can only be accessed by the user that mounted the AppImage.
// This means we can't re-spawn Etcher as root from the same
// mount-point, and as a workaround, we re-mount the original
// AppImage as root.
if (os.platform() === 'linux' && process.env.APPIMAGE && process.env.APPDIR) {
entryPoint = entryPoint.replace(process.env.APPDIR, '');
return [
process.env.APPIMAGE,
'-e',
`require(\`\${process.env.APPDIR}${entryPoint}\`)`,
];
} else {
return [entryPoint];
}
}
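// Illustrative result (assumed paths): outside an AppImage this is simply
// ['<app path>/generated/etcher-util'], while inside an AppImage with
// APPDIR=/tmp/.mount_etcher and APPIMAGE=/home/me/etcher.AppImage it becomes
// ['/home/me/etcher.AppImage', '-e', 'require(`${process.env.APPDIR}/generated/etcher-util`)'],
// i.e. the child is spawned by re-invoking the original AppImage with an inline require of the util entry point.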
function writerEnv(
IPC_CLIENT_ID: string,
IPC_SERVER_ID: string,
IPC_SOCKET_ROOT: string,
) {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT,
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
async function spawnChild({
withPrivileges,
IPC_CLIENT_ID,
IPC_SERVER_ID,
IPC_SOCKET_ROOT,
}: {
withPrivileges: boolean;
IPC_CLIENT_ID: string;
IPC_SERVER_ID: string;
IPC_SOCKET_ROOT: string;
}) {
const argv = writerArgv();
const env = writerEnv(IPC_CLIENT_ID, IPC_SERVER_ID, IPC_SOCKET_ROOT);
if (withPrivileges) {
return await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
} else {
const process = await spawn(argv[0], argv.slice(1), {
env,
});
return { cancelled: false, process };
}
}
function terminateServer(server: any) {
// Turns out we need to destroy all sockets for
// the server to actually close. Otherwise, it
// just stops receiving any further connections,
// but remains open if there are active ones.
// @ts-ignore (no Server.sockets in @types/node-ipc)
for (const socket of server.sockets) {
socket.destroy();
}
server.stop();
}
// TODO: replace the custom ipc events by one generic "message" for all communication with the backend
function startApiAndSpawnChild({
withPrivileges,
}: {
withPrivileges: boolean;
}): Promise<any> {
// There might be multiple Etcher instances running at
// the same time, and we might also spawn multiple child processes and api instances, so we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}-${Date.now()}-${
withPrivileges ? 'privileged' : 'unprivileged'
}}}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}-${Date.now()}-${
withPrivileges ? 'privileged' : 'unprivileged'
}}`;
const IPC_SOCKET_ROOT = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = IPC_SOCKET_ROOT;
return new Promise((resolve, reject) => {
ipc.serve();
// 'log' is a special message which brings the logs back from the child process and prints them to the console
ipc.server.on('log', (message: string) => {
console.log(message);
});
// api to register more handlers with callbacks
const registerHandler = (event: string, handler: any) => {
ipc.server.on(event, handler);
};
// once the api is ready (meaning the child process is connected) we pass the emit and terminate functions to the caller
ipc.server.on('ready', (_: any, socket) => {
const emit = (channel: string, data: any) => {
ipc.server.emit(socket, channel, data);
};
resolve({
emit,
terminateServer: () => terminateServer(ipc.server),
registerHandler,
});
});
// on api error we terminate
ipc.server.on('error', (error: any) => {
terminateServer(ipc.server);
const errorObject = errors.fromJSON(error);
reject(errorObject);
});
// when the api is started we spawn the child process
ipc.server.on('start', async () => {
try {
const results = await spawnChild({
withPrivileges,
IPC_CLIENT_ID,
IPC_SERVER_ID,
IPC_SOCKET_ROOT,
});
// this will happen if the child is spawned withPrivileges and the privilege elevation has been rejected
if (results.cancelled) {
reject();
}
} catch (error) {
reject(error);
}
});
// start the server
ipc.server.start();
});
}
export { startApiAndSpawnChild };
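// A minimal consumer sketch (the channel names mirror those used elsewhere in this diff;
// the child process is assumed to answer them):
//
//   const { emit, registerHandler, terminateServer } = await startApiAndSpawnChild({
//     withPrivileges: false,
//   });
//   registerHandler('drives', (data: string) => console.log(JSON.parse(data)));
//   emit('scan');
//   // ... later, when done with the scanner:
//   terminateServer();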

View File

@@ -17,15 +17,38 @@
import { Drive as DrivelistDrive } from 'drivelist';
import * as sdk from 'etcher-sdk';
import { Dictionary } from 'lodash';
import * as ipc from 'node-ipc';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from '../../../../package.json';
import * as errors from '../../../shared/errors';
import { SourceMetadata } from '../../../shared/typings/source-selector';
import * as permissions from '../../../shared/permissions';
import { getAppPath } from '../../../shared/utils';
import { SourceMetadata } from '../components/source-selector/source-selector';
import * as flashState from '../models/flash-state';
import * as selectionState from '../models/selection-state';
import * as settings from '../models/settings';
import * as analytics from '../modules/analytics';
import * as windowProgress from '../os/window-progress';
import { startApiAndSpawnChild } from './api';
import { terminateScanningServer } from '../app';
const THREADS_PER_CPU = 16;
// There might be multiple Etcher instances running at
// the same time, therefore we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
/**
* @summary Handle a flash error and log it to analytics
@@ -57,7 +80,51 @@ function handleErrorLogging(
}
}
let cancelEmitter: (type: string) => void | undefined;
function terminateServer() {
// Turns out we need to destroy all sockets for
// the server to actually close. Otherwise, it
// just stops receiving any further connections,
// but remains open if there are active ones.
// @ts-ignore (no Server.sockets in @types/node-ipc)
for (const socket of ipc.server.sockets) {
socket.destroy();
}
ipc.server.stop();
}
function writerArgv(): string[] {
let entryPoint = path.join(getAppPath(), 'generated', 'child-writer.js');
// AppImages run over FUSE, so the files inside the mount point
// can only be accessed by the user that mounted the AppImage.
// This means we can't re-spawn Etcher as root from the same
// mount-point, and as a workaround, we re-mount the original
// AppImage as root.
if (os.platform() === 'linux' && process.env.APPIMAGE && process.env.APPDIR) {
entryPoint = entryPoint.replace(process.env.APPDIR, '');
return [
process.env.APPIMAGE,
'-e',
`require(\`\${process.env.APPDIR}${entryPoint}\`)`,
];
} else {
return [process.argv[0], entryPoint];
}
}
function writerEnv() {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT: ipc.config.socketRoot,
ELECTRON_RUN_AS_NODE: '1',
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
interface FlashResults {
skip?: boolean;
@@ -77,13 +144,22 @@ async function performWrite(
drives: DrivelistDrive[],
onProgress: sdk.multiWrite.OnProgressFunction,
): Promise<{ cancelled?: boolean }> {
let cancelled = false;
let skip = false;
ipc.serve();
const { autoBlockmapping, decompressFirst } = await settings.getAll();
return await new Promise((resolve, reject) => {
ipc.server.on('error', (error) => {
terminateServer();
const errorObject = errors.fromJSON(error);
reject(errorObject);
});
console.log({ image, drives });
ipc.server.on('log', (message) => {
console.log(message);
});
return await new Promise(async (resolve, reject) => {
const flashResults: FlashResults = {};
const analyticsData = {
image,
drives,
@@ -92,51 +168,75 @@ async function performWrite(
flashInstanceUuid: flashState.getFlashUuid(),
};
const onFail = ({ device, error }) => {
console.log('fail event');
console.log(device);
console.log(error);
ipc.server.on('fail', ({ device, error }) => {
if (device.devicePath) {
flashState.addFailedDeviceError({ device, error });
}
handleErrorLogging(error, analyticsData);
finish();
};
});
const onDone = (event) => {
console.log('done event');
ipc.server.on('done', (event) => {
event.results.errors = event.results.errors.map(
(data: Dictionary<any> & { message: string }) => {
return errors.fromJSON(data);
},
);
flashResults.results = event.results;
finish();
};
});
const onAbort = () => {
console.log('abort event');
flashResults.cancelled = true;
finish();
};
ipc.server.on('abort', () => {
terminateServer();
cancelled = true;
});
const onSkip = () => {
console.log('skip event');
flashResults.skip = true;
finish();
};
ipc.server.on('skip', () => {
terminateServer();
skip = true;
});
const finish = () => {
ipc.server.on('state', onProgress);
ipc.server.on('ready', (_data, socket) => {
ipc.server.emit(socket, 'write', {
image,
destinations: drives,
SourceType: image.SourceType.name,
autoBlockmapping,
decompressFirst,
});
});
const argv = writerArgv();
ipc.server.on('start', async () => {
console.log(`Elevating command: ${argv.join(' ')}`);
const env = writerEnv();
try {
const results = await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
flashResults.cancelled = cancelled || results.cancelled;
flashResults.skip = skip;
} catch (error: any) {
// This happens when the child is killed using SIGKILL
const SIGKILL_EXIT_CODE = 137;
if (error.code === SIGKILL_EXIT_CODE) {
error.code = 'ECHILDDIED';
}
reject(error);
} finally {
console.log('Terminating IPC server');
terminateServer();
}
console.log('Flash results', flashResults);
// The flash wasn't cancelled and we didn't get a 'done' event
// Catch unexpected situations
if (
!flashResults.cancelled &&
!flashResults.skip &&
flashResults.results === undefined
) {
console.log(flashResults);
reject(
errors.createUserError({
title: 'The writer process ended unexpectedly',
@@ -144,40 +244,15 @@ async function performWrite(
'Please try again, and contact the Etcher team if the problem persists',
}),
);
return;
}
console.log('Terminating IPC server');
terminateServer();
resolve(flashResults);
};
});
// Spawn the child process with privileges and wait for the connection to be made
const { emit, registerHandler, terminateServer } =
await startApiAndSpawnChild({
withPrivileges: true,
});
registerHandler('state', onProgress);
registerHandler('fail', onFail);
registerHandler('done', onDone);
registerHandler('abort', onAbort);
registerHandler('skip', onSkip);
cancelEmitter = (cancelStatus: string) => emit(cancelStatus);
// Now that we know we're connected we can instruct the child process to start the write
const paramaters = {
image,
destinations: drives,
SourceType: image.SourceType,
autoBlockmapping,
decompressFirst,
};
console.log('params', paramaters);
emit('write', paramaters);
// Clear the update lock timer to prevent a longer
// flash from timing it out and releasing the lock
ipc.server.start();
});
// The process continues in the event handler
}
/**
@@ -194,7 +269,6 @@ export async function flash(
}
await flashState.setFlashingFlag();
flashState.setDevicePaths(
drives.map((d) => d.devicePath).filter((p) => p != null) as string[],
);
@@ -210,7 +284,6 @@ export async function flash(
analytics.logEvent('Flash', analyticsData);
// start api and call the flasher
try {
const result = await write(image, drives, flashState.setProgressState);
await flashState.unsetFlashingFlag(result);
@@ -219,11 +292,8 @@ export async function flash(
cancelled: false,
errorCode: error.code,
});
windowProgress.clear();
const { results = {} } = flashState.getFlashResults();
const eventData = {
...analyticsData,
errors: results.errors,
@@ -234,9 +304,7 @@ export async function flash(
analytics.logEvent('Write failed', eventData);
throw error;
}
windowProgress.clear();
if (flashState.wasLastFlashCancelled()) {
const eventData = {
...analyticsData,
@@ -259,7 +327,6 @@ export async function flash(
/**
* @summary Cancel write operation
* //TODO: find a better solution to handle cancellation
*/
export async function cancel(type: string) {
const status = type.toLowerCase();
@@ -274,7 +341,15 @@ export async function cancel(type: string) {
};
analytics.logEvent('Cancel', analyticsData);
if (cancelEmitter) {
cancelEmitter(status);
// Re-enable lock release on inactivity
try {
// @ts-ignore (no Server.sockets in @types/node-ipc)
const [socket] = ipc.server.sockets;
if (socket !== undefined) {
ipc.server.emit(socket, status);
}
} catch (error: any) {
analytics.logException(error);
}
}

View File

@@ -15,7 +15,6 @@
*/
import * as electron from 'electron';
import * as remote from '@electron/remote';
import * as _ from 'lodash';
import * as errors from '../../../shared/errors';
@@ -64,9 +63,10 @@ export async function selectImage(): Promise<string | undefined> {
},
],
};
const currentWindow = remote.getCurrentWindow();
const [file] = (await remote.dialog.showOpenDialog(currentWindow, options))
.filePaths;
const currentWindow = electron.remote.getCurrentWindow();
const [file] = (
await electron.remote.dialog.showOpenDialog(currentWindow, options)
).filePaths;
return file;
}
@@ -92,8 +92,8 @@ export async function showWarning(options: {
);
const BUTTON_REJECTION_INDEX = _.indexOf(BUTTONS, options.rejectionLabel);
const { response } = await remote.dialog.showMessageBox(
remote.getCurrentWindow(),
const { response } = await electron.remote.dialog.showMessageBox(
electron.remote.getCurrentWindow(),
{
type: 'warning',
buttons: BUTTONS,
@@ -113,5 +113,5 @@ export async function showWarning(options: {
export function showError(error: Error) {
const title = errors.getTitle(error);
const message = errors.getDescription(error);
remote.dialog.showErrorBox(title, message);
electron.remote.dialog.showErrorBox(title, message);
}

View File

@@ -14,7 +14,7 @@
* limitations under the License.
*/
import * as remote from '@electron/remote';
import * as electron from 'electron';
import * as settings from '../models/settings';
@@ -28,8 +28,8 @@ export async function send(title: string, body: string, icon: string) {
}
// `app.dock` is only defined in OS X
if (remote.app.dock) {
remote.app.dock.bounce();
if (electron.remote.app.dock) {
electron.remote.app.dock.bounce();
}
return new window.Notification(title, { body, icon });

View File

@@ -14,7 +14,7 @@
* limitations under the License.
*/
import * as remote from '@electron/remote';
import * as electron from 'electron';
import { percentageToFloat } from '../../../shared/utils';
import { FlashState, titleFromFlashState } from '../modules/progress-status';
@@ -40,7 +40,7 @@ function getWindowTitle(state?: FlashState) {
* @description
* We expose this property to `this` for testability purposes.
*/
export const currentWindow = remote.getCurrentWindow();
export const currentWindow = electron.remote.getCurrentWindow();
/**
* @summary Set operating system window progress

View File

@@ -27,6 +27,7 @@ import * as availableDrives from '../../models/available-drives';
import * as flashState from '../../models/flash-state';
import * as selection from '../../models/selection-state';
import * as analytics from '../../modules/analytics';
import { scanner as driveScanner } from '../../modules/drive-scanner';
import * as imageWriter from '../../modules/image-writer';
import * as notification from '../../os/notification';
import {
@@ -94,6 +95,10 @@ async function flashImageToDrive(
return '';
}
// Stop scanning drives when flashing
// otherwise Windows throws EPERM
driveScanner.stop();
const iconPath = path.join('media', 'icon.png');
const basename = path.basename(image.path);
try {
@@ -105,7 +110,7 @@ async function flashImageToDrive(
cancelled,
} = flashState.getFlashResults();
if (!skip && !cancelled) {
if (results?.devices?.successful > 0) {
if (results.devices.successful > 0) {
notifySuccess(iconPath, basename, drives, results.devices);
} else {
notifyFailure(iconPath, basename, drives);
@@ -124,6 +129,7 @@ async function flashImageToDrive(
return errorMessage;
} finally {
availableDrives.setDrives([]);
driveScanner.start();
}
return '';

View File

@@ -26,8 +26,10 @@ import styled from 'styled-components';
import FinishPage from '../../components/finish/finish';
import { ReducedFlashingInfos } from '../../components/reduced-flashing-infos/reduced-flashing-infos';
import { SettingsModal } from '../../components/settings/settings';
import { SourceSelector } from '../../components/source-selector/source-selector';
import { SourceMetadata } from '../../../../shared/typings/source-selector';
import {
SourceMetadata,
SourceSelector,
} from '../../components/source-selector/source-selector';
import * as flashState from '../../models/flash-state';
import * as selectionState from '../../models/selection-state';
import * as settings from '../../models/settings';
@@ -146,7 +148,7 @@ export class MainPage extends React.Component<
private async getFeaturedProjectURL() {
const url = new URL(
(await settings.get('featuredProjectEndpoint')) ||
'https://efp.balena.io/index.html',
'https://assets.balena.io/etcher-featured/index.html',
);
url.searchParams.append('borderRight', 'false');
url.searchParams.append('darkBackground', 'true');
@@ -311,7 +313,7 @@ export class MainPage extends React.Component<
onClick={() =>
openExternal(
selectionState.getImage()?.supportUrl ||
'https://github.com/balena-io/etcher/blob/master/docs/SUPPORT.md',
'https://github.com/balena-io/etcher/blob/master/SUPPORT.md',
)
}
tabIndex={6}

View File

@@ -17,10 +17,10 @@
import { Dictionary } from 'lodash';
type BalenaTag = {
id: number;
name: string;
value: string;
};
id: number,
name: string,
value: string
}
export class EtcherPro {
private supervisorAddr: string;

View File

@@ -15,24 +15,21 @@
*/
import * as electron from 'electron';
import * as remoteMain from '@electron/remote/main';
import { autoUpdater } from 'electron-updater';
import { promises as fs } from 'fs';
import { platform } from 'os';
import * as path from 'path';
import * as semver from 'semver';
import * as lodash from 'lodash';
import './app/i18n';
import { packageType, version } from '../../package.json';
import * as EXIT_CODES from '../shared/exit-codes';
import { delay, getConfig } from '../shared/utils';
import * as settings from './app/models/settings';
import { logException } from './app/modules/analytics';
import { buildWindowMenu } from './menu';
import * as i18n from 'i18next';
import * as SentryMain from '@sentry/electron/main';
import * as packageJSON from '../../package.json';
import { anonymizeSentryData } from './app/modules/analytics';
const customProtocol = 'etcher';
const scheme = `${customProtocol}://`;
@@ -41,8 +38,6 @@ const packageUpdatable = updatablePackageTypes.includes(packageType);
let packageUpdated = false;
let mainWindow: any = null;
remoteMain.initialize();
async function checkForUpdates(interval: number) {
// We use a while loop instead of a setInterval to preserve
// async execution time between each function call
@@ -51,28 +46,20 @@ async function checkForUpdates(interval: number) {
try {
const release = await autoUpdater.checkForUpdates();
const isOutdated =
semver.compare(release!.updateInfo.version, version) > 0;
const shouldUpdate = release!.updateInfo.stagingPercentage !== 0; // undefined (default) means 100%
semver.compare(release.updateInfo.version, version) > 0;
const shouldUpdate = release.updateInfo.stagingPercentage !== 0; // undefined (default) means 100%
if (shouldUpdate && isOutdated) {
await autoUpdater.downloadUpdate();
packageUpdated = true;
}
} catch (err) {
logMainProcessException(err);
logException(err);
}
}
await delay(interval);
}
}
function logMainProcessException(error: any) {
const shouldReportErrors = settings.getSync('errorReporting');
console.error(error);
if (shouldReportErrors) {
SentryMain.captureException(error);
}
}
async function isFile(filePath: string): Promise<boolean> {
try {
const stat = await fs.stat(filePath);
@@ -107,14 +94,6 @@ async function getCommandLineURL(argv: string[]): Promise<string | undefined> {
}
}
const initSentryMain = lodash.once(() => {
const dsn =
settings.getSync('analyticsSentryToken') ||
lodash.get(packageJSON, ['analytics', 'sentry', 'token']);
SentryMain.init({ dsn, beforeSend: anonymizeSentryData });
});
const sourceSelectorReady = new Promise((resolve) => {
electron.ipcMain.on('source-selector-ready', resolve);
});
@@ -176,15 +155,16 @@ async function createMainWindow() {
contextIsolation: false,
webviewTag: true,
zoomFactor: width / defaultWidth,
enableRemoteModule: true,
},
});
electron.app.setAsDefaultProtocolClient(customProtocol);
// mainWindow.setFullScreen(true);
mainWindow.setFullScreen(true);
// Prevent flash of white when starting the application
mainWindow.once('ready-to-show', () => {
mainWindow.on('ready-to-show', () => {
console.timeEnd('ready-to-show');
// Electron sometimes caches the zoomFactor
// making it obnoxious to switch back-and-forth
@@ -208,26 +188,34 @@ async function createMainWindow() {
);
const page = mainWindow.webContents;
remoteMain.enable(page);
page.once('did-frame-finish-load', async () => {
console.log('packageUpdatable', packageUpdatable);
autoUpdater.on('error', (err) => {
logMainProcessException(err);
logException(err);
});
if (packageUpdatable) {
try {
const checkForUpdatesTimer = 300000;
const configUrl = await settings.get('configUrl');
const onlineConfig = await getConfig(configUrl);
const autoUpdaterConfig: AutoUpdaterConfig = onlineConfig?.autoUpdates
?.autoUpdaterConfig ?? {
autoDownload: false,
};
for (const [key, value] of Object.entries(autoUpdaterConfig)) {
autoUpdater[key as keyof AutoUpdaterConfig] = value;
}
const checkForUpdatesTimer =
onlineConfig?.autoUpdates?.checkForUpdatesTimer ?? 300000;
checkForUpdates(checkForUpdatesTimer);
} catch (err) {
logMainProcessException(err);
logException(err);
}
}
});
return mainWindow;
}
electron.app.allowRendererProcessReuse = false;
electron.app.on('window-all-closed', electron.app.quit);
// Sending a `SIGINT` (e.g: Ctrl-C) to an Electron app that registers
@@ -245,7 +233,6 @@ async function main(): Promise<void> {
if (!electron.app.requestSingleInstanceLock()) {
electron.app.quit();
} else {
initSentryMain();
await electron.app.whenReady();
const window = await createMainWindow();
electron.app.on('second-instance', async (_event, argv) => {
@@ -267,26 +254,9 @@ async function main(): Promise<void> {
console.log('Build menu failed. ');
}
});
electron.ipcMain.on('webview-dom-ready', (_, id) => {
const webview = electron.webContents.fromId(id);
// Open link in browser if it's opened as a 'foreground-tab'
webview.setWindowOpenHandler((event) => {
const url = new URL(event.url);
if (
(url.protocol === 'http:' || url.protocol === 'https:') &&
event.disposition === 'foreground-tab' &&
// Don't open links if they're disabled by the env var
!settings.getSync('disableExternalLinks')
) {
electron.shell.openExternal(url.href);
}
return { action: 'deny' };
});
});
}
}
main();
console.time('ready-to-show');

View File

@@ -0,0 +1,333 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Drive as DrivelistDrive } from 'drivelist';
import {
BlockDevice,
File,
Http,
Metadata,
SourceDestination,
} from 'etcher-sdk/build/source-destination';
import {
MultiDestinationProgress,
OnProgressFunction,
OnFailFunction,
decompressThenFlash,
DECOMPRESSED_IMAGE_PREFIX,
} from 'etcher-sdk/build/multi-write';
import { cleanupTmpFiles } from 'etcher-sdk/build/tmp';
import * as ipc from 'node-ipc';
import { totalmem } from 'os';
import { toJSON } from '../../shared/errors';
import { GENERAL_ERROR, SUCCESS } from '../../shared/exit-codes';
import { delay, isJson } from '../../shared/utils';
import { SourceMetadata } from '../app/components/source-selector/source-selector';
import axios from 'axios';
import * as _ from 'lodash';
ipc.config.id = process.env.IPC_CLIENT_ID as string;
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT as string;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
// > If set to 0, the client will NOT try to reconnect.
// See https://github.com/RIAEvangelist/node-ipc/
//
// The purpose behind this change is for this process
// to emit a "disconnect" event as soon as the GUI
// process is closed, so we can kill this process as well.
// @ts-ignore (0 is a valid value for stopRetrying and is not the same as false)
ipc.config.stopRetrying = 0;
const DISCONNECT_DELAY = 100;
const IPC_SERVER_ID = process.env.IPC_SERVER_ID as string;
/**
* @summary Send a log debug message to the IPC server
*/
function log(message: string) {
ipc.of[IPC_SERVER_ID].emit('log', message);
}
/**
* @summary Terminate the child writer process
*/
async function terminate(exitCode: number) {
ipc.disconnect(IPC_SERVER_ID);
await cleanupTmpFiles(Date.now(), DECOMPRESSED_IMAGE_PREFIX);
process.nextTick(() => {
process.exit(exitCode || SUCCESS);
});
}
/**
* @summary Handle a child writer error
*/
async function handleError(error: Error) {
ipc.of[IPC_SERVER_ID].emit('error', toJSON(error));
await delay(DISCONNECT_DELAY);
await terminate(GENERAL_ERROR);
}
export interface FlashError extends Error {
description: string;
device: string;
code: string;
}
export interface WriteResult {
bytesWritten?: number;
devices?: {
failed: number;
successful: number;
};
errors: FlashError[];
sourceMetadata?: Metadata;
}
export interface FlashResults extends WriteResult {
skip?: boolean;
cancelled?: boolean;
}
/**
 * @summary writes the source to the destinations and validates the writes
* @param {SourceDestination} source - source
* @param {SourceDestination[]} destinations - destinations
* @param {Boolean} verify - whether to validate the writes or not
* @param {Boolean} autoBlockmapping - whether to trim ext partitions before writing
* @param {Function} onProgress - function to call on progress
* @param {Function} onFail - function to call on fail
 * @returns {Promise<{ bytesWritten, devices, errors }>}
*/
async function writeAndValidate({
source,
destinations,
verify,
autoBlockmapping,
decompressFirst,
onProgress,
onFail,
}: {
source: SourceDestination;
destinations: BlockDevice[];
verify: boolean;
autoBlockmapping: boolean;
decompressFirst: boolean;
onProgress: OnProgressFunction;
onFail: OnFailFunction;
}): Promise<WriteResult> {
const { sourceMetadata, failures, bytesWritten } = await decompressThenFlash({
source,
destinations,
onFail,
onProgress,
verify,
trim: autoBlockmapping,
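// Buffer count heuristic: scale with the number of destinations,
// capped at 256 and at one buffer per 8 MiB of total memory.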
numBuffers: Math.min(
2 + (destinations.length - 1) * 32,
256,
Math.floor(totalmem() / 1024 ** 2 / 8),
),
decompressFirst,
});
const result: WriteResult = {
bytesWritten,
devices: {
failed: failures.size,
successful: destinations.length - failures.size,
},
errors: [],
sourceMetadata,
};
for (const [destination, error] of failures) {
const err = error as FlashError;
const drive = destination as BlockDevice;
err.device = drive.device;
err.description = drive.description;
result.errors.push(err);
}
return result;
}
interface WriteOptions {
image: SourceMetadata;
destinations: DrivelistDrive[];
autoBlockmapping: boolean;
decompressFirst: boolean;
SourceType: string;
httpRequest?: any;
}
ipc.connectTo(IPC_SERVER_ID, () => {
// Remove leftover tmp files older than 1 hour
cleanupTmpFiles(Date.now() - 60 * 60 * 1000);
process.once('uncaughtException', handleError);
// Gracefully exit in the following cases. If the parent
// process detects that the child exited successfully but
// no flashing information is available, it will
// assume that the child died halfway through.
process.once('SIGINT', async () => {
await terminate(SUCCESS);
});
process.once('SIGTERM', async () => {
await terminate(SUCCESS);
});
// The IPC server failed. Abort.
ipc.of[IPC_SERVER_ID].on('error', async () => {
await terminate(SUCCESS);
});
// The IPC server was disconnected. Abort.
ipc.of[IPC_SERVER_ID].on('disconnect', async () => {
await terminate(SUCCESS);
});
ipc.of[IPC_SERVER_ID].on('write', async (options: WriteOptions) => {
/**
* @summary Progress handler
* @param {Object} state - progress state
* @example
* writer.on('progress', onProgress)
*/
const onProgress = (state: MultiDestinationProgress) => {
ipc.of[IPC_SERVER_ID].emit('state', state);
};
let exitCode = SUCCESS;
/**
* @summary Abort handler
* @example
* writer.on('abort', onAbort)
*/
const onAbort = async () => {
log('Abort');
ipc.of[IPC_SERVER_ID].emit('abort');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
const onSkip = async () => {
log('Skip validation');
ipc.of[IPC_SERVER_ID].emit('skip');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
ipc.of[IPC_SERVER_ID].on('cancel', onAbort);
ipc.of[IPC_SERVER_ID].on('skip', onSkip);
/**
* @summary Failure handler (non-fatal errors)
* @param {SourceDestination} destination - destination
* @param {Error} error - error
* @example
* writer.on('fail', onFail)
*/
const onFail = (destination: SourceDestination, error: Error) => {
ipc.of[IPC_SERVER_ID].emit('fail', {
// TODO: device should be destination
// @ts-ignore (destination.drive is private)
device: destination.drive,
error: toJSON(error),
});
};
const destinations = options.destinations.map((d) => d.device);
const imagePath = options.image.path;
log(`Image: ${imagePath}`);
log(`Devices: ${destinations.join(', ')}`);
log(`Auto blockmapping: ${options.autoBlockmapping}`);
log(`Decompress first: ${options.decompressFirst}`);
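// Open every destination drive for writing with direct (unbuffered) I/O,
// unmounting it automatically once the flash succeeds.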
const dests = options.destinations.map((destination) => {
return new BlockDevice({
drive: destination,
unmountOnSuccess: true,
write: true,
direct: true,
});
});
const { SourceType } = options;
try {
let source;
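// The source is either another block device (drive-to-drive clone), a local
// file, or an HTTP endpoint; an HTTP source may be passed as a JSON-encoded
// axios request config whose extra fields (everything but `url`) configure
// the HTTP client.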
if (options.image.drive) {
source = new BlockDevice({
drive: options.image.drive,
direct: !options.autoBlockmapping,
});
} else {
if (SourceType === File.name) {
source = new File({
path: imagePath,
});
} else {
const decodedImagePath = decodeURIComponent(imagePath);
if (isJson(decodedImagePath)) {
const imagePathObject = JSON.parse(decodedImagePath);
source = new Http({
url: imagePathObject.url,
avoidRandomAccess: true,
axiosInstance: axios.create(_.omit(imagePathObject, ['url'])),
auth: options.image.auth,
});
} else {
source = new Http({
url: imagePath,
avoidRandomAccess: true,
auth: options.image.auth,
});
}
}
}
const results = await writeAndValidate({
source,
destinations: dests,
verify: true,
autoBlockmapping: options.autoBlockmapping,
decompressFirst: options.decompressFirst,
onProgress,
onFail,
});
log(`Finish: ${results.bytesWritten}`);
results.errors = results.errors.map((error) => {
return toJSON(error);
});
ipc.of[IPC_SERVER_ID].emit('done', { results });
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
} catch (error: any) {
exitCode = GENERAL_ERROR;
ipc.of[IPC_SERVER_ID].emit('error', toJSON(error));
}
});
ipc.of[IPC_SERVER_ID].on('connect', () => {
log(
`Successfully connected to IPC server: ${IPC_SERVER_ID}, socket root ${ipc.config.socketRoot}`,
);
ipc.of[IPC_SERVER_ID].emit('ready', {});
});
});

View File

@@ -1,10 +0,0 @@
{
"bin": "build/util/child-writer.js",
"pkg": {
"assets": [
"node_modules/usb/prebuilds/darwin-x64+arm64/node.napi.node",
"node_modules/lzma-native/prebuilds/darwin-arm64/node.napi.node",
"node_modules/drivelist/build/Release/drivelist.node"
]
}
}

View File

@@ -19,8 +19,7 @@ import { join } from 'path';
import { env } from 'process';
import { promisify } from 'util';
import { getAppPath } from '../get-app-path';
import { supportedLocales } from '../../gui/app/i18n';
import { getAppPath } from '../utils';
const execFileAsync = promisify(execFile);
@@ -33,12 +32,6 @@ export async function sudo(
try {
let lang = Intl.DateTimeFormat().resolvedOptions().locale;
lang = lang.substr(0, 2);
if (supportedLocales.indexOf(lang) > -1) {
// language should be present
} else {
// fallback to eng
lang = 'en';
}
const { stdout, stderr } = await execFileAsync(
'sudo',
@@ -50,7 +43,7 @@ export async function sudo(
SUDO_ASKPASS: join(
getAppPath(),
__dirname,
`sudo-askpass.osascript-${lang}.js`,
'sudo-askpass.osascript-' + lang + '.js',
),
},
},

View File

@@ -15,11 +15,11 @@
*/
import { Drive } from 'drivelist';
import { isNil } from 'lodash';
import * as _ from 'lodash';
import * as pathIsInside from 'path-is-inside';
import * as messages from './messages';
import { SourceMetadata } from './typings/source-selector';
import { SourceMetadata } from '../gui/app/components/source-selector/source-selector';
/**
* @summary The default unknown size for things such as images and drives
@@ -210,8 +210,8 @@ export function getDriveImageCompatibilityStatuses(
});
}
if (
!isNil(drive) &&
!isNil(drive.size) &&
!_.isNil(drive) &&
!_.isNil(drive.size) &&
!isDriveLargeEnough(drive, image)
) {
statusList.push(statuses.small);
@@ -229,7 +229,7 @@ export function getDriveImageCompatibilityStatuses(
if (
image !== undefined &&
!isNil(drive) &&
!_.isNil(drive) &&
!isDriveSizeRecommended(drive, image)
) {
statusList.push(statuses.sizeNotRecommended);

View File

@@ -1,12 +0,0 @@
export function getAppPath(): string {
return (
(require('electron').app || require('@electron/remote').app)
.getAppPath()
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
.replace(/\.asar$/, () =>
process.platform === 'darwin' ? '-' + process.arch : '',
)
);
}

View File

@@ -1,23 +0,0 @@
import { GPTPartition, MBRPartition } from 'partitioninfo';
import { sourceDestination } from 'etcher-sdk';
import { DrivelistDrive } from '../drive-constraints';
export type Source = 'File' | 'BlockDevice' | 'Http';
export interface SourceMetadata extends sourceDestination.Metadata {
hasMBR?: boolean;
partitions?: MBRPartition[] | GPTPartition[];
path: string;
displayName: string;
description: string;
SourceType: Source;
drive?: DrivelistDrive;
extension?: string;
archiveExtension?: string;
auth?: Authentication;
}
export interface Authentication {
username: string;
password: string;
}

View File

@@ -14,6 +14,10 @@
* limitations under the License.
*/
import axios from 'axios';
import { app, remote } from 'electron';
import { Dictionary } from 'lodash';
import * as errors from './errors';
export function isValidPercentage(percentage: any): boolean {
@@ -29,12 +33,35 @@ export function percentageToFloat(percentage: any) {
return percentage / 100;
}
/**
* @summary Get etcher configs stored online
 * @param {String} configUrl - URL where config.json is stored
*/
export async function getConfig(configUrl?: string): Promise<Dictionary<any>> {
configUrl = configUrl ?? 'https://balena.io/etcher/static/config.json';
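// Expected response shape (assumption, mirroring how createMainWindow consumes it):
// { "autoUpdates": { "autoUpdaterConfig": { "autoDownload": false }, "checkForUpdatesTimer": 300000 } }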
const response = await axios.get(configUrl, { responseType: 'json' });
return response.data;
}
export async function delay(duration: number): Promise<void> {
await new Promise((resolve) => {
setTimeout(resolve, duration);
});
}
export function getAppPath(): string {
return (
(app || remote.app)
.getAppPath()
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
.replace(/\.asar$/, () =>
process.platform === 'darwin' ? '-' + process.arch : '',
)
);
}
export function isJson(jsonString: string) {
try {
JSON.parse(jsonString);

View File

@@ -1,201 +0,0 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as ipc from 'node-ipc';
import { toJSON } from '../shared/errors';
import { GENERAL_ERROR, SUCCESS } from '../shared/exit-codes';
import { delay } from '../shared/utils';
import { WriteOptions } from './types/types';
import { MultiDestinationProgress } from 'etcher-sdk/build/multi-write';
import { write, cleanup } from './child-writer';
import { startScanning } from './scanner';
import { getSourceMetadata } from './source-metadata';
import { DrivelistDrive } from '../shared/drive-constraints';
import { Dictionary, values } from 'lodash';
ipc.config.id = process.env.IPC_CLIENT_ID as string;
ipc.config.socketRoot = process.env.IPC_SOCKET_ROOT as string;
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
// > If set to 0, the client will NOT try to reconnect.
// See https://github.com/RIAEvangelist/node-ipc/
//
// The purpose behind this change is for this process
// to emit a "disconnect" event as soon as the GUI
// process is closed, so we can kill this process as well.
// @ts-ignore (0 is a valid value for stopRetrying and is not the same as false)
ipc.config.stopRetrying = 0;
const DISCONNECT_DELAY = 100;
const IPC_SERVER_ID = process.env.IPC_SERVER_ID as string;
/**
* @summary Send a message to the IPC server
*/
function emit(channel: string, message?: any) {
ipc.of[IPC_SERVER_ID].emit(channel, message);
}
/**
* @summary Send a log debug message to the IPC server
*/
function log(message: string) {
if (console?.log) {
console.log(message);
}
emit('log', message);
}
/**
* @summary Terminate the child process
*/
async function terminate(exitCode: number) {
ipc.disconnect(IPC_SERVER_ID);
await cleanup(Date.now());
process.nextTick(() => {
process.exit(exitCode || SUCCESS);
});
}
/**
* @summary Handle errors
*/
async function handleError(error: Error) {
emit('error', toJSON(error));
await delay(DISCONNECT_DELAY);
await terminate(GENERAL_ERROR);
}
/**
* @summary Abort handler
* @example
*/
const onAbort = async (exitCode: number) => {
log('Abort');
emit('abort');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
const onSkip = async (exitCode: number) => {
log('Skip validation');
emit('skip');
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
};
ipc.connectTo(IPC_SERVER_ID, () => {
// Gracefully exit in the following cases. If the parent
// process detects that the child exited successfully but
// no flashing information is available, it will
// assume that the child died halfway through.
process.once('uncaughtException', handleError);
process.once('SIGINT', async () => {
await terminate(SUCCESS);
});
process.once('SIGTERM', async () => {
await terminate(SUCCESS);
});
// The IPC server failed. Abort.
ipc.of[IPC_SERVER_ID].on('error', async () => {
await terminate(SUCCESS);
});
// The IPC server was disconnected. Abort.
ipc.of[IPC_SERVER_ID].on('disconnect', async () => {
await terminate(SUCCESS);
});
ipc.of[IPC_SERVER_ID].on('sourceMetadata', async (params) => {
const { selected, SourceType, auth } = JSON.parse(params);
try {
const sourceMetadata = await getSourceMetadata(
selected,
SourceType,
auth,
);
emitSourceMetadata(sourceMetadata);
} catch (error: any) {
emitFail(error);
}
});
ipc.of[IPC_SERVER_ID].on('scan', async () => {
startScanning();
});
// write handler
ipc.of[IPC_SERVER_ID].on('write', async (options: WriteOptions) => {
// Remove leftover tmp files older than 1 hour
cleanup(Date.now() - 60 * 60 * 1000);
let exitCode = SUCCESS;
ipc.of[IPC_SERVER_ID].on('cancel', () => onAbort(exitCode));
ipc.of[IPC_SERVER_ID].on('skip', () => onSkip(exitCode));
const results = await write(options);
if (results.errors.length > 0) {
results.errors = results.errors.map((error: any) => {
return toJSON(error);
});
exitCode = GENERAL_ERROR;
}
emit('done', { results });
await delay(DISCONNECT_DELAY);
await terminate(exitCode);
});
ipc.of[IPC_SERVER_ID].on('connect', () => {
log(
`Successfully connected to IPC server: ${IPC_SERVER_ID}, socket root ${ipc.config.socketRoot}`,
);
emit('ready', {});
});
});
function emitLog(message: string) {
log(message);
}
function emitState(state: MultiDestinationProgress) {
emit('state', state);
}
function emitFail(data: any) {
emit('fail', data);
}
function emitDrives(drives: Dictionary<DrivelistDrive>) {
emit('drives', JSON.stringify(values(drives)));
}
function emitSourceMetadata(sourceMetadata: any) {
emit('sourceMetadata', JSON.stringify(sourceMetadata));
}
export { emitLog, emitState, emitFail, emitDrives, emitSourceMetadata };

View File

@@ -1,200 +0,0 @@
/*
* Copyright 2023 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file handles the writer process.
*/
import {
OnProgressFunction,
OnFailFunction,
decompressThenFlash,
DECOMPRESSED_IMAGE_PREFIX,
MultiDestinationProgress,
} from 'etcher-sdk/build/multi-write';
import { totalmem } from 'os';
import { cleanupTmpFiles } from 'etcher-sdk/build/tmp';
import {
File,
Http,
BlockDevice,
SourceDestination,
} from 'etcher-sdk/build/source-destination';
import { WriteResult, FlashError, WriteOptions } from './types/types';
import { isJson } from '../shared/utils';
import { toJSON } from '../shared/errors';
import axios from 'axios';
import { omit } from 'lodash';
import { emitLog, emitState, emitFail } from './api';
async function write(options: WriteOptions) {
/**
* @summary Failure handler (non-fatal errors)
* @param {SourceDestination} destination - destination
* @param {Error} error - error
*/
const onFail = (destination: SourceDestination, error: Error) => {
emitFail({
// TODO: device should be destination
// @ts-ignore (destination.drive is private)
device: destination.drive,
error: toJSON(error),
});
};
/**
* @summary Progress handler
* @param {Object} state - progress state
* @example
* writer.on('progress', onProgress)
*/
const onProgress = (state: MultiDestinationProgress) => {
emitState(state);
};
// Write the image to the destinations
const destinations = options.destinations.map((d) => d.device);
const imagePath = options.image.path;
emitLog(`Image: ${imagePath}`);
emitLog(`Devices: ${destinations.join(', ')}`);
emitLog(`Auto blockmapping: ${options.autoBlockmapping}`);
emitLog(`Decompress first: ${options.decompressFirst}`);
const dests = options.destinations.map((destination) => {
return new BlockDevice({
drive: destination,
unmountOnSuccess: true,
write: true,
direct: true,
});
});
const { SourceType } = options;
try {
let source;
if (options.image.drive) {
source = new BlockDevice({
drive: options.image.drive,
direct: !options.autoBlockmapping,
});
} else {
if (SourceType === File.name) {
source = new File({
path: imagePath,
});
} else {
const decodedImagePath = decodeURIComponent(imagePath);
if (isJson(decodedImagePath)) {
const imagePathObject = JSON.parse(decodedImagePath);
source = new Http({
url: imagePathObject.url,
avoidRandomAccess: true,
axiosInstance: axios.create(omit(imagePathObject, ['url'])),
auth: options.image.auth,
});
} else {
source = new Http({
url: imagePath,
avoidRandomAccess: true,
auth: options.image.auth,
});
}
}
}
const results = await writeAndValidate({
source,
destinations: dests,
verify: true,
autoBlockmapping: options.autoBlockmapping,
decompressFirst: options.decompressFirst,
onProgress,
onFail,
});
return results;
} catch (error: any) {
return { errors: [error] };
}
}
/** @summary clean up tmp files */
export async function cleanup(until: number) {
await cleanupTmpFiles(until, DECOMPRESSED_IMAGE_PREFIX);
}
/**
* @summary writes the source to the destinations and validates the writes
* @param {SourceDestination} source - source
* @param {SourceDestination[]} destinations - destinations
* @param {Boolean} verify - whether to validate the writes or not
* @param {Boolean} autoBlockmapping - whether to trim ext partitions before writing
* @param {Function} onProgress - function to call on progress
* @param {Function} onFail - function to call on fail
 * @returns {Promise<{ bytesWritten, devices, errors }>}
*/
async function writeAndValidate({
source,
destinations,
verify,
autoBlockmapping,
decompressFirst,
onProgress,
onFail,
}: {
source: SourceDestination;
destinations: BlockDevice[];
verify: boolean;
autoBlockmapping: boolean;
decompressFirst: boolean;
onProgress: OnProgressFunction;
onFail: OnFailFunction;
}): Promise<WriteResult> {
const { sourceMetadata, failures, bytesWritten } = await decompressThenFlash({
source,
destinations,
onFail,
onProgress,
verify,
trim: autoBlockmapping,
numBuffers: Math.min(
2 + (destinations.length - 1) * 32,
256,
Math.floor(totalmem() / 1024 ** 2 / 8),
),
decompressFirst,
});
const result: WriteResult = {
bytesWritten,
devices: {
failed: failures.size,
successful: destinations.length - failures.size,
},
errors: [],
sourceMetadata,
};
for (const [destination, error] of failures) {
const err = error as FlashError;
const drive = destination as BlockDevice;
err.device = drive.device;
err.description = drive.description;
result.errors.push(err);
}
return result;
}
export { write };

View File

@@ -1,180 +0,0 @@
import { scanner as driveScanner } from './drive-scanner';
import * as sdk from 'etcher-sdk';
import { DrivelistDrive } from '../shared/drive-constraints';
import outdent from 'outdent';
import { Dictionary, values, keyBy, padStart } from 'lodash';
import { emitDrives } from './api';
let availableDrives: DrivelistDrive[] = [];
export function hasAvailableDrives() {
return availableDrives.length > 0;
}
driveScanner.on('error', (error) => {
// Stop the drive scanning loop in case of errors,
// otherwise we risk presenting the same error over
// and over again to the user, while also heavily
// spamming our error reporting service.
driveScanner.stop();
console.log('scanner error', error);
});
function setDrives(drives: Dictionary<DrivelistDrive>) {
availableDrives = values(drives);
emitDrives(drives);
}
function getDrives() {
return keyBy(availableDrives, 'device');
}
async function addDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
if (!(await driveIsAllowed(preparedDrive))) {
return;
}
const drives = getDrives();
drives[preparedDrive.device] = preparedDrive;
setDrives(drives);
}
function removeDrive(drive: Drive) {
const preparedDrive = prepareDrive(drive);
const drives = getDrives();
delete drives[preparedDrive.device];
setDrives(drives);
}
async function driveIsAllowed(drive: {
devicePath: string;
device: string;
raw: string;
}) {
// const driveBlacklist = (await settings.get("driveBlacklist")) || [];
const driveBlacklist: any[] = [];
return !(
driveBlacklist.includes(drive.devicePath) ||
driveBlacklist.includes(drive.device) ||
driveBlacklist.includes(drive.raw)
);
}
type Drive =
| sdk.sourceDestination.BlockDevice
| sdk.sourceDestination.UsbbootDrive
| sdk.sourceDestination.DriverlessDevice;
function prepareDrive(drive: Drive) {
if (drive instanceof sdk.sourceDestination.BlockDevice) {
// @ts-ignore (BlockDevice.drive is private)
return drive.drive;
} else if (drive instanceof sdk.sourceDestination.UsbbootDrive) {
// This is a workaround for etcher expecting a device string and a size
// @ts-ignore
drive.device = drive.usbDevice.portId;
drive.size = null;
// @ts-ignore
drive.progress = 0;
drive.disabled = true;
drive.on('progress', (progress) => {
updateDriveProgress(drive, progress);
});
return drive;
} else if (drive instanceof sdk.sourceDestination.DriverlessDevice) {
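// Driverless devices (e.g. a Compute Module in USB boot mode without the
// required Windows drivers) are shown as a disabled placeholder entry that
// links to the driver installation instructions.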
const description =
COMPUTE_MODULE_DESCRIPTIONS[
drive.deviceDescriptor.idProduct.toString()
] || 'Compute Module';
return {
device: `${usbIdToString(
drive.deviceDescriptor.idVendor,
)}:${usbIdToString(drive.deviceDescriptor.idProduct)}`,
displayName: 'Missing drivers',
description,
mountpoints: [],
isReadOnly: false,
isSystem: false,
disabled: true,
icon: 'warning',
size: null,
link: 'https://www.raspberrypi.com/documentation/computers/compute-module.html#flashing-the-compute-module-emmc',
linkCTA: 'Install',
linkTitle: 'Install missing drivers',
linkMessage: outdent`
Would you like to download the necessary drivers from the Raspberry Pi Foundation?
This will open your browser.
Once opened, download and run the installer from the "Windows Installer" section to install the drivers
`,
};
}
}
/**
* @summary The radix used by USB ID numbers
*/
const USB_ID_RADIX = 16;
/**
* @summary The expected length of a USB ID number
*/
const USB_ID_LENGTH = 4;
/**
* @summary Convert a USB id (e.g. product/vendor) to a string
*
* @example
* console.log(usbIdToString(2652))
* > '0x0a5c'
*/
function usbIdToString(id: number): string {
return `0x${padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`;
}
function updateDriveProgress(
drive: sdk.sourceDestination.UsbbootDrive,
progress: number,
) {
const drives = getDrives();
// @ts-ignore
const driveInMap = drives[drive.device];
if (driveInMap) {
// @ts-ignore
drives[drive.device] = { ...driveInMap, progress };
setDrives(drives);
}
}
/**
* @summary Product ID of BCM2708
*/
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763;
/**
* @summary Product ID of BCM2710
*/
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764;
/**
* @summary Compute module descriptions
*/
const COMPUTE_MODULE_DESCRIPTIONS: Dictionary<string> = {
[USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
[USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3',
};
const startScanning = () => {
driveScanner.on('attach', (drive) => addDrive(drive));
driveScanner.on('detach', (drive) => removeDrive(drive));
driveScanner.start();
};
const stopScanning = () => {
driveScanner.stop();
};
export { startScanning, stopScanning };

View File

@@ -1,93 +0,0 @@
/** Get metadata for a source */
import { sourceDestination } from 'etcher-sdk';
import { replaceWindowsNetworkDriveLetter } from '../gui/app/os/windows-network-drives';
import axios, { AxiosRequestConfig } from 'axios';
import { isJson } from '../shared/utils';
import * as path from 'path';
import {
SourceMetadata,
Authentication,
Source,
} from '../shared/typings/source-selector';
import { DrivelistDrive } from '../shared/drive-constraints';
import { omit } from 'lodash';
function isString(value: any): value is string {
return typeof value === 'string';
}
async function createSource(
selected: string,
SourceType: Source,
auth?: Authentication,
) {
try {
selected = await replaceWindowsNetworkDriveLetter(selected);
} catch (error: any) {
// TODO: analytics.logException(error);
}
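// A JSON-encoded selection carries a full axios request config: everything
// except `url` is passed through to the axios instance used to fetch the image.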
if (isJson(decodeURIComponent(selected))) {
const config: AxiosRequestConfig = JSON.parse(decodeURIComponent(selected));
return new sourceDestination.Http({
url: config.url!,
axiosInstance: axios.create(omit(config, ['url'])),
});
}
if (SourceType === 'File') {
return new sourceDestination.File({
path: selected,
});
}
return new sourceDestination.Http({ url: selected, auth });
}
async function getMetadata(
source: sourceDestination.SourceDestination,
selected: string | DrivelistDrive,
) {
const metadata = (await source.getMetadata()) as SourceMetadata;
const partitionTable = await source.getPartitionTable();
if (partitionTable) {
metadata.hasMBR = true;
metadata.partitions = partitionTable.partitions;
} else {
metadata.hasMBR = false;
}
if (isString(selected)) {
metadata.extension = path.extname(selected).slice(1);
metadata.path = selected;
}
return metadata;
}
async function getSourceMetadata(
selected: string | DrivelistDrive,
SourceType: Source,
auth?: Authentication,
) {
if (isString(selected)) {
const source = await createSource(selected, SourceType, auth);
try {
const innerSource = await source.getInnerSource();
const metadata = await getMetadata(innerSource, selected);
return metadata;
} catch (error: any) {
// TODO: handle error
} finally {
try {
await source.close();
} catch (error: any) {
// Noop
}
}
}
}
export { getSourceMetadata };

View File

@@ -1,33 +0,0 @@
import { Metadata } from 'etcher-sdk/build/source-destination';
import { SourceMetadata } from '../../shared/typings/source-selector';
import { Drive as DrivelistDrive } from 'drivelist';
export interface WriteResult {
bytesWritten?: number;
devices?: {
failed: number;
successful: number;
};
errors: FlashError[];
sourceMetadata?: Metadata;
}
export interface FlashError extends Error {
description: string;
device: string;
code: string;
}
export interface FlashResults extends WriteResult {
skip?: boolean;
cancelled?: boolean;
}
interface WriteOptions {
image: SourceMetadata;
destinations: DrivelistDrive[];
autoBlockmapping: boolean;
decompressFirst: boolean;
SourceType: string;
httpRequest?: any;
}

32837
npm-shrinkwrap.json generated

File diff suppressed because it is too large

17760
package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@@ -2,7 +2,7 @@
"name": "balena-etcher",
"private": true,
"displayName": "balenaEtcher",
"version": "1.18.13",
"version": "1.13.4",
"packageType": "local",
"main": "generated/etcher.js",
"description": "Flash OS images to SD cards and USB drives, safely and easily.",
@@ -13,12 +13,10 @@
"url": "git@github.com:balena-io/etcher.git"
},
"scripts": {
"build": "npm run webpack && npm run build:sidecar",
"build:rebuild-mountutils": "cd node_modules/mountutils && npm rebuild",
"build:sidecar": "npm run build:rebuild-mountutils && tsc --project tsconfig.sidecar.json && pkg build/util/api.js -c pkg-sidecar.json --target node18 --output generated/etcher-util",
"flowzone-preinstall-linux": "sudo apt-get update && sudo apt-get install -y xvfb libudev-dev && cat < electron-builder.yml | yq e .deb.depends[] - | xargs -L1 echo | sed 's/|//g' | xargs -L1 sudo apt-get --ignore-missing install || true",
"build": "npm run webpack",
"flowzone-preinstall-linux": "sudo apt-get install -y xvfb libudev-dev && cat < electron-builder.yml | yq e .deb.depends[] - | xargs -L1 echo | sed 's/|//g' | xargs -L1 sudo apt-get --ignore-missing install || true",
"flowzone-preinstall-macos": "true",
"flowzone-preinstall-windows": "npx node-gyp install",
"flowzone-preinstall-windows": "true",
"flowzone-preinstall": "npm run flowzone-preinstall-linux",
"lint-css": "prettier --write lib/**/*.css",
"lint-ts": "balena-lint --fix --typescript typings lib tests scripts/clean-shrinkwrap.ts webpack.config.ts",
@@ -26,12 +24,14 @@
"postinstall": "electron-rebuild -t prod,dev,optional",
"sanity-checks": "bash scripts/ci/ensure-all-file-extensions-in-gitattributes.sh",
"start": "./node_modules/.bin/electron .",
"test-gui": "electron-mocha --recursive --reporter spec --window-config tests/gui/window-config.json --require ts-node/register/transpile-only --require-main tests/gui/allow-renderer-process-reuse.ts --full-trace --no-sandbox --renderer tests/gui/**/*.ts",
"test-macos": "npm run lint && npm run test-gui && npm run test-shared && npm run test-spectron && npm run sanity-checks",
"test-gui": "electron-mocha --recursive --reporter spec --require ts-node/register/transpile-only --require-main tests/gui/allow-renderer-process-reuse.ts --full-trace --no-sandbox --renderer tests/gui/**/*.ts",
"test-linux": "npm run lint && xvfb-run --auto-servernum npm run test-gui && xvfb-run --auto-servernum npm run test-shared && xvfb-run --auto-servernum npm run test-spectron && npm run sanity-checks",
"test-shared": "electron-mocha --recursive --reporter spec --require ts-node/register/transpile-only --require-main tests/gui/allow-renderer-process-reuse.ts --full-trace --no-sandbox tests/shared/**/*.ts",
"test-macos": "npm run lint && npm run test-gui && npm run test-shared && npm run sanity-checks",
"test-linux": "npm run lint && xvfb-run --auto-servernum npm run test-gui && xvfb-run --auto-servernum npm run test-shared && npm run sanity-checks",
"test-windows": "npm run lint && npm run test-gui && npm run test-shared && npm run sanity-checks",
"test-spectron": "mocha --recursive --reporter spec --require ts-node/register/transpile-only --require-main tests/gui/allow-renderer-process-reuse.ts tests/spectron/runner.spec.ts",
"test-windows": "npm run lint && npm run test-gui && npm run test-shared && npm run test-spectron && npm run sanity-checks",
"test": "echo npm run test-{linux,windows,macos}",
"uploadSourcemap": "sentry-cli releases files $npm_config_SENTRY_VERSION upload-sourcemaps ./generated/*.js.map --org $npm_config_SENTRY_ORG --project $npm_config_SENTRY_PROJECT",
"watch": "webpack serve --no-optimization-minimize --config ./webpack.dev.config.ts",
"webpack": "webpack"
},
@@ -48,22 +48,20 @@
"npm run lint-css"
]
},
"author": "Balena Ltd. <hello@balena.io>",
"author": "Balena Inc. <hello@etcher.io>",
"license": "Apache-2.0",
"devDependencies": {
"@babel/register": "^7.22.15",
"@balena/lint": "5.4.2",
"@balena/sudo-prompt": "9.2.1-workaround-windows-amperstand-in-username-0849e215b947987a643fe5763902aea201255534",
"@electron/remote": "^2.0.9",
"@fortawesome/fontawesome-free": "5.15.4",
"@sentry/electron": "^4.1.2",
"@sentry/cli": "^2.11.0",
"@svgr/webpack": "5.5.0",
"@types/chai": "4.3.4",
"@types/copy-webpack-plugin": "6.4.3",
"@types/mime-types": "2.1.1",
"@types/mini-css-extract-plugin": "1.4.3",
"@types/mocha": "^9.1.1",
"@types/node": "^16.18.12",
"@types/mocha": "8.2.3",
"@types/node": "14.18.34",
"@types/node-ipc": "9.2.0",
"@types/react": "16.14.34",
"@types/react-dom": "16.9.17",
@@ -72,21 +70,20 @@
"@types/terser-webpack-plugin": "5.0.4",
"@types/tmp": "0.2.3",
"@types/webpack-node-externals": "2.5.3",
"analytics-client": "^2.0.1",
"axios": "^0.27.2",
"aws4-axios": "2.4.9",
"chai": "4.3.7",
"copy-webpack-plugin": "7.0.0",
"css-loader": "5.2.7",
"d3": "4.13.0",
"debug": "4.3.4",
"electron": "^25.8.2",
"electron-builder": "^23.6.0",
"electron-mocha": "^11.0.2",
"electron": "^13.5.0",
"electron-builder": "^23.0.9",
"electron-mocha": "9.3.3",
"electron-notarize": "1.2.2",
"electron-rebuild": "^3.2.9",
"electron-rebuild": "3.2.9",
"electron-updater": "5.3.0",
"esbuild-loader": "2.20.0",
"etcher-sdk": "8.3.1",
"etcher-sdk": "^7.4.7",
"file-loader": "6.2.0",
"husky": "4.3.8",
"i18next": "21.10.0",
@@ -94,13 +91,12 @@
"lint-staged": "10.5.4",
"lodash": "4.17.21",
"mini-css-extract-plugin": "1.6.2",
"mocha": "^9.1.1",
"mocha": "8.4.0",
"native-addon-loader": "2.0.1",
"node-ipc": "9.2.1",
"omit-deep-lodash": "1.1.7",
"outdent": "0.8.0",
"path-is-inside": "1.0.2",
"pkg": "^5.8.1",
"pnp-webpack-plugin": "1.7.0",
"pretty-bytes": "5.6.0",
"react": "16.8.5",
@@ -108,9 +104,11 @@
"react-i18next": "11.18.6",
"redux": "4.2.0",
"rendition": "19.3.2",
"resin-corvus": "2.0.5",
"semver": "7.3.8",
"simple-progress-webpack-plugin": "1.1.2",
"sinon": "9.2.4",
"spectron": "15.0.0",
"string-replace-loader": "3.1.0",
"style-loader": "2.0.0",
"styled-components": "5.3.6",
@@ -127,9 +125,9 @@
"webpack-dev-server": "4.11.1"
},
"engines": {
"node": ">=18 <20"
"node": ">=14 < 16"
},
"versionist": {
"publishedAt": "2023-10-16T13:32:27.552Z"
"publishedAt": "2023-01-12T15:10:50.986Z"
}
}

View File

@@ -1,10 +0,0 @@
{
"assets": [
"node_modules/usb/**",
"node_modules/lzma-native/**",
"node_modules/drivelist/**",
"node_modules/mountutils/**",
"node_modules/winusb-driver-generator/**",
"node_modules/node-raspberrypi-usbboot/**"
]
}

View File

@@ -1,182 +0,0 @@
/*
* This is a test wrapper for etcher-utils.
* The only use for this file is debugging while developing etcher-utils.
 * It will create an IPC server, spawn the CLI version of etcher-writer, and wait for it to connect.
* Requires elevated privileges to work (launch with sudo)
 * Note that you'll need to edit the `ipc.server.on('ready', ...)` handler based on what you want to test.
*/
import * as ipc from 'node-ipc';
import * as os from 'os';
import * as path from 'path';
import * as packageJSON from './package.json';
import * as permissions from './lib/shared/permissions';
// if (process.argv.length !== 3) {
// console.error('Expects an image to flash as only arg!');
// process.exit(1);
// }
const THREADS_PER_CPU = 16;
// There might be multiple Etcher instances running at
// the same time, therefore we must ensure each IPC
// server/client has a different name.
const IPC_SERVER_ID = `etcher-server-${process.pid}`;
const IPC_CLIENT_ID = `etcher-client-${process.pid}`;
ipc.config.id = IPC_SERVER_ID;
ipc.config.socketRoot = path.join(
process.env.XDG_RUNTIME_DIR || os.tmpdir(),
path.sep,
);
// NOTE: Ensure this isn't disabled, as it will cause
// the stdout maxBuffer size to be exceeded when flashing
ipc.config.silent = true;
function writerArgv(): string[] {
const entryPoint = path.join('./generated/etcher-util');
return [entryPoint];
}
function writerEnv() {
return {
IPC_SERVER_ID,
IPC_CLIENT_ID,
IPC_SOCKET_ROOT: ipc.config.socketRoot,
UV_THREADPOOL_SIZE: (os.cpus().length * THREADS_PER_CPU).toString(),
// This environment variable prevents the AppImages
// desktop integration script from presenting the
// "installation" dialog
SKIP: '1',
...(process.platform === 'win32' ? {} : process.env),
};
}
async function start(): Promise<any> {
ipc.serve();
return await new Promise((resolve, reject) => {
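// Log every event emitted by the child writer (log, fail, done, abort, skip,
// state, drives) so the IPC protocol can be observed while debugging.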
ipc.server.on('error', (message) => {
console.log('IPC server error', message);
});
ipc.server.on('log', (message) => {
console.log('log', message);
});
ipc.server.on('fail', ({ device, error }) => {
console.log('failure', error, device);
});
ipc.server.on('done', (event) => {
console.log('done', event);
});
ipc.server.on('abort', () => {
console.log('abort');
});
ipc.server.on('skip', () => {
console.log('skip');
});
ipc.server.on('state', (progress) => {
console.log('progress', progress);
});
ipc.server.on('drives', (drives) => {
console.log('drives', drives);
});
ipc.server.on('ready', (_data, socket) => {
console.log('ready');
ipc.server.emit(socket, 'scan', {});
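// Only a drive scan is requested here; the commented-out 'write' payload
// below documents the full shape of a flash request.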
// ipc.server.emit(socket, "hello", { message: "world" });
// ipc.server.emit(socket, "write", {
// image: {
// path: process.argv[2],
// displayName: "Random image for test",
// description: "Random image for test",
// SourceType: "File",
// },
// destinations: [
// {
// size: 15938355200,
// isVirtual: false,
// enumerator: "DiskArbitration",
// logicalBlockSize: 512,
// raw: "/dev/rdisk4",
// error: null,
// isReadOnly: false,
// displayName: "/dev/disk4",
// blockSize: 512,
// isSCSI: false,
// isRemovable: true,
// device: "/dev/disk4",
// busVersion: null,
// isSystem: false,
// busType: "USB",
// isCard: false,
// isUSB: true,
// devicePath:
// "IODeviceTree:/arm-io@10F00000/usb-drd1@2280000/usb-drd1-port-hs@01100000",
// mountpoints: [
// {
// path: "/Volumes/flash-rootB",
// label: "flash-rootB",
// },
// {
// path: "/Volumes/flash-rootA",
// label: "flash-rootA",
// },
// {
// path: "/Volumes/flash-boot",
// label: "flash-boot",
// },
// ],
// description: "Generic Flash Disk Media",
// isUAS: null,
// partitionTableType: "mbr",
// },
// ],
// SourceType: "File",
// autoBlockmapping: true,
// decompressFirst: true,
// });
});
const argv = writerArgv();
ipc.server.on('start', async () => {
console.log(`Elevating command: ${argv.join(' ')}`);
const env = writerEnv();
try {
await permissions.elevateCommand(argv, {
applicationName: packageJSON.displayName,
environment: env,
});
} catch (error: any) {
console.log('error', error);
// This happens when the child is killed using SIGKILL
const SIGKILL_EXIT_CODE = 137;
if (error.code === SIGKILL_EXIT_CODE) {
error.code = 'ECHILDDIED';
}
reject(error);
} finally {
console.log('Terminating IPC server');
}
resolve(true);
});
// Clear the update lock timer to prevent a long flash
// from timing it out and releasing the lock
ipc.server.start();
});
}
start();

View File

@@ -1,13 +1,5 @@
// tslint:disable-next-line:no-var-requires
const { app } = require('electron');
if (app !== undefined) {
// tslint:disable-next-line:no-var-requires
const remoteMain = require('@electron/remote/main');
remoteMain.initialize();
app.on('browser-window-created', (_event, window) =>
remoteMain.enable(window.webContents),
);
app.allowRendererProcessReuse = false;
}

View File

@@ -1,5 +0,0 @@
{
"webPreferences": {
"enableRemoteModule": true
}
}

View File

@@ -0,0 +1,66 @@
/*
* Copyright 2017 balena.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { expect } from 'chai';
import { platform } from 'os';
import { Application } from 'spectron';
import * as electronPath from 'electron';
// TODO: spectron fails to start on the CI with:
// Error: Failed to create session.
// unknown error: Chrome failed to start: exited abnormally
if (platform() !== 'darwin') {
describe('Spectron', function () {
// Mainly for CI jobs
this.timeout(40000);
const app = new Application({
path: electronPath as unknown as string,
args: ['--no-sandbox', '.'],
});
before('app:start', async () => {
await app.start();
});
after('app:stop', async () => {
if (app && app.isRunning()) {
await app.stop();
}
});
describe('Browser Window', () => {
it('should open a browser window', async () => {
// We can't use `isVisible()` here as it won't work inside
// a Windows Docker container, but we can approximate it
// with these set of checks:
const bounds = await app.browserWindow.getBounds();
expect(bounds.height).to.be.above(0);
expect(bounds.width).to.be.above(0);
expect(await app.browserWindow.isMinimized()).to.be.false;
expect(
(await app.browserWindow.isVisible()) ||
(await app.browserWindow.isFocused()),
).to.be.true;
});
it('should set a proper title', async () => {
// @ts-ignore (SpectronClient.getTitle exists)
return expect(await app.client.getTitle()).to.equal('balenaEtcher');
});
});
});
}

View File

@@ -1,18 +0,0 @@
{
"compilerOptions": {
"target": "ES2019",
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"typeRoots": ["./node_modules/@types", "./typings"],
"module": "CommonJS",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"outDir": "build"
},
"include": ["lib/util"]
}

View File

@@ -15,21 +15,21 @@
*/
import * as CopyPlugin from 'copy-webpack-plugin';
import { readdirSync, existsSync } from 'fs';
import * as _ from 'lodash';
import * as os from 'os';
import outdent from 'outdent';
import * as path from 'path';
import { env } from 'process';
import * as SimpleProgressWebpackPlugin from 'simple-progress-webpack-plugin';
import * as TerserPlugin from 'terser-webpack-plugin';
import {
BannerPlugin,
IgnorePlugin,
NormalModuleReplacementPlugin,
} from 'webpack';
import { BannerPlugin, NormalModuleReplacementPlugin } from 'webpack';
import * as PnpWebpackPlugin from 'pnp-webpack-plugin';
import * as tsconfigRaw from './tsconfig.webpack.json';
/**
* Don't webpack package.json as sentry tokens
* Don't webpack package.json as mixpanel & sentry tokens
* will be inserted in it after webpacking
*/
function externalPackageJson(packageJsonPath: string) {
@@ -44,6 +44,24 @@ function externalPackageJson(packageJsonPath: string) {
};
}
function platformSpecificModule(
platform: string,
module: string,
replacement = '{}',
) {
// Resolves module on platform, otherwise resolves the replacement
return (
{ request }: { context: string; request: string },
callback: (error?: Error, result?: string, type?: string) => void,
) => {
if (request === module && os.platform() !== platform) {
callback(undefined, replacement);
return;
}
callback();
};
}
function renameNodeModules(resourcePath: string) {
// electron-builder excludes the node_modules folder even if you specifically include it
// Work around by renaming it to "modules"
@@ -52,11 +70,98 @@ function renameNodeModules(resourcePath: string) {
path
.relative(__dirname, resourcePath)
.replace('node_modules', 'modules')
// use the same name on all architectures so electron-builder can build a universal dmg on mac
.replace(LZMA_BINDINGS_FOLDER, LZMA_BINDINGS_FOLDER_RENAMED)
// file-loader expects posix paths, even on Windows
.replace(/\\/g, '/')
);
}
function findExt2fsFolder(): string {
const ext2fs = 'node_modules/ext2fs';
const biFsExt2fs = 'node_modules/balena-image-fs/node_modules/ext2fs';
if (existsSync(ext2fs)) {
return ext2fs;
} else if (existsSync(biFsExt2fs)) {
return biFsExt2fs;
} else {
throw Error('ext2fs not found');
}
}
function makeExt2FsRegex(): RegExp {
const folder = findExt2fsFolder();
const libpath = '/lib/libext2fs\\.js$';
return new RegExp(folder.concat(libpath));
}
function findUsbPrebuild(): string[] {
const usbPrebuildsFolder = path.join('node_modules', 'usb', 'prebuilds');
const prebuildFolders = readdirSync(usbPrebuildsFolder);
let bindingFile: string | undefined = 'node.napi.node';
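// Prebuild folders are named `<platform>-<arch>` (e.g. linux-x64, darwin-x64+arm64);
// pick the one matching the current platform and architecture.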
const platformFolder = prebuildFolders.find(
(f) => f.startsWith(os.platform()) && f.indexOf(os.arch()) > -1,
);
if (platformFolder === undefined) {
throw new Error(
'Could not find usb prebuild. Should try fallback to node-gyp and use /build/Release instead of /prebuilds',
);
}
const bindingFiles = readdirSync(
path.join(usbPrebuildsFolder, platformFolder),
);
if (!bindingFiles.length) {
throw new Error('Could not find usb prebuild for platform');
}
if (bindingFiles.length === 1) {
bindingFile = bindingFiles[0];
}
// armv6 vs v7 in linux-arm and
// glibc vs musl in linux-x64
if (bindingFiles.length > 1) {
bindingFile = bindingFiles.find((file) => {
if (platformFolder.indexOf('arm') > -1) {
const process = require('process');
return file.indexOf(process.config.variables.arm_version) > -1;
} else {
return file.indexOf('glibc') > -1;
}
});
}
if (bindingFile === undefined) {
throw new Error('Could not find usb prebuild for platform');
}
return [platformFolder, bindingFile];
}
const [USB_BINDINGS_FOLDER, USB_BINDINGS_FILE] = findUsbPrebuild();
function findLzmaNativeBindingsFolder(): string {
const files = readdirSync(
path.join('node_modules', 'lzma-native', 'prebuilds'),
);
const bindingsFolder = files.find(
(f) =>
f.startsWith(os.platform()) &&
f.endsWith(env.npm_config_target_arch || os.arch()),
);
if (bindingsFolder === undefined) {
throw new Error('Could not find lzma_native binding');
}
return bindingsFolder;
}
const LZMA_BINDINGS_FOLDER = findLzmaNativeBindingsFolder();
const LZMA_BINDINGS_FOLDER_RENAMED = 'binding';
interface ReplacementRule {
search: string;
replace: string | (() => string);
@@ -75,6 +180,31 @@ function replace(test: RegExp, ...replacements: ReplacementRule[]) {
};
}
function fetchWasm(...where: string[]) {
const whereStr = where.map((x) => `'${x}'`).join(', ');
return outdent`
const Path = require('path');
let electron;
try {
// This doesn't exist in the child-writer
electron = require('electron');
} catch {
}
function appPath() {
return Path.isAbsolute(__dirname) ?
__dirname :
Path.join(
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
electron.remote.app.getAppPath().replace(/\\.asar$/, () => process.platform === 'darwin' ? '-' + process.arch : ''),
'generated'
);
}
scriptDirectory = Path.join(appPath(), 'modules', ${whereStr}, '/');
`;
}
const commonConfig = {
mode: 'production',
optimization: {
@@ -133,10 +263,108 @@ const commonConfig = {
search: './adapters/xhr',
replace: './adapters/http',
}),
// remove bindings magic from drivelist
replace(
/node_modules\/drivelist\/js\/index\.js$/,
{
search: 'require("bindings");',
replace: "require('../build/Release/drivelist.node')",
},
{
search: "bindings('drivelist')",
replace: 'bindings',
},
),
replace(
/node_modules\/lzma-native\/index\.js$/,
// remove node-pre-gyp magic from lzma-native
{
search: `require('node-gyp-build')(__dirname);`,
replace: `require('./prebuilds/${LZMA_BINDINGS_FOLDER}/electron.napi.node')`,
},
// use regular stream module instead of readable-stream
{
search: "var stream = require('readable-stream');",
replace: "var stream = require('stream');",
},
),
// remove node-pre-gyp magic from usb
replace(/node_modules\/usb\/dist\/usb\/bindings\.js$/, {
search: `require('node-gyp-build')(path_1.join(__dirname, '..', '..'));`,
replace: `require('../../prebuilds/${USB_BINDINGS_FOLDER}/${USB_BINDINGS_FILE}')`,
}),
// remove bindings magic from mountutils
replace(/node_modules\/mountutils\/index\.js$/, {
search: outdent`
require('bindings')({
bindings: 'MountUtils',
/* eslint-disable camelcase */
module_root: __dirname
/* eslint-enable camelcase */
})
`,
replace: "require('./build/Release/MountUtils.node')",
}),
// remove bindings magic from winusb-driver-generator
replace(/node_modules\/winusb-driver-generator\/index\.js$/, {
search: outdent`
require('bindings')({
bindings: 'Generator',
/* eslint-disable camelcase */
module_root: __dirname
/* eslint-enable camelcase */
});
`,
replace: "require('./build/Release/Generator.node')",
}),
// Use the copy of blobs in the generated folder and rename node_modules -> modules
// See the renameNodeModules function above
replace(/node_modules\/node-raspberrypi-usbboot\/build\/index\.js$/, {
search:
"return await readFile(Path.join(__dirname, '..', 'blobs', filename));",
replace: outdent`
const { app, remote } = require('electron');
return await readFile(
Path.join(
// With macOS universal builds, getAppPath() returns the path to an app.asar file containing an index.js file which will
// include the app-x64 or app-arm64 folder depending on the arch.
// We don't care about the app.asar file, we want the actual folder.
(app || remote.app).getAppPath().replace(/\\.asar$/, () => process.platform === 'darwin' ? '-' + process.arch : ''),
'generated',
__dirname.replace('node_modules', 'modules'),
'..',
'blobs',
filename
)
);
`,
}),
// Use the libext2fs.wasm file in the generated folder
// The way to find the app directory depends on whether we run in the renderer or in the child-writer
// We use __dirname in the child-writer and electron.remote.app.getAppPath() in the renderer
replace(makeExt2FsRegex(), {
search: 'scriptDirectory = __dirname + "/";',
replace: fetchWasm('ext2fs', 'lib'),
}),
// Same for node-crc-utils
replace(/node_modules\/@balena\/node-crc-utils\/crc32\.js$/, {
search: 'scriptDirectory=__dirname+"/"',
replace: fetchWasm('@balena', 'node-crc-utils'),
}),
// Copy native modules to generated folder
{
test: /\.node$/,
use: [
{
loader: 'native-addon-loader',
options: { name: renameNodeModules },
},
],
},
],
},
resolve: {
extensions: ['.js', '.json', '.ts', '.tsx'],
extensions: ['.node', '.js', '.json', '.ts', '.tsx'],
},
plugins: [
PnpWebpackPlugin,
@@ -149,14 +377,6 @@ const commonConfig = {
slashOrAntislash(/node_modules\/axios\/lib\/adapters\/xhr\.js/),
'./http.js',
),
// Ignore `aws-crt` which is a dependency of (ultimately) `aws4-axios` which is used
// by etcher-sdk and does a runtime check for its availability. We're not currently
// using the “assume role” functionality (AFAIU) of aws4-axios and we don't care that
// it's not found, so force webpack to ignore the import.
// See https://github.com/aws/aws-sdk-js-v3/issues/3025
new IgnorePlugin({
resourceRegExp: /^aws-crt$/,
}),
],
resolveLoader: {
plugins: [PnpWebpackPlugin.moduleLoader(module)],
@@ -168,9 +388,40 @@ const commonConfig = {
externals: [
// '../package.json' because we are in 'generated'
externalPackageJson('../package.json'),
// Only exists on windows
platformSpecificModule('win32', 'winusb-driver-generator'),
// Not needed but required by resin-corvus > os-locale > execa > cross-spawn
platformSpecificModule('none', 'spawn-sync'),
// Not needed as we replace all requires for it
platformSpecificModule('none', 'node-pre-gyp', '{ find: () => {} }'),
// Not needed as we replace all requires for it
platformSpecificModule('none', 'bindings'),
],
};
const guiConfigCopyPatterns = [
{
from: 'node_modules/node-raspberrypi-usbboot/blobs',
to: 'modules/node-raspberrypi-usbboot/blobs',
},
{
from: `${findExt2fsFolder()}/lib/libext2fs.wasm`,
to: 'modules/ext2fs/lib/libext2fs.wasm',
},
{
from: 'node_modules/@balena/node-crc-utils/crc32.wasm',
to: 'modules/@balena/node-crc-utils/crc32.wasm',
},
];
if (os.platform() === 'win32') {
// liblzma.dll is required on Windows for lzma-native
guiConfigCopyPatterns.push({
from: `node_modules/lzma-native/prebuilds/${LZMA_BINDINGS_FOLDER}/liblzma.dll`,
to: `modules/lzma-native/prebuilds/${LZMA_BINDINGS_FOLDER_RENAMED}/liblzma.dll`,
});
}
const guiConfig = {
...commonConfig,
target: 'electron-renderer',
@@ -181,6 +432,7 @@ const guiConfig = {
entry: {
gui: path.join(__dirname, 'lib', 'gui', 'app', 'renderer.ts'),
},
// entry: path.join(__dirname, 'lib', 'gui', 'app', 'renderer.ts'),
plugins: [
...commonConfig.plugins,
new CopyPlugin({
@@ -196,12 +448,14 @@ const guiConfig = {
banner: '__REACT_DEVTOOLS_GLOBAL_HOOK__ = { isDisabled: true };',
raw: true,
}),
new CopyPlugin({ patterns: guiConfigCopyPatterns }),
],
};
const mainConfig = {
...commonConfig,
target: 'electron-main',
devtool: 'source-map',
node: {
__dirname: false,
__filename: true,
@@ -215,4 +469,17 @@ const etcherConfig = {
},
};
export default [guiConfig, etcherConfig];
const childWriterConfig = {
...mainConfig,
entry: {
'child-writer': path.join(
__dirname,
'lib',
'gui',
'modules',
'child-writer.ts',
),
},
};
export default [guiConfig, etcherConfig, childWriterConfig];