Mirror of https://github.com/balena-io/etcher.git, synced 2025-07-15 15:26:31 +00:00

Remove lib/sdk and its tests

This commit is contained in:
parent a42e81cf8c
commit e68dbcf4ee

Makefile (11)
@@ -164,7 +164,6 @@ TARGETS = \
 	lint-spell \
 	test-spectron \
 	test-gui \
-	test-sdk \
 	test-cli \
 	test \
 	sanity-checks \
@@ -221,17 +220,11 @@ test-spectron:
 test-gui:
 	electron-mocha $(MOCHA_OPTIONS) --renderer tests/gui
 
-test-sdk:
-	electron-mocha $(MOCHA_OPTIONS) \
-		tests/shared \
-		tests/image-stream
-
 test-cli:
 	mocha $(MOCHA_OPTIONS) \
-		tests/shared \
-		tests/image-stream
+		tests/shared
 
-test: test-gui test-sdk test-spectron
+test: test-gui test-spectron
 
 help:
 	@echo "Available targets: $(TARGETS)"
@@ -40,24 +40,6 @@ to submit their work or bug reports.
 
 These are the main Etcher components, in a nutshell:
 
-- [Etcher Image Write][etcher-image-write]
-
-  This is the repository that implements the actual procedures to write an image
-  to a raw device and the place where image validation resides. Its main purpose
-  is to abstract the messy details of interacting with raw devices in all major
-  operating systems.
-
-- [Etcher Image Stream](../lib/image-stream)
-
-  > (Moved from a separate repository into the main Etcher codebase)
-
-  This module converts any kind of input into a readable stream
-  representing the image so it can be plugged to [etcher-image-write]. Inputs
-  that this module might handle could be, for example: a simple image file, a URL
-  to an image, a compressed image, an image inside a ZIP archive, etc. Together
-  with [etcher-image-write], these modules are the building blocks needed to take
-  an image representation to the user's device, the "Etcher's backend".
-
 - [Drivelist](https://github.com/balena-io-modules/drivelist)
 
   As the name implies, this module's duty is to detect the connected drives
@@ -106,7 +88,6 @@ since fresh eyes could help unveil things that we take for granted, but should
 be documented instead!
 
 [lego-blocks]: https://github.com/sindresorhus/ama/issues/10#issuecomment-117766328
-[etcher-image-write]: https://github.com/balena-io-modules/etcher-image-write
 [exit-codes]: https://github.com/balena-io/etcher/blob/master/lib/shared/exit-codes.js
 [cli-dir]: https://github.com/balena-io/etcher/tree/master/lib/cli
 [gui-dir]: https://github.com/balena-io/etcher/tree/master/lib/gui
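The removed documentation above describes the "backend" composition: any input is normalised into a readable stream of the image, which is then piped into a writer for the target device. A minimal sketch of that idea using only Node core streams follows; getImageStream and writeImage are hypothetical stand-ins, not the real image-stream or etcher-image-write APIs, which are considerably richer.

'use strict'

const fs = require('fs')
const zlib = require('zlib')

// Stand-in for image-stream: turn an input (a plain or gzipped file here)
// into a readable stream representing the raw image.
const getImageStream = (filename) => {
  const source = fs.createReadStream(filename)
  return filename.endsWith('.gz') ? source.pipe(zlib.createGunzip()) : source
}

// Stand-in for the writer: here we just stream the image into another file.
const writeImage = (imageStream, targetPath, callback) => {
  const target = fs.createWriteStream(targetPath)
  imageStream.pipe(target).on('finish', callback).on('error', callback)
}

writeImage(getImageStream('image.img.gz'), '/tmp/flashed.img', (error) => {
  console.log(error || 'Done')
})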
@@ -1,120 +0,0 @@
/*
 * Copyright 2017 resin.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict'

const _ = require('lodash')
const Bluebird = require('bluebird')
const EventEmitter = require('events')
const drivelist = Bluebird.promisifyAll(require('drivelist'))

const USBBOOT_RPI_COMPUTE_MODULE_NAMES = [
  '0001',
  'RPi-MSD- 0001',
  'File-Stor Gadget',
  'Linux File-Stor Gadget USB Device',
  'Linux File-Stor Gadget Media'
]

/**
 * @summary BlockDeviceAdapter
 * @class
 */
class BlockDeviceAdapter extends EventEmitter {
  /**
   * @summary BlockDeviceAdapter constructor
   * @class
   * @example
   * const adapter = new BlockDeviceAdapter()
   */
  constructor () {
    super()

    /** @type {String} Adapter name */
    this.id = this.constructor.id

    this.devices = []
    this.on('devices', (devices) => {
      this.devices = devices
    })
  }

  /**
   * @summary Scan for block devices
   * @public
   *
   * @param {Object} [options] - options
   * @param {Object} [options.includeSystemDrives=false] - include system drives
   * @param {Function} [callback] - optional callback
   * @returns {BlockDeviceAdapter}
   *
   * @example
   * adapter.scan({
   *   includeSystemDrives: true
   * }, (error, devices) => {
   *   // ...
   * })
   */
  scan (options = {}, callback) {
    // eslint-disable-next-line lodash/prefer-lodash-method
    drivelist.listAsync().map((drive) => {
      drive.adapter = this.id

      // TODO: Find a better way to detect that a certain
      // block device is a compute module initialized
      // through usbboot.
      if (_.includes(USBBOOT_RPI_COMPUTE_MODULE_NAMES, drive.description)) {
        drive.description = 'Compute Module'
        drive.icon = 'raspberrypi'
        drive.isSystem = false
      }

      return drive
    }).catch((error) => {
      this.emit('error', error)
      callback && callback(error)
    }).filter((drive) => {
      // Always ignore RAID attached devices, as they are in danger-country;
      // Even flashing RAIDs intentionally can have unintended effects
      if (drive.busType === 'RAID') {
        return false
      }
      return !drive.error && Number.isFinite(drive.size) && (options.includeSystemDrives || !drive.isSystem)
    }).map((drive) => {
      drive.displayName = drive.device
      if (/PhysicalDrive/i.test(drive.device) && drive.mountpoints.length) {
        drive.displayName = _.map(drive.mountpoints, 'path').join(', ')
      }
      return drive
    }).then((drives) => {
      this.emit('devices', drives)
      callback && callback(null, drives)
    })

    return this
  }
}

/**
 * @summary The name of this adapter
 * @public
 * @type {String}
 * @constant
 */
BlockDeviceAdapter.id = 'blockdevice'

// Exports
module.exports = BlockDeviceAdapter
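A minimal usage sketch of the removed adapter, following its own JSDoc example above. The require path './blockdevice' is an assumption about where the caller lives relative to the file; results arrive both through the optional callback and through the 'devices' event.

'use strict'

const BlockDeviceAdapter = require('./blockdevice')
const adapter = new BlockDeviceAdapter()

adapter.on('error', (error) => {
  console.error('Scan failed:', error.message)
})

adapter.scan({
  includeSystemDrives: false
}, (error, drives) => {
  if (error) {
    return
  }
  drives.forEach((drive) => {
    console.log(`${drive.displayName} (${drive.size} bytes, adapter: ${drive.adapter})`)
  })
})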
@@ -1,46 +0,0 @@
/*
 * Copyright 2017 resin.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict'

const _ = require('lodash')
const os = require('os')

const permissions = require('../../shared/permissions')

/**
 * @summary The list of loaded adapters
 * @type {Object[]}
 * @constant
 */
const ADAPTERS = [
  require('./blockdevice')
]

// On GNU/Linux, we only support usbboot when running as root.
if ((os.platform() !== 'linux') || permissions.isElevatedUnixSync()) {
  ADAPTERS.push(require('./usbboot'))
}

/**
 * @summary Initialised adapters
 * @type {Object<String,Adapter>}
 * @constant
 */
module.exports = _.reduce(ADAPTERS, (adapters, Adapter) => {
  adapters[Adapter.id] = new Adapter()
  return adapters
}, {})
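The registry above exports an object keyed by adapter id ('blockdevice', plus 'usbboot' where it is supported), with each value already instantiated. A short consumer sketch follows; the require path './adapters' is an assumption about the caller's location.

'use strict'

const adapters = require('./adapters')

// Pick an adapter by its id and start a scan.
adapters.blockdevice.scan({
  includeSystemDrives: false
}, (error, drives) => {
  if (error) {
    console.error(error.message)
    return
  }
  console.log(`blockdevice: ${drives.length} drive(s) detected`)
})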
@@ -1,339 +0,0 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,4 +0,0 @@
gpu_mem=16
dtoverlay=dwc2,dr_mode=peripheral
dtparam=act_led_trigger=none
dtparam=act_led_activelow=off
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,30 +0,0 @@
Copyright (c) 2006, Broadcom Corporation.
Copyright (c) 2015, Raspberry Pi (Trading) Ltd
All rights reserved.

Redistribution. Redistribution and use in binary form, without
modification, are permitted provided that the following conditions are
met:

* This software may only be used for the purposes of developing for,
  running or using a Raspberry Pi device.
* Redistributions must reproduce the above copyright notice and the
  following disclaimer in the documentation and/or other materials
  provided with the distribution.
* Neither the name of Broadcom Corporation nor the names of its suppliers
  may be used to endorse or promote products derived from this software
  without specific prior written permission.

DISCLAIMER. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,642 +0,0 @@
/*
 * Copyright 2017 resin.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This work is heavily based on https://github.com/raspberrypi/usbboot
 * Copyright 2016 Raspberry Pi Foundation
 */

'use strict'

const _ = require('lodash')
const fs = require('fs')
const path = require('path')
const EventEmitter = require('events')
const Bluebird = require('bluebird')
const debug = require('debug')('etcher:sdk:usbboot')
const usb = require('./usb')
const protocol = require('./protocol')
const utils = require('../../../shared/utils')

debug.enabled = true

/**
 * @summary The radix used by USB ID numbers
 * @type {Number}
 * @constant
 */
const USB_ID_RADIX = 16

/**
 * @summary The expected length of a USB ID number
 * @type {Number}
 * @constant
 */
const USB_ID_LENGTH = 4

/**
 * @summary Vendor ID of "Broadcom Corporation"
 * @type {Number}
 * @constant
 */
const USB_VENDOR_ID_BROADCOM_CORPORATION = 0x0a5c

/**
 * @summary Product ID of BCM2708
 * @type {Number}
 * @constant
 */
const USB_PRODUCT_ID_BCM2708_BOOT = 0x2763

/**
 * @summary Product ID of BCM2710
 * @type {Number}
 * @constant
 */
const USB_PRODUCT_ID_BCM2710_BOOT = 0x2764

/**
 * @summary The timeout for USB device operations
 * @type {Number}
 * @constant
 */
const USB_OPERATION_TIMEOUT_MS = 1000

/**
 * @summary The number of USB endpoint interfaces in devices with a BCM2835 SoC
 * @type {Number}
 * @constant
 */
const USB_ENDPOINT_INTERFACES_SOC_BCM2835 = 1

/**
 * @summary The USB device descriptor index of an empty property
 * @type {Number}
 * @constant
 */
const USB_DESCRIPTOR_NULL_INDEX = 0

/**
 * @summary usbboot bootcode file name
 * @type {String}
 * @constant
 */
const USBBOOT_BOOTCODE_FILE_NAME = 'bootcode.bin'

/**
 * @summary List of usbboot capable devices
 * @type {Object[]}
 * @constant
 */
const USBBOOT_CAPABLE_USB_DEVICES = [

  // BCM2835

  {
    vendorID: USB_VENDOR_ID_BROADCOM_CORPORATION,
    productID: USB_PRODUCT_ID_BCM2708_BOOT
  },

  // BCM2837

  {
    vendorID: USB_VENDOR_ID_BROADCOM_CORPORATION,
    productID: USB_PRODUCT_ID_BCM2710_BOOT
  }

]

/**
 * @summary Compute module descriptions
 * @type {Object}
 * @constant
 */
const COMPUTE_MODULE_DESCRIPTIONS = {
  [USB_PRODUCT_ID_BCM2708_BOOT]: 'Compute Module 1',
  [USB_PRODUCT_ID_BCM2710_BOOT]: 'Compute Module 3'
}

/**
 * @summary Estimated device reboot delay
 * @type {Number}
 * @constant
 */
const DEVICE_REBOOT_DELAY = 6000

/**
 * @summary The initial step of the file server usbboot phase
 * @constant
 * @type {Number}
 * @private
 */
const DEFAULT_FILE_SERVER_STEP = 1

/**
 * @summary Convert a USB id (e.g. product/vendor) to a string
 * @function
 * @private
 *
 * @param {Number} id - USB id
 * @returns {String} string id
 *
 * @example
 * console.log(usbIdToString(2652))
 * > '0x0a5c'
 */
const usbIdToString = (id) => {
  return `0x${_.padStart(id.toString(USB_ID_RADIX), USB_ID_LENGTH, '0')}`
}

/**
 * @summary Check if a USB device object is usbboot-capable
 * @function
 * @private
 *
 * @param {Object} device - device
 * @returns {Boolean} whether the device is usbboot-capable
 *
 * @example
 * if (isUsbBootCapableUSBDevice({ ... })) {
 *   console.log('We can use usbboot on this device')
 * }
 */
const isUsbBootCapableUSBDevice = (device) => {
  return _.some(USBBOOT_CAPABLE_USB_DEVICES, {
    vendorID: device.deviceDescriptor.idVendor,
    productID: device.deviceDescriptor.idProduct
  })
}

/**
 * @summary USBBootAdapter
 * @class
 */
class USBBootAdapter extends EventEmitter {
  /**
   * @summary USBBootAdapter constructor
   * @class
   * @example
   * const adapter = new USBBootAdapter()
   */
  constructor () {
    super()

    /** @type {String} Adapter name */
    this.id = this.constructor.id

    /** @type {Object} Blob cache */
    this.blobCache = {}

    /** @type {Object} Progress hash */
    this.progress = {}

    this.devices = []
    this.on('devices', (devices) => {
      this.devices = devices
    })
  }

  /**
   * @summary Query a blob from the internal cache
   * @private
   *
   * @param {String} name - blob name
   * @fulfil {Buffer} - blob
   * @returns {Promise}
   *
   * @example
   * const Bluebird = require('bluebird')
   * const fs = Bluebird.promisifyAll(require('fs'))
   *
   * const blob = adapter.queryBlobFromCache('start.elf')
   */
  queryBlobFromCache (name) {
    if (this.blobCache[name]) {
      return Bluebird.resolve(this.blobCache[name])
    }

    return USBBootAdapter.readBlob(name).tap((buffer) => {
      this.blobCache[name] = buffer
    })
  }

  /**
   * @summary Scan for usbboot capable USB devices
   * @public
   *
   * @description
   * You should at the very least pass a file named `bootcode.bin`.
   *
   * @param {Object} options - options
   * @param {Object} options.files - files buffers
   * @param {Function} [callback] - optional callback
   * @returns {USBBootAdapter}
   *
   * @example
   * adapter.scan({
   *   files: {
   *     'bootcode.bin': fs.readFileSync('./msd/bootcode.bin'),
   *     'start.elf': fs.readFileSync('./msd/start.elf')
   *   }
   * }, (error, devices) => {
   *   // ...
   * })
   */
  scan (options = {}, callback) {
    /* eslint-disable lodash/prefer-lodash-method */
    usb.listDevices().filter(isUsbBootCapableUSBDevice).map((device) => {
      /* eslint-enable lodash/prefer-lodash-method */

      const description = COMPUTE_MODULE_DESCRIPTIONS[device.deviceDescriptor.idProduct] || 'Compute Module'

      if (!device.accessible) {
        return {
          device: `${usbIdToString(device.deviceDescriptor.idVendor)}:${usbIdToString(device.deviceDescriptor.idProduct)}`,
          displayName: 'Missing drivers',
          description,
          mountpoints: [],
          isReadOnly: false,
          isSystem: false,
          disabled: true,
          icon: 'warning',
          size: null,
          link: 'https://www.raspberrypi.org/documentation/hardware/computemodule/cm-emmc-flashing.md',
          linkCTA: 'Install',
          linkTitle: 'Install missing drivers',
          linkMessage: [
            'Would you like to download the necessary drivers from the Raspberry Pi Foundation?',
            'This will open your browser.\n\n',
            'Once opened, download and run the installer from the "Windows Installer" section to install the drivers.'
          ].join(' '),
          adaptor: USBBootAdapter.id
        }
      }

      // This is the only way we can unique identify devices
      device.raw = `${device.busNumber}:${device.deviceAddress}`

      const result = {
        device: device.raw,
        raw: device.raw,
        displayName: 'Initializing device',

        // At this point we can't differentiate between CMs any more, so
        // we can't use the description that changes depending on the PID.
        description: 'Compute Module',

        size: null,
        mountpoints: [],
        isReadOnly: false,
        isSystem: false,
        disabled: true,
        icon: 'loading',
        vendor: usbIdToString(device.deviceDescriptor.idVendor),
        product: usbIdToString(device.deviceDescriptor.idProduct),
        adaptor: USBBootAdapter.id
      }

      if (_.isNil(this.progress[result.raw])) {
        this.prepare(device).catch((error) => {
          this.emit('error', error)
        })
      }

      result.progress = this.progress[result.raw]

      if (result.progress === utils.PERCENTAGE_MAXIMUM) {
        return Bluebird.delay(DEVICE_REBOOT_DELAY).return(result)
      }

      return result

    // See http://bluebirdjs.com/docs/api/promise.map.html
    }, {
      concurrency: 5
    }).catch((error) => {
      this.emit('error', error)
      callback && callback(error)
    }).then((devices) => {
      this.emit('devices', devices)
      callback && callback(null, devices)
    })

    return this
  }

  /**
   * @summary Prepare a usbboot device
   * @function
   * @private
   *
   * @param {Object} device - node-usb device
   * @returns {Promise}
   *
   * @example
   * const fs = Bluebird.promisifyAll(require('fs'))
   * const usb = require('usb')
   * const device = usb.findByIds(0x0a5c, 0x2763)
   *
   * adapter.prepare(device, (name) => {
   *   return fs.readFileAsync(name)
   * }).then(() => {
   *   console.log('Done!')
   * })
   */
  prepare (device) {
    /**
     * @summary Set device progress
     * @function
     * @private
     *
     * @param {Number} percentage - percentage
     *
     * @example
     * setProgress(90)
     */
    const setProgress = (percentage) => {
      debug(`%c[${device.raw}] -> ${Math.floor(percentage)}%%`, 'color:red;')
      this.progress[device.raw] = percentage
    }

    const serialNumberIndex = device.deviceDescriptor.iSerialNumber
    debug(`Serial number index: ${serialNumberIndex}`)
    if (serialNumberIndex === USB_DESCRIPTOR_NULL_INDEX) {
      // eslint-disable-next-line no-magic-numbers
      setProgress(10)
    } else {
      // eslint-disable-next-line no-magic-numbers
      setProgress(15)
    }

    return Bluebird.try(() => {
      // We need to open the device in order to access _configDescriptor
      debug(`Opening device: ${device.raw}`)
      device.open()

      // Ensures we don't wait forever if an issue occurs
      device.timeout = USB_OPERATION_TIMEOUT_MS

      // Handle 2837 where it can start with two interfaces, the first
      // is mass storage the second is the vendor interface for programming
      const addresses = {}
      /* eslint-disable no-underscore-dangle */
      if (device._configDescriptor.bNumInterfaces === USB_ENDPOINT_INTERFACES_SOC_BCM2835) {
        /* eslint-enable no-underscore-dangle */
        addresses.interface = 0
        addresses.endpoint = 1
      } else {
        addresses.interface = 1
        addresses.endpoint = 3
      }

      const deviceInterface = device.interface(addresses.interface)
      debug(`Claiming interface: ${addresses.interface}`)

      try {
        deviceInterface.claim()
      } catch (error) {
        if (error.message === 'LIBUSB_ERROR_NO_DEVICE') {
          debug('Couldn\'t claim the interface. Assuming the device is gone')
          return null
        }

        throw error
      }

      const endpoint = deviceInterface.endpoint(addresses.endpoint)

      if (serialNumberIndex === USB_DESCRIPTOR_NULL_INDEX) {
        return this.queryBlobFromCache(USBBOOT_BOOTCODE_FILE_NAME).then((bootcode) => {
          return USBBootAdapter.writeBootCode(device, endpoint, bootcode)
        })
      }

      debug('Starting file server')

      const PERCENTAGE_START = 20
      const PERCENTAGE_TOTAL = 95

      // TODO: Find a way to not hardcode these values, and instead
      // figure out the correct number for each board on the fly.
      // This might be possible once we implement proper device
      // auto-discovery. For now, we assume the worst case scenario.
      // eslint-disable-next-line no-magic-numbers
      const STEPS_TOTAL = 38

      return this.startFileServer(device, endpoint, {
        progress: (step) => {
          setProgress((step * (PERCENTAGE_TOTAL - PERCENTAGE_START) / STEPS_TOTAL) + PERCENTAGE_START)
        }
      }).tap(() => {
        setProgress(utils.PERCENTAGE_MAXIMUM)
      })
    }).return(device).catch({
      message: 'LIBUSB_TRANSFER_CANCELLED'
    }, {
      message: 'LIBUSB_ERROR_NO_DEVICE'
    }, _.constant(null)).tap((result) => {
      if (result) {
        result.close()
      }
    }).finally(() => {
      return Bluebird.delay(DEVICE_REBOOT_DELAY).then(() => {
        Reflect.deleteProperty(this.progress, device.raw)
      })
    })
  }

  /**
   * @summary Write bootcode to USB device (usbboot first stage)
   * @private
   *
   * @description
   * After this stage is run, the USB will be re-mounted as 0x0a5c:0x2764.
   *
   * @param {Object} device - node-usb device
   * @param {Object} endpoint - node-usb endpoint
   * @param {Buffer} bootCodeBuffer - bootcode buffer
   * @returns {Promise}
   *
   * @example
   * const usb = require('usb')
   * const device = usb.findByIds(0x0a5c, 0x2763)
   * const bootcode = fs.readFileSync('./bootcode.bin')
   *
   * adapter.writeBootCode(device, device.interfaces(0).endpoint(1), bootcode).then(() => {
   *   console.log('Done!')
   * })
   */
  static writeBootCode (device, endpoint, bootCodeBuffer) {
    debug('Writing bootcode')
    debug(`Bootcode buffer length: ${bootCodeBuffer.length}`)
    const bootMessageBuffer = protocol.createBootMessageBuffer(bootCodeBuffer.length)

    debug('Writing boot message buffer to out endpoint')
    return protocol.write(device, endpoint, bootMessageBuffer).then(() => {
      debug('Writing boot code buffer to out endpoint')
      return protocol.write(device, endpoint, bootCodeBuffer)
    }).then(() => {
      debug('Reading return code from device')
      return protocol.read(device, protocol.RETURN_CODE_LENGTH)
    }).then((data) => {
      const returnCode = data.readInt32LE()
      debug(`Received return code: ${returnCode}`)

      if (returnCode !== protocol.RETURN_CODE_SUCCESS) {
        throw new Error(`Couldn't write the bootcode, got return code ${returnCode} from device`)
      }
    })
  }

  /**
   * @summary Mount a USB device as a block device (usbboot second stage)
   * @private
   *
   * @description
   * The possible files you can pass here are:
   *
   * - autoboot.txt
   * - config.txt
   * - recovery.elf
   * - start.elf
   * - fixup.dat
   *
   * @param {Object} device - node-usb device
   * @param {Object} endpoint - node-usb endpoint
   * @param {Object} options - options
   * @param {Function} options.progress - progress function (step)
   * @param {Number} [step] - current step (used internally)
   * @returns {Promise}
   *
   * @example
   * const fs = Bluebird.promisifyAll(require('fs'))
   * const usb = require('usb')
   * const device = usb.findByIds(0x0a5c, 0x2763)
   *
   * adapter.startFileServer(device, device.interfaces(0).endpoint(1), {
   *   progress: (step) => {
   *     console.log(`Currently on step ${step}`)
   *   }
   * }).then(() => {
   *   console.log('Done!')
   * })
   */
  startFileServer (device, endpoint, options, step = DEFAULT_FILE_SERVER_STEP) {
    debug(`Listening for file messages (step ${step})`)
    options.progress(step)
    return protocol
      .read(device, protocol.FILE_MESSAGE_SIZE)
      .then(protocol.parseFileMessageBuffer)

      // We get these error messages when reading a command
      // from the device when the communication has ended
      .catch({
        message: 'LIBUSB_TRANSFER_STALL'
      }, {
        message: 'LIBUSB_TRANSFER_ERROR'
      }, (error) => {
        debug(`Got ${error.message} when reading a command, assuming everything is done`)
        return {
          command: protocol.FILE_MESSAGE_COMMANDS.DONE
        }
      })

      .then((fileMessage) => {
        debug(`Received message: ${fileMessage.command} -> ${fileMessage.fileName}`)

        if (fileMessage.command === protocol.FILE_MESSAGE_COMMANDS.DONE) {
          debug('Done')
          return Bluebird.resolve()
        }

        return Bluebird.try(() => {
          if (fileMessage.command === protocol.FILE_MESSAGE_COMMANDS.GET_FILE_SIZE) {
            debug(`Getting the size of ${fileMessage.fileName}`)

            return this.queryBlobFromCache(fileMessage.fileName).then((fileBuffer) => {
              const fileSize = fileBuffer.length
              debug(`Sending size: ${fileSize}`)
              return protocol.sendBufferSize(device, fileSize)
            }).catch({
              code: 'ENOENT'
            }, () => {
              debug(`Couldn't find ${fileMessage.fileName}`)
              debug('Sending error signal')
              return protocol.sendErrorSignal(device)
            })
          }

          if (fileMessage.command === protocol.FILE_MESSAGE_COMMANDS.READ_FILE) {
            debug(`Reading ${fileMessage.fileName}`)

            return this.queryBlobFromCache(fileMessage.fileName).then((fileBuffer) => {
              return protocol.write(device, endpoint, fileBuffer)
            }).catch({
              code: 'ENOENT'
            }, () => {
              debug(`Couldn't find ${fileMessage.fileName}`)
              debug('Sending error signal')
              return protocol.sendErrorSignal(device)
            })
          }

          return Bluebird.reject(new Error(`Unrecognized command: ${fileMessage.command}`))
        }).then(() => {
          debug('Starting again')
          const STEP_INCREMENT = 1
          return this.startFileServer(device, endpoint, options, step + STEP_INCREMENT)
        })
      })
  }
}

/**
 * @summary The name of this adapter
 * @public
 * @type {String}
 * @constant
 */
USBBootAdapter.id = 'usbboot'

/**
 * @summary Read a usbboot blob
 * @private
 *
 * @param {String} filename - blob name
 * @fulfil {Buffer} - blob
 * @returns {Promise}
 *
 * @example
 * USBBootAdapter.readBlob('bootcode.bin')
 *   .then((buffer) => { ... })
 *   .catch((error) => { ... })
 */
USBBootAdapter.readBlob = (filename) => {
  const isRaspberryPi = _.includes([
    'bootcode.bin',
    'start_cd.elf',
    'fixup_cd.dat'
  ], filename)

  const blobPath = isRaspberryPi
    ? path.join('raspberrypi', filename)
    : filename

  return fs.readFileAsync(path.join(__dirname, 'blobs', blobPath))
}

// Exports
module.exports = USBBootAdapter
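A short usage sketch for the removed usbboot adapter, based on its own JSDoc examples. The require path './usbboot' is assumed, and polling scan() on an interval is an assumption too: the implementation above shows that devices disappear and re-enumerate between the bootcode and file-server stages, so a single scan only captures a snapshot. Passing empty options relies on the adapter reading its bundled blobs from disk, as readBlob above does.

'use strict'

const USBBootAdapter = require('./usbboot')
const adapter = new USBBootAdapter()

adapter.on('error', (error) => {
  console.error('usbboot error:', error.message)
})

// Re-scan periodically so devices are picked up again after they reboot.
setInterval(() => {
  adapter.scan({}, (error, devices) => {
    if (error) {
      return
    }
    devices.forEach((device) => {
      console.log(`${device.description}: ${device.displayName}`)
    })
  })
}, 2000)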
@@ -1,392 +0,0 @@
/*
 * Copyright 2017 resin.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This work is heavily based on https://github.com/raspberrypi/usbboot
 * Copyright 2016 Raspberry Pi Foundation
 */

'use strict'

const _ = require('lodash')
const Bluebird = require('bluebird')
const usb = require('./usb')

// The equivalent of a NULL buffer, given that node-usb complains
// if the data argument is not an instance of Buffer
const NULL_BUFFER_SIZE = 0
const NULL_BUFFER = Buffer.alloc(NULL_BUFFER_SIZE)

const ONE_MEGABYTE = 1048576

/**
 * @summary The size of the boot message bootcode length section
 * @type {Number}
 * @constant
 */
const BOOT_MESSAGE_BOOTCODE_LENGTH_SIZE = 4

/**
 * @summary The offset of the boot message bootcode length section
 * @type {Number}
 * @constant
 */
const BOOT_MESSAGE_BOOTCODE_LENGTH_OFFSET = 0

/**
 * @summary The size of the boot message signature section
 * @type {Number}
 * @constant
 */
const BOOT_MESSAGE_SIGNATURE_SIZE = 20

/**
 * @summary The offset of the file message command section
 * @type {Number}
 * @constant
 */
const FILE_MESSAGE_COMMAND_OFFSET = 0

/**
 * @summary The size of the file message command section
 * @type {Number}
 * @constant
 */
const FILE_MESSAGE_COMMAND_SIZE = 4

/**
 * @summary The offset of the file message file name section
 * @type {Number}
 * @constant
 */
const FILE_MESSAGE_FILE_NAME_OFFSET = FILE_MESSAGE_COMMAND_SIZE

/**
 * @summary The size of the file message file name section
 * @type {Number}
 * @constant
 */
const FILE_MESSAGE_FILE_NAME_SIZE = 256

/**
 * @summary The GET_STATUS usb control transfer request code
 * @type {Number}
 * @constant
 * @description
 * See http://www.jungo.com/st/support/documentation/windriver/811/wdusb_man_mhtml/node55.html#usb_standard_dev_req_codes
 */
const USB_REQUEST_CODE_GET_STATUS = 0

/**
 * @summary The maximum buffer length of a usbboot message
 * @type {Number}
 * @constant
 */
const USBBOOT_MESSAGE_MAX_BUFFER_LENGTH = 0xffff

/**
 * @summary The delay to wait between each USB read/write operation
 * @type {Number}
 * @constant
 * @description
 * The USB bus seems to hang if we execute many operations at
 * the same time.
 */
const USB_REQUEST_DELAY_MS = 1000

/**
 * @summary The timeout for USB bulk transfers, in milliseconds
 * @type {Number}
 * @constant
 */
// In node-usb, 0 means "infinite" timeout
const USB_BULK_TRANSFER_TIMEOUT_MS = 0

/**
 * @summary The amount of bits to shift to the right on a control transfer index
 * @type {Number}
 * @constant
 */
const CONTROL_TRANSFER_INDEX_RIGHT_BIT_SHIFT = 16

/**
 * @summary The size of the usbboot file message
 * @type {Number}
 * @constant
 */
exports.FILE_MESSAGE_SIZE = FILE_MESSAGE_COMMAND_SIZE + FILE_MESSAGE_FILE_NAME_SIZE

/**
 * @summary File message command display names
 * @namespace FILE_MESSAGE_COMMANDS
 * @public
 */
exports.FILE_MESSAGE_COMMANDS = {

  /**
   * @property {String}
   * @memberof FILE_MESSAGE_COMMANDS
   *
   * @description
   * The "get file size" file message command name.
   */
  GET_FILE_SIZE: 'GetFileSize',

  /**
   * @property {String}
   * @memberof FILE_MESSAGE_COMMANDS
   *
   * @description
   * The "read file" file message command name.
   */
  READ_FILE: 'ReadFile',

  /**
   * @property {String}
   * @memberof FILE_MESSAGE_COMMANDS
   *
   * @description
   * The "done" file message command name.
   */
  DONE: 'Done'
}

/**
 * @summary The usbboot return code that represents success
 * @type {Number}
 * @constant
 */
exports.RETURN_CODE_SUCCESS = 0

/**
 * @summary The buffer length of the return code message
 * @type {Number}
 * @constant
 */
exports.RETURN_CODE_LENGTH = 4

/**
 * @summary Send a buffer size to a device as a control transfer
 * @function
 * @public
 *
 * @param {Object} device - node-usb device
 * @param {Number} size - buffer size
 * @returns {Promise}
 *
 * @example
 * const usb = require('usb')
 * const device = usb.findByIds(0x0a5c, 0x2763)
 *
 * protocol.sendBufferSize(device, 512).then(() => {
 *   console.log('Done!')
 * })
 */
exports.sendBufferSize = (device, size) => {
  return usb.performControlTransfer(device, {
    bmRequestType: usb.LIBUSB_REQUEST_TYPE_VENDOR,
    bRequest: USB_REQUEST_CODE_GET_STATUS,
    data: NULL_BUFFER,

    /* eslint-disable no-bitwise */
    wValue: size & USBBOOT_MESSAGE_MAX_BUFFER_LENGTH,
    wIndex: size >> CONTROL_TRANSFER_INDEX_RIGHT_BIT_SHIFT
    /* eslint-enable no-bitwise */
  })
}

const chunks = function *(buffer, size) {
  for (let start = 0; start < buffer.length; start += size) {
    yield buffer.slice(start, start + size)
  }
}

/**
 * @summary Write a buffer to an OUT endpoint
 * @function
 * @private
 *
 * @param {Object} device - device
 * @param {Object} endpoint - endpoint
 * @param {Buffer} buffer - buffer
 * @returns {Promise}
 *
 * @example
 * const usb = require('usb')
 * const device = usb.findByIds(0x0a5c, 0x2763)
 * return protocol.write(device, device.interface(0).endpoint(1), Buffer.alloc(1)).then(() => {
 *   console.log('Done!')
 * })
 */
exports.write = (device, endpoint, buffer) => {
  return exports.sendBufferSize(device, buffer.length)

    // We get LIBUSB_TRANSFER_STALL sometimes
    // in future bulk transfers without this
    .delay(USB_REQUEST_DELAY_MS)

    .then(() => {
      endpoint.timeout = USB_BULK_TRANSFER_TIMEOUT_MS
      return Bluebird.each(chunks(buffer, ONE_MEGABYTE), (chunk) => {
        return Bluebird.fromCallback((callback) => {
          endpoint.transfer(chunk, callback)
        })
      })
    })
}

/**
 * @summary Send an error signal to a device
 * @function
 * @public
 *
 * @param {Object} device - node-usb device
 * @returns {Promise}
 *
 * @example
 * const usb = require('usb')
 * const device = usb.findByIds(0x0a5c, 0x2763)
 *
 * protocol.sendErrorSignal(device).then(() => {
 *   console.log('Done!')
 * })
 */
exports.sendErrorSignal = (device) => {
  return exports.sendBufferSize(device, NULL_BUFFER_SIZE)
}

/**
 * @summary Read a buffer from a device
 * @function
 * @private
 *
 * @param {Object} device - device
 * @param {Number} bytesToRead - bytes to read
 * @fulfil {Buffer} - data
 * @returns {Promise}
 *
 * @example
 * const usb = require('usb')
 * const device = usb.findByIds(0x0a5c, 0x2763)
 * protocol.read(device, 4).then((data) => {
 *   console.log(data.readInt32BE())
 * })
 */
exports.read = (device, bytesToRead) => {
  return usb.performControlTransfer(device, {
    /* eslint-disable no-bitwise */
    bmRequestType: usb.LIBUSB_REQUEST_TYPE_VENDOR | usb.LIBUSB_ENDPOINT_IN,
    wValue: bytesToRead & USBBOOT_MESSAGE_MAX_BUFFER_LENGTH,
    wIndex: bytesToRead >> CONTROL_TRANSFER_INDEX_RIGHT_BIT_SHIFT,
    /* eslint-enable no-bitwise */

    bRequest: USB_REQUEST_CODE_GET_STATUS,
    length: bytesToRead
  })
}

/**
 * @summary Create a boot message buffer
 * @function
 * @private
 *
 * @description
 * This is based on the following data structure:
 *
 * typedef struct MESSAGE_S {
 *   int length;
 *   unsigned char signature[20];
 * } boot_message_t;
 *
 * This needs to be sent to the out endpoint of the USB device
 * as a 24 bytes big-endian buffer where:
 *
 * - The first 4 bytes contain the size of the bootcode.bin buffer
 * - The remaining 20 bytes contain the boot signature, which
 *   we don't make use of in this implementation
 *
 * @param {Number} bootCodeBufferLength - bootcode.bin buffer length
 * @returns {Buffer} boot message buffer
 *
 * @example
 * const bootMessageBuffer = protocol.createBootMessageBuffer(50216)
 */
exports.createBootMessageBuffer = (bootCodeBufferLength) => {
  const bootMessageBufferSize = BOOT_MESSAGE_BOOTCODE_LENGTH_SIZE + BOOT_MESSAGE_SIGNATURE_SIZE

  // Buffers are automatically filled with zero bytes
  const bootMessageBuffer = Buffer.alloc(bootMessageBufferSize)

  // The bootcode length should be stored in 4 big-endian bytes
bootMessageBuffer.writeInt32BE(bootCodeBufferLength, BOOT_MESSAGE_BOOTCODE_LENGTH_OFFSET)
|
||||
|
||||
return bootMessageBuffer
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Parse a file message buffer from a device
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @param {Buffer} fileMessageBuffer - file message buffer
|
||||
* @returns {Object} parsed file message
|
||||
*
|
||||
* @example
|
||||
* const usb = require('usb')
|
||||
* const device = usb.findByIds(0x0a5c, 0x2763)
|
||||
*
|
||||
* return protocol.read(device, protocol.FILE_MESSAGE_SIZE).then((fileMessageBuffer) => {
|
||||
* return protocol.parseFileMessageBuffer(fileMessageBuffer)
|
||||
* }).then((fileMessage) => {
|
||||
* console.log(fileMessage.command)
|
||||
* console.log(fileMessage.fileName)
|
||||
* })
|
||||
*/
|
||||
exports.parseFileMessageBuffer = (fileMessageBuffer) => {
|
||||
const commandCode = fileMessageBuffer.readInt32LE(FILE_MESSAGE_COMMAND_OFFSET)
|
||||
const command = _.nth([
|
||||
exports.FILE_MESSAGE_COMMANDS.GET_FILE_SIZE,
|
||||
exports.FILE_MESSAGE_COMMANDS.READ_FILE,
|
||||
exports.FILE_MESSAGE_COMMANDS.DONE
|
||||
], commandCode)
|
||||
|
||||
if (_.isNil(command)) {
|
||||
throw new Error(`Invalid file message command code: ${commandCode}`)
|
||||
}
|
||||
|
||||
const fileName = _.chain(fileMessageBuffer.toString('ascii', FILE_MESSAGE_FILE_NAME_OFFSET))
|
||||
|
||||
// The parsed string will likely contain tons of trailing
|
||||
// null bytes that we should get rid of for convenience
|
||||
// See https://github.com/nodejs/node/issues/4775
|
||||
.takeWhile((character) => {
|
||||
return character !== '\0'
|
||||
})
|
||||
.join('')
|
||||
.value()
|
||||
|
||||
// A blank file name can also mean "done"
|
||||
if (_.isEmpty(fileName)) {
|
||||
return {
|
||||
command: exports.FILE_MESSAGE_COMMANDS.DONE
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
command,
|
||||
fileName
|
||||
}
|
||||
}
|
@ -1,174 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const Bluebird = require('bluebird')
|
||||
const debug = require('debug')('etcher:sdk:usbboot')
|
||||
|
||||
// The USB module calls `libusb_init`, which will fail
|
||||
// if the device we're running in has no USB controller
|
||||
// plugged in (e.g. in certain CI services).
|
||||
// In order to workaround that, we need to return a
|
||||
// stub if such error occurs.
|
||||
const usb = (() => {
|
||||
try {
|
||||
return require('usb')
|
||||
} catch (error) {
|
||||
debug('Couldn\'t require "usb". Reason: ', error.message, error.stack)
|
||||
return {
|
||||
getDeviceList: _.constant([])
|
||||
}
|
||||
}
|
||||
})()
|
||||
|
||||
// Re-expose some `usb` constants
|
||||
_.each([
|
||||
'LIBUSB_REQUEST_TYPE_VENDOR',
|
||||
'LIBUSB_ENDPOINT_IN',
|
||||
'LIBUSB_TRANSFER_TYPE_BULK',
|
||||
'LIBUSB_ERROR_NO_DEVICE',
|
||||
'LIBUSB_ERROR_IO'
|
||||
], (constant) => {
|
||||
exports[constant] = usb[constant]
|
||||
})
|
||||
|
||||
/**
|
||||
* @summary The timeout for USB control transfers, in milliseconds
|
||||
* @type {Number}
|
||||
* @constant
|
||||
*/
|
||||
// In node-usb, 0 means "infinite" timeout
|
||||
const USB_CONTROL_TRANSFER_TIMEOUT_MS = 0
|
||||
|
||||
/**
|
||||
* @summary List the available USB devices
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @fulfil {Object[]} - usb devices
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* usb.listDevices().each((device) => {
|
||||
* console.log(device)
|
||||
* })
|
||||
*/
|
||||
exports.listDevices = () => {
|
||||
const devices = _.map(usb.getDeviceList(), (device) => {
|
||||
device.accessible = true
|
||||
return device
|
||||
})
|
||||
|
||||
// Include driverless devices into the list of USB devices.
|
||||
if (process.platform === 'win32') {
|
||||
// NOTE: Temporarily ignore errors when loading winusb-driver-generator,
|
||||
// due to C Runtime issues on Windows;
|
||||
// see https://github.com/resin-io/etcher/issues/1956
|
||||
try {
|
||||
/* eslint-disable node/no-missing-require */
|
||||
const winusbDriverGenerator = require('winusb-driver-generator')
|
||||
/* eslint-enable node/no-missing-require */
|
||||
for (const device of winusbDriverGenerator.listDriverlessDevices()) {
|
||||
devices.push({
|
||||
accessible: false,
|
||||
deviceDescriptor: {
|
||||
idVendor: device.vid,
|
||||
idProduct: device.pid
|
||||
}
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore error
|
||||
}
|
||||
}
|
||||
|
||||
return Bluebird.resolve(devices)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get a USB device string from an index
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @param {Object} device - device
|
||||
* @param {Number} index - string index
|
||||
* @fulfil {String} - string
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* usb.getDeviceStringFromIndex({ ... }, 5).then((string) => {
|
||||
* console.log(string)
|
||||
* })
|
||||
*/
|
||||
exports.getDeviceStringFromIndex = (device, index) => {
|
||||
return Bluebird.fromCallback((callback) => {
|
||||
device.getStringDescriptor(index, callback)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Perform a USB control transfer
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @description
|
||||
* See http://libusb.sourceforge.net/api-1.0/group__syncio.html
|
||||
*
|
||||
* @param {Object} device - usb device
|
||||
* @param {Object} options - options
|
||||
* @param {Number} options.bmRequestType - the request type field for the setup packet
|
||||
* @param {Number} options.bRequest - the request field for the setup packet
|
||||
* @param {Number} options.wValue - the value field for the setup packet
|
||||
* @param {Number} options.wIndex - the index field for the setup packet
|
||||
* @param {Buffer} [options.data] - output data buffer (for OUT transfers)
|
||||
* @param {Number} [options.length] - input data size (for IN transfers)
|
||||
* @fulfil {(Buffer|Undefined)} - result
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* const buffer = Buffer.alloc(512)
|
||||
*
|
||||
* usb.performControlTransfer({ ... }, {
|
||||
* bmRequestType: usb.LIBUSB_REQUEST_TYPE_VENDOR,
|
||||
* bRequest: 0,
|
||||
* wValue: buffer.length & 0xffff,
|
||||
* wIndex: buffer.length >> 16,
|
||||
* data: Buffer.alloc(256)
|
||||
* })
|
||||
*/
|
||||
exports.performControlTransfer = (device, options) => {
|
||||
if (_.isNil(options.data) && _.isNil(options.length)) {
|
||||
return Bluebird.reject(new Error('You must define either data or length'))
|
||||
}
|
||||
|
||||
if (!_.isNil(options.data) && !_.isNil(options.length)) {
|
||||
return Bluebird.reject(new Error('You can define either data or length, but not both'))
|
||||
}
|
||||
|
||||
return Bluebird.fromCallback((callback) => {
|
||||
device.timeout = USB_CONTROL_TRANSFER_TIMEOUT_MS
|
||||
device.controlTransfer(
|
||||
options.bmRequestType,
|
||||
options.bRequest,
|
||||
options.wValue,
|
||||
options.wIndex,
|
||||
options.data || options.length,
|
||||
callback
|
||||
)
|
||||
})
|
||||
}
|
@ -1,76 +0,0 @@
|
||||
Etcher Image Stream
|
||||
===================
|
||||
|
||||
This module is in charge of creating a readable stream from any image source
|
||||
(e.g. a file, a URL, etc.) along with some metadata (like size), and handling
|
||||
any necessary transformations (like decompression) that must be applied before
|
||||
plugging the stream to [`etcher-image-write`][etcher-image-write].
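
For orientation, a typical consumer of this module looks roughly like the sketch
below; it mirrors the `getFromFilePath` example in this module's JSDoc, and the
require path, image path and target device are illustrative only.

```js
const fs = require('fs')
const imageStream = require('./lib/image-stream')

imageStream.getFromFilePath('path/to/rpi.img.xz').then((image) => {
  // `stream` is the raw source stream and `transform` applies any needed
  // decompression, so they are piped in that order
  image.stream
    .pipe(image.transform)
    .pipe(fs.createWriteStream('/dev/disk2'))
})
```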
|
||||
|
||||
Since this module contains the logic to handle image formats, it is also the
source of truth for the list of supported formats.
|
||||
|
||||
There are three classes of images this module supports:
|
||||
|
||||
- Uncompressed images (e.g. `.img`, `.iso`)
- Compressed images (e.g. `.img.xz`, `.iso.gz`)
- Archive images (e.g. `.zip`)
|
||||
|
||||
The core of this module consists of handlers and archive hooks.
|
||||
|
||||
Handlers
|
||||
--------
|
||||
|
||||
The handlers are functions that know how to handle certain MIME types, like
|
||||
`application/x-bzip2` and `application/octet-stream`, returning a stream for
|
||||
the image, a transform stream that needs to be applied to get the real image
|
||||
data, and useful metadata like the final image size.
|
||||
|
||||
Each handler is called with a file path (although that will change soon once we
|
||||
add proper support for URLs) and an options object, containing extra metadata
|
||||
about the file.
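
As a sketch of that contract (hypothetical code, modelled on the plain-image
`application/octet-stream` handler this module defines), a handler returns
roughly the following shape:

```js
const fs = require('fs')
const { PassThrough } = require('stream')

// Hypothetical handler for plain, uncompressed images: no transformation is
// needed, so the transform stream is a simple pass-through
const handlePlainImage = (imagePath, options) => {
  return {
    path: imagePath,
    extension: 'img',
    stream: fs.createReadStream(imagePath),
    transform: new PassThrough(),
    size: {
      original: options.size,
      final: {
        estimation: false,
        value: options.size
      }
    }
  }
}
```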
|
||||
|
||||
Archive Hooks
|
||||
-------------
|
||||
|
||||
This module supports reading "archive images", which are defined by handlers
(like `application/zip`). To avoid duplicating archive-handling logic across
handlers, archive support is implemented by "archive hooks".
|
||||
|
||||
Archive hooks are CommonJS modules that expose two functions:
|
||||
|
||||
- `Promise .getEntries(String archivePath)`: list all entries in the archive
|
||||
- `Promise .extractFile(String archivePath, String[] entries, String entry)`: resolve a readable stream for an archive entry
|
||||
|
||||
Defining those two functions for any archive format is enough for Etcher to
|
||||
correctly use its archive handling logic on them.
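
As an illustration of that shape (not a hook that ships with Etcher), the
following sketch treats a plain directory as an "archive", using only Node
core modules and Bluebird:

```js
'use strict'

const Bluebird = require('bluebird')
const fs = Bluebird.promisifyAll(require('fs'))
const path = require('path')

// Hypothetical hook: a directory on disk stands in for an archive format
exports.getEntries = (archivePath) => {
  return fs.readdirAsync(archivePath).map((name) => {
    return fs.statAsync(path.join(archivePath, name)).then((stats) => {
      return {
        name,
        size: stats.size
      }
    })
  })
}

exports.extractFile = (archivePath, entries, entry) => {
  return Bluebird.resolve(fs.createReadStream(path.join(archivePath, entry)))
}
```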
|
||||
|
||||
Archive Images
|
||||
--------------
|
||||
|
||||
As mentioned before, Etcher supports the concept of "archive images". These are
|
||||
uncompressed image files included *inside* an archive format, like `.zip` or
|
||||
`.tar`, possibly alongside other files.
|
||||
|
||||
These are the rules for handling archive images:
|
||||
|
||||
- Each archive should only contain one valid image
|
||||
- Images in archives should be in uncompressed form
|
||||
|
||||
The module throws an error if the above rules are not met.
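
For instance, a valid archive image could be laid out as follows; the `.meta`
entries are the optional metadata files this module knows how to read:

```
my-image.zip
|-- my-image.img            <- exactly one top-level, uncompressed image
`-- .meta/
    |-- logo.svg
    |-- instructions.markdown
    |-- image.bmap
    `-- manifest.json
```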
|
||||
|
||||
Supported Formats
|
||||
-----------------
|
||||
|
||||
Each supported format is tagged with one of three types: `image`, `compressed`, or `archive`.
|
||||
|
||||
An extension tagged `image` describes a format that its handler can write directly
to a device. An extension tagged `archive` denotes an archive containing an image,
and causes an archive handler to open the archive and search for an image file.
|
||||
|
||||
Note that when marking an extension as `compressed`, the filename will be stripped of that extension,
|
||||
and the leftover extension is examined to determine the uncompressed image format (e.g. `.img.gz -> .img`).
|
||||
|
||||
As an archive (such as `.tar`) might be additionally compressed, this will allow for constructs such as
|
||||
`.tar.gz` (a compressed archive, containing a file with an extension tagged as `image`) to be handled correctly.
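
For example, with the helpers from `lib/shared/file-extensions` (the exact
require path depends on the caller), a `.img.gz` filename resolves roughly
like this:

```js
const fileExtensions = require('./lib/shared/file-extensions')

fileExtensions.getLastFileExtension('rpi.img.gz')        // 'gz'  -> tagged as `compressed`
fileExtensions.getPenultimateFileExtension('rpi.img.gz') // 'img' -> the uncompressed image format
```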
|
||||
|
||||
[etcher-image-write]: https://github.com/balena-io-modules/etcher-image-write
|
@ -1,117 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const Bluebird = require('bluebird')
|
||||
const _ = require('lodash')
|
||||
const StreamZip = require('node-stream-zip')
|
||||
const yauzl = Bluebird.promisifyAll(require('yauzl'))
|
||||
const errors = require('../../../shared/errors')
|
||||
|
||||
/**
|
||||
* @summary Get all archive entries
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @param {String} archive - archive path
|
||||
* @fulfil {Object[]} - archive entries
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* zip.getEntries('path/to/my.zip').then((entries) => {
|
||||
* entries.forEach((entry) => {
|
||||
* console.log(entry.name);
|
||||
* console.log(entry.size);
|
||||
* });
|
||||
* });
|
||||
*/
|
||||
exports.getEntries = (archive) => {
|
||||
return new Bluebird((resolve, reject) => {
|
||||
const zip = new StreamZip({
|
||||
file: archive,
|
||||
storeEntries: true
|
||||
})
|
||||
|
||||
zip.on('error', reject)
|
||||
|
||||
zip.on('ready', () => {
|
||||
const EMPTY_ENTRY_SIZE = 0
|
||||
|
||||
return resolve(_.chain(zip.entries())
|
||||
.omitBy({
|
||||
size: EMPTY_ENTRY_SIZE
|
||||
})
|
||||
.map((metadata) => {
|
||||
return {
|
||||
name: metadata.name,
|
||||
size: metadata.size
|
||||
}
|
||||
})
|
||||
.value())
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Extract a file from an archive
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @param {String} archive - archive path
|
||||
* @param {String[]} entries - archive entries
|
||||
* @param {String} file - archive file
|
||||
* @fulfil {ReadableStream} file
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* zip.getEntries('path/to/my.zip').then((entries) => {
|
||||
* return zip.extractFile('path/to/my.zip', entries, 'my/file');
|
||||
* }).then((stream) => {
|
||||
* stream.pipe('...');
|
||||
* });
|
||||
*/
|
||||
exports.extractFile = (archive, entries, file) => {
|
||||
return new Bluebird((resolve, reject) => {
|
||||
if (!_.find(entries, {
|
||||
name: file
|
||||
})) {
|
||||
throw errors.createError({
|
||||
title: `Invalid entry: ${file}`
|
||||
})
|
||||
}
|
||||
|
||||
yauzl.openAsync(archive, {
|
||||
lazyEntries: true
|
||||
}).then((zipfile) => {
|
||||
zipfile.readEntry()
|
||||
|
||||
zipfile.on('entry', (entry) => {
|
||||
if (entry.fileName !== file) {
|
||||
return zipfile.readEntry()
|
||||
}
|
||||
|
||||
return zipfile.openReadStream(entry, (error, readStream) => {
|
||||
if (error) {
|
||||
return reject(error)
|
||||
}
|
||||
|
||||
return resolve(readStream)
|
||||
})
|
||||
})
|
||||
}).catch(reject)
|
||||
})
|
||||
}
|
@ -1,214 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const Bluebird = require('bluebird')
|
||||
const _ = require('lodash')
|
||||
const PassThroughStream = require('stream').PassThrough
|
||||
const supportedFileTypes = require('./supported')
|
||||
const utils = require('./utils')
|
||||
const errors = require('../../shared/errors')
|
||||
const fileExtensions = require('../../shared/file-extensions')
|
||||
|
||||
/**
|
||||
* @summary Archive metadata base path
|
||||
* @constant
|
||||
* @private
|
||||
* @type {String}
|
||||
*/
|
||||
const ARCHIVE_METADATA_BASE_PATH = '.meta'
|
||||
|
||||
/**
|
||||
* @summary Image extensions
|
||||
* @constant
|
||||
* @private
|
||||
* @type {String[]}
|
||||
*/
|
||||
const IMAGE_EXTENSIONS = _.reduce(supportedFileTypes, (accumulator, file) => {
|
||||
if (file.type === 'image') {
|
||||
accumulator.push(file.extension)
|
||||
}
|
||||
|
||||
return accumulator
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* @summary Extract entry by path
|
||||
* @function
|
||||
* @private
|
||||
*
|
||||
* @param {String} archive - archive
|
||||
* @param {String} filePath - entry file path
|
||||
* @param {Object} options - options
|
||||
* @param {Object} options.hooks - archive hooks
|
||||
* @param {Object[]} options.entries - archive entries
|
||||
* @param {*} [options.default] - entry default value
|
||||
* @fulfil {*} contents
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* extractEntryByPath('my/archive.zip', '_info/logo.svg', {
|
||||
* hooks: { ... },
|
||||
* entries: [ ... ],
|
||||
* default: ''
|
||||
* }).then((contents) => {
|
||||
* console.log(contents);
|
||||
* });
|
||||
*/
|
||||
const extractEntryByPath = (archive, filePath, options) => {
|
||||
const fileEntry = _.find(options.entries, (entry) => {
|
||||
return _.chain(entry.name)
|
||||
.split('/')
|
||||
.tail()
|
||||
.join('/')
|
||||
.value() === filePath
|
||||
})
|
||||
|
||||
if (!fileEntry) {
|
||||
return Bluebird.resolve(options.default)
|
||||
}
|
||||
|
||||
return options.hooks.extractFile(archive, options.entries, fileEntry.name)
|
||||
.then(utils.extractStream)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Extract archive metadata
|
||||
* @function
|
||||
* @private
|
||||
*
|
||||
* @param {String} archive - archive
|
||||
* @param {String} basePath - metadata base path
|
||||
* @param {Object} options - options
|
||||
* @param {Object[]} options.entries - archive entries
|
||||
* @param {Object} options.hooks - archive hooks
|
||||
* @fulfil {Object} - metadata
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* extractArchiveMetadata('my/archive.zip', '.meta', {
|
||||
* hooks: { ... },
|
||||
* entries: [ ... ]
|
||||
* }).then((metadata) => {
|
||||
* console.log(metadata);
|
||||
* });
|
||||
*/
|
||||
const extractArchiveMetadata = (archive, basePath, options) => {
|
||||
return Bluebird.props({
|
||||
logo: extractEntryByPath(archive, `${basePath}/logo.svg`, options),
|
||||
instructions: extractEntryByPath(archive, `${basePath}/instructions.markdown`, options),
|
||||
bmap: extractEntryByPath(archive, `${basePath}/image.bmap`, options),
|
||||
manifest: _.attempt(() => {
|
||||
return extractEntryByPath(archive, `${basePath}/manifest.json`, {
|
||||
hooks: options.hooks,
|
||||
entries: options.entries,
|
||||
default: '{}'
|
||||
}).then((manifest) => {
|
||||
try {
|
||||
return JSON.parse(manifest)
|
||||
} catch (parseError) {
|
||||
throw errors.createUserError({
|
||||
title: 'Invalid archive manifest.json',
|
||||
description: 'The archive manifest.json file is not valid JSON'
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
}).then((results) => {
|
||||
return {
|
||||
name: results.manifest.name,
|
||||
version: results.manifest.version,
|
||||
url: results.manifest.url,
|
||||
supportUrl: results.manifest.supportUrl,
|
||||
releaseNotesUrl: results.manifest.releaseNotesUrl,
|
||||
checksumType: results.manifest.checksumType,
|
||||
checksum: results.manifest.checksum,
|
||||
bytesToZeroOutFromTheBeginning: results.manifest.bytesToZeroOutFromTheBeginning,
|
||||
recommendedDriveSize: results.manifest.recommendedDriveSize,
|
||||
logo: _.invoke(results.logo, [ 'toString' ]),
|
||||
bmap: _.invoke(results.bmap, [ 'toString' ]),
|
||||
instructions: _.invoke(results.instructions, [ 'toString' ])
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Extract image from archive
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @param {String} archive - archive path
|
||||
* @param {Object} hooks - archive hooks
|
||||
* @param {Function} hooks.getEntries - get entries hook
|
||||
* @param {Function} hooks.extractFile - extract file hook
|
||||
* @fulfil {Object} image metadata
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* archive.extractImage('path/to/my/archive.zip', {
|
||||
* getEntries: (archive) => {
|
||||
* return [ ..., ..., ... ];
|
||||
* },
|
||||
* extractFile: (archive, entries, file) => {
|
||||
* ...
|
||||
* }
|
||||
* }).then((image) => {
|
||||
* image.stream.pipe(image.transform).pipe(...);
|
||||
* });
|
||||
*/
|
||||
exports.extractImage = (archive, hooks) => {
|
||||
return hooks.getEntries(archive).then((entries) => {
|
||||
const imageEntries = _.filter(entries, (entry) => {
|
||||
return _.includes(IMAGE_EXTENSIONS, fileExtensions.getLastFileExtension(entry.name))
|
||||
})
|
||||
|
||||
const VALID_NUMBER_OF_IMAGE_ENTRIES = 1
|
||||
if (imageEntries.length !== VALID_NUMBER_OF_IMAGE_ENTRIES) {
|
||||
throw errors.createUserError({
|
||||
title: 'Invalid archive image',
|
||||
description: 'The archive image should contain one and only one top image file'
|
||||
})
|
||||
}
|
||||
|
||||
const imageEntry = _.first(imageEntries)
|
||||
|
||||
return Bluebird.props({
|
||||
imageStream: hooks.extractFile(archive, entries, imageEntry.name),
|
||||
metadata: extractArchiveMetadata(archive, ARCHIVE_METADATA_BASE_PATH, {
|
||||
entries,
|
||||
hooks
|
||||
})
|
||||
}).then((results) => {
|
||||
results.metadata.stream = results.imageStream
|
||||
results.metadata.transform = new PassThroughStream()
|
||||
results.metadata.path = archive
|
||||
|
||||
results.metadata.size = {
|
||||
original: imageEntry.size,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: imageEntry.size
|
||||
}
|
||||
}
|
||||
|
||||
results.metadata.extension = fileExtensions.getLastFileExtension(imageEntry.name)
|
||||
results.metadata.archiveExtension = fileExtensions.getLastFileExtension(archive)
|
||||
|
||||
return results.metadata
|
||||
})
|
||||
})
|
||||
}
|
@ -1,72 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* @summary The byte length of ISIZE
|
||||
* @type {Number}
|
||||
* @constant
|
||||
* @description
|
||||
* See https://tools.ietf.org/html/rfc1952
|
||||
*/
|
||||
const ISIZE_LENGTH = 4
|
||||
|
||||
/**
|
||||
* @summary Get the estimated uncompressed size of a gzip file
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @description
|
||||
* This function determines the uncompressed size of the gzip file
* by reading its `ISIZE` field at the end of the file. Per RFC 1952,
* `ISIZE` holds the uncompressed size modulo 2^32, so for files larger
* than 4 GiB the returned value is only an estimation.
|
||||
*
|
||||
* @param {Object} options - options
|
||||
* @param {Number} options.size - file size
|
||||
* @param {Function} options.read - read function (position, count)
|
||||
* @fulfil {Number} - uncompressed size
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* const fd = fs.openSync('path/to/image', 'r');
|
||||
*
|
||||
* gzip.getUncompressedSize({
|
||||
* size: fs.statSync('path/to/image.gz').size,
|
||||
* read: (position, count) => {
|
||||
* const buffer = Buffer.alloc(count);
|
||||
* return new Promise((resolve, reject) => {
|
||||
* fs.read(fd, buffer, 0, count, position, (error) => {
|
||||
* if (error) {
|
||||
* return reject(error);
|
||||
* }
|
||||
*
|
||||
* resolve(buffer);
|
||||
* });
|
||||
* });
|
||||
* }
|
||||
* }).then((uncompressedSize) => {
|
||||
* console.log(`The uncompressed size is: ${uncompressedSize}`);
|
||||
* fs.closeSync(fd);
|
||||
* });
|
||||
*/
|
||||
exports.getUncompressedSize = (options) => {
|
||||
const ISIZE_BUFFER_START = 0
|
||||
const ISIZE_POSITION = options.size - ISIZE_LENGTH
|
||||
return options.read(ISIZE_POSITION, ISIZE_LENGTH).then((buffer) => {
|
||||
return buffer.readUInt32LE(ISIZE_BUFFER_START)
|
||||
})
|
||||
}
|
@ -1,253 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint-disable jsdoc/require-example */
|
||||
|
||||
const Bluebird = require('bluebird')
|
||||
const fs = Bluebird.promisifyAll(require('fs'))
|
||||
const PassThroughStream = require('stream').PassThrough
|
||||
const lzma = Bluebird.promisifyAll(require('lzma-native'))
|
||||
const zlib = require('zlib')
|
||||
const unbzip2Stream = require('unbzip2-stream')
|
||||
const gzip = require('./gzip')
|
||||
const udif = Bluebird.promisifyAll(require('udif'))
|
||||
const archive = require('./archive')
|
||||
const utils = require('./utils')
|
||||
const zipArchiveHooks = require('./archive-hooks/zip')
|
||||
const fileExtensions = require('../../shared/file-extensions')
|
||||
const path = require('path')
|
||||
const errors = require('../../shared/errors')
|
||||
|
||||
/**
|
||||
* @summary Default image extension to be assumed
|
||||
* @type {String}
|
||||
* @constant
|
||||
*/
|
||||
const DEFAULT_EXT = 'img'
|
||||
|
||||
/**
|
||||
* @summary Default read-stream highWaterMark value (1M)
|
||||
* @type {Number}
|
||||
* @constant
|
||||
*/
|
||||
const STREAM_HWM = 1048576
|
||||
|
||||
/**
|
||||
* @summary Image handlers
|
||||
* @namespace handlers
|
||||
* @public
|
||||
*/
|
||||
module.exports = {
|
||||
|
||||
/**
|
||||
* @summary Handle BZ2 compressed images
|
||||
* @function
|
||||
* @public
|
||||
* @memberof handlers
|
||||
*
|
||||
* @param {String} imagePath - image path
|
||||
* @param {Object} options - options
|
||||
* @param {Number} [options.size] - image size
|
||||
*
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*/
|
||||
'application/x-bzip2': (imagePath, options) => {
|
||||
return {
|
||||
path: imagePath,
|
||||
archiveExtension: fileExtensions.getLastFileExtension(imagePath),
|
||||
extension: fileExtensions.getPenultimateFileExtension(imagePath) || DEFAULT_EXT,
|
||||
stream: fs.createReadStream(imagePath, { highWaterMark: STREAM_HWM }),
|
||||
size: {
|
||||
original: options.size,
|
||||
final: {
|
||||
estimation: true,
|
||||
value: options.size
|
||||
}
|
||||
},
|
||||
transform: unbzip2Stream()
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Handle GZ compressed images
|
||||
* @function
|
||||
* @public
|
||||
* @memberof handlers
|
||||
*
|
||||
* @param {String} imagePath - image path
|
||||
* @param {Object} options - options
|
||||
* @param {Number} [options.size] - image size
|
||||
*
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*/
|
||||
'application/gzip': (imagePath, options) => {
|
||||
return Bluebird.using(fs.openAsync(imagePath, 'r').disposer((fileDescriptor) => {
|
||||
return fs.closeAsync(fileDescriptor)
|
||||
}), (fileDescriptor) => {
|
||||
return gzip.getUncompressedSize({
|
||||
size: options.size,
|
||||
read: (position, count) => {
|
||||
return utils.readBufferFromImageFileDescriptor(fileDescriptor, position, count)
|
||||
}
|
||||
})
|
||||
}).then((uncompressedSize) => {
|
||||
return {
|
||||
path: imagePath,
|
||||
archiveExtension: fileExtensions.getLastFileExtension(imagePath),
|
||||
extension: fileExtensions.getPenultimateFileExtension(imagePath) || DEFAULT_EXT,
|
||||
stream: fs.createReadStream(imagePath, { highWaterMark: STREAM_HWM }),
|
||||
size: {
|
||||
original: options.size,
|
||||
final: {
|
||||
estimation: true,
|
||||
value: uncompressedSize
|
||||
}
|
||||
},
|
||||
transform: zlib.createGunzip()
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Handle XZ compressed images
|
||||
* @function
|
||||
* @public
|
||||
* @memberof handlers
|
||||
*
|
||||
* @param {String} imagePath - image path
|
||||
* @param {Object} options - options
|
||||
* @param {Number} [options.size] - image size
|
||||
*
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*/
|
||||
'application/x-xz': (imagePath, options) => {
|
||||
return Bluebird.using(fs.openAsync(imagePath, 'r').disposer((fileDescriptor) => {
|
||||
return fs.closeAsync(fileDescriptor)
|
||||
}), (fileDescriptor) => {
|
||||
return lzma.parseFileIndexAsync({
|
||||
fileSize: options.size,
|
||||
read: (count, position, callback) => {
|
||||
utils.readBufferFromImageFileDescriptor(fileDescriptor, position, count).asCallback(callback)
|
||||
}
|
||||
})
|
||||
}).then((metadata) => {
|
||||
return {
|
||||
path: imagePath,
|
||||
archiveExtension: fileExtensions.getLastFileExtension(imagePath),
|
||||
extension: fileExtensions.getPenultimateFileExtension(imagePath) || DEFAULT_EXT,
|
||||
stream: fs.createReadStream(imagePath, { highWaterMark: STREAM_HWM }),
|
||||
size: {
|
||||
original: options.size,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: metadata.uncompressedSize
|
||||
}
|
||||
},
|
||||
transform: lzma.createDecompressor()
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Handle Apple disk images (.dmg)
|
||||
* @function
|
||||
* @public
|
||||
* @memberof handlers
|
||||
*
|
||||
* @param {String} imagePath - image path
|
||||
* @param {Object} options - options
|
||||
* @param {Number} [options.size] - image size
|
||||
*
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*/
|
||||
'application/x-apple-diskimage': (imagePath, options) => {
|
||||
return udif.getUncompressedSizeAsync(imagePath).then((size) => {
|
||||
return {
|
||||
path: imagePath,
|
||||
extension: fileExtensions.getLastFileExtension(imagePath),
|
||||
stream: udif.createReadStream(imagePath, { highWaterMark: STREAM_HWM }),
|
||||
size: {
|
||||
original: options.size,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: size
|
||||
}
|
||||
},
|
||||
transform: new PassThroughStream()
|
||||
}
|
||||
}).catch((error) => {
|
||||
if (/invalid footer/i.test(error.message)) {
|
||||
throw errors.createUserError({
|
||||
title: 'Invalid image',
|
||||
description: `There was an error reading "${path.basename(imagePath)}". ` +
|
||||
'The image does not appear to be a valid Apple Disk Image (dmg), or may have the wrong filename extension.\n\n' +
|
||||
`Error: ${error.description || error.message}`
|
||||
})
|
||||
}
|
||||
throw error
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Handle ZIP compressed images
|
||||
* @function
|
||||
* @public
|
||||
* @memberof handlers
|
||||
*
|
||||
* @param {String} imagePath - image path
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*/
|
||||
'application/zip': (imagePath) => {
|
||||
return archive.extractImage(imagePath, zipArchiveHooks)
|
||||
},
|
||||
|
||||
/**
|
||||
* @summary Handle plain uncompressed images
|
||||
* @function
|
||||
* @public
|
||||
* @memberof handlers
|
||||
*
|
||||
* @param {String} imagePath - image path
|
||||
* @param {Object} options - options
|
||||
* @param {Number} [options.size] - image size
|
||||
*
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*/
|
||||
'application/octet-stream': (imagePath, options) => {
|
||||
return {
|
||||
path: imagePath,
|
||||
extension: fileExtensions.getLastFileExtension(imagePath),
|
||||
stream: fs.createReadStream(imagePath, { highWaterMark: STREAM_HWM }),
|
||||
size: {
|
||||
original: options.size,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: options.size
|
||||
}
|
||||
},
|
||||
transform: new PassThroughStream()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -1,140 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const Bluebird = require('bluebird')
|
||||
const fs = Bluebird.promisifyAll(require('fs'))
|
||||
const stream = require('stream')
|
||||
const mime = require('./mime')
|
||||
const handlers = require('./handlers')
|
||||
const supportedFileTypes = require('./supported')
|
||||
const errors = require('../../shared/errors')
|
||||
const parsePartitions = require('./parse-partitions')
|
||||
|
||||
/**
|
||||
* @summary Get an image stream from a file
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @description
|
||||
* This function resolves an object containing the following properties,
|
||||
* along with various extra metadata:
|
||||
*
|
||||
* - `Number size`: The input file size.
|
||||
*
|
||||
* - `ReadableStream stream`: The input file stream.
|
||||
*
|
||||
* - `TransformStream transform`: A transform stream that performs any
|
||||
* needed transformation to get the image out of the source input file
|
||||
* (for example, decompression).
|
||||
*
|
||||
* The purpose of separating the above components is to handle cases like
|
||||
* showing a progress bar when you can't know the final uncompressed size.
|
||||
*
|
||||
* In such case, you can pipe the `stream` through a progress stream using
|
||||
* the input file `size`, and apply the `transform` after the progress stream.
|
||||
*
|
||||
* @param {String} file - file path
|
||||
* @fulfil {Object} - image stream details
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* const imageStream = require('./lib/sdk/image-stream');
|
||||
*
|
||||
* imageStream.getFromFilePath('path/to/rpi.img.xz').then((image) => {
|
||||
* console.log(`The image display name is: ${image.name}`);
|
||||
* console.log(`The image url is: ${image.url}`);
|
||||
* console.log(`The image support url is: ${image.supportUrl}`);
|
||||
* console.log(`The image logo is: ${image.logo}`);
|
||||
*
|
||||
* image.stream
|
||||
* .pipe(image.transform)
|
||||
* .pipe(fs.createWriteStream('/dev/disk2'));
|
||||
* });
|
||||
*/
|
||||
exports.getFromFilePath = (file) => {
|
||||
return fs.statAsync(file).then((fileStats) => {
|
||||
if (!fileStats.isFile()) {
|
||||
throw errors.createUserError({
|
||||
title: 'Invalid image',
|
||||
description: 'The image must be a file'
|
||||
})
|
||||
}
|
||||
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
const mimeType = _.has(handlers, type) ? type : mime.DEFAULT_MIME_TYPE
|
||||
return _.invoke(handlers, mimeType, file, {
|
||||
size: fileStats.size
|
||||
})
|
||||
})
|
||||
}).then((image) => {
|
||||
return _.omitBy(image, _.isUndefined)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get image metadata
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @description
|
||||
* This function is useful to determine the final size of an image
|
||||
* after decompression or any other needed transformation, as well as
|
||||
* other relevant metadata, if any.
|
||||
*
|
||||
* **NOTE:** This function is known to output incorrect size results for
|
||||
* `bzip2`. For this compression format, this function will simply
|
||||
* return the size of the compressed file.
|
||||
*
|
||||
* @param {String} file - file path
|
||||
* @fulfil {Object} - image metadata
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* const imageStream = require('./lib/sdk/image-stream');
|
||||
*
|
||||
* imageStream.getImageMetadata('path/to/rpi.img.xz').then((metadata) => {
|
||||
* console.log(`The image display name is: ${metadata.name}`);
|
||||
* console.log(`The image url is: ${metadata.url}`);
|
||||
* console.log(`The image support url is: ${metadata.supportUrl}`);
|
||||
* console.log(`The image logo is: ${metadata.logo}`);
|
||||
* });
|
||||
*/
|
||||
exports.getImageMetadata = (file) => {
|
||||
return exports.getFromFilePath(file)
|
||||
.then(parsePartitions)
|
||||
.then((image) => {
|
||||
return _.omitBy(image, (property) => {
|
||||
return property instanceof stream.Stream || _.isNil(property)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Supported file types
|
||||
* @type {String[]}
|
||||
* @public
|
||||
*
|
||||
* @example
|
||||
* const imageStream = require('./lib/sdk/image-stream');
|
||||
*
|
||||
* imageStream.supportedFileTypes.forEach((fileType) => {
|
||||
* console.log('Supported file type: ' + fileType.extension);
|
||||
* });
|
||||
*/
|
||||
exports.supportedFileTypes = supportedFileTypes
|
@ -1,64 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const Bluebird = require('bluebird')
|
||||
const fs = Bluebird.promisifyAll(require('fs'))
|
||||
const fileType = require('file-type')
|
||||
const mime = require('mime-types')
|
||||
const utils = require('./utils')
|
||||
|
||||
/**
|
||||
* @summary The default MIME type
|
||||
* @type {String}
|
||||
* @constant
|
||||
*/
|
||||
exports.DEFAULT_MIME_TYPE = 'application/octet-stream'
|
||||
|
||||
/**
|
||||
* @summary Get file's mime type, by reading the initial 262 bytes if necessary
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @param {String} filename - file path
|
||||
* @fulfil {String} - mime type
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* mime.getMimeTypeFromFileName('path/to/raspberrypi.img.gz').then((mimeType) => {
|
||||
* console.log(mimeType);
|
||||
* });
|
||||
*/
|
||||
exports.getMimeTypeFromFileName = (filename) => {
|
||||
const mimeType = mime.lookup(filename)
|
||||
|
||||
if (mimeType) {
|
||||
return Bluebird.resolve(mimeType)
|
||||
}
|
||||
|
||||
const FILE_TYPE_ID_START = 0
|
||||
const FILE_TYPE_ID_BYTES = 262
|
||||
|
||||
return Bluebird.using(fs.openAsync(filename, 'r').disposer((fileDescriptor) => {
|
||||
return fs.closeAsync(fileDescriptor)
|
||||
}), (fileDescriptor) => {
|
||||
return utils.readBufferFromImageFileDescriptor(fileDescriptor, FILE_TYPE_ID_START, FILE_TYPE_ID_BYTES).then((buffer) => {
|
||||
return _.get(fileType(buffer), [ 'mime' ], exports.DEFAULT_MIME_TYPE)
|
||||
})
|
||||
})
|
||||
}
|
@ -1,202 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const Bluebird = require('bluebird')
|
||||
const MBR = require('mbr')
|
||||
const GPT = require('gpt')
|
||||
|
||||
/**
|
||||
* @summary Maximum number of bytes to read from the stream
|
||||
* @type {Number}
|
||||
* @constant
|
||||
*/
|
||||
const MAX_STREAM_BYTES = 65536
|
||||
|
||||
/**
|
||||
* @summary Initial number of bytes read
|
||||
* @type {Number}
|
||||
* @constant
|
||||
*/
|
||||
const INITIAL_LENGTH = 0
|
||||
|
||||
/**
|
||||
* @summary Initial block size
|
||||
* @type {Number}
|
||||
* @constant
|
||||
*/
|
||||
const INITIAL_BLOCK_SIZE = 512
|
||||
|
||||
/**
|
||||
* @summary Maximum block size to check for
|
||||
* @type {Number}
|
||||
* @constant
|
||||
*/
|
||||
const MAX_BLOCK_SIZE = 4096
|
||||
|
||||
/**
|
||||
* @summary Attempt to parse the GPT from various block sizes
|
||||
* @function
|
||||
* @private
|
||||
*
|
||||
* @param {Buffer} buffer - Buffer
|
||||
* @returns {GPT|null}
|
||||
*
|
||||
* @example
|
||||
* const gpt = detectGPT(buffer);
|
||||
*
|
||||
* if (gpt != null) {
|
||||
* // Has a GPT
|
||||
* console.log('Partitions:', gpt.partitions);
|
||||
* }
|
||||
*/
|
||||
const detectGPT = (buffer) => {
|
||||
let blockSize = INITIAL_BLOCK_SIZE
|
||||
let gpt = null
|
||||
|
||||
// Attempt to parse the GPT from several offsets,
|
||||
// as the block size of the image may vary (512,1024,2048,4096);
|
||||
// For example, ISOs will usually have a block size of 4096,
|
||||
// but raw images a block size of 512 bytes
|
||||
while (blockSize <= MAX_BLOCK_SIZE) {
|
||||
gpt = _.attempt(GPT.parse, buffer.slice(blockSize))
|
||||
if (!_.isError(gpt)) {
|
||||
return gpt
|
||||
}
|
||||
blockSize += blockSize
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Attempt to parse the MBR & GPT from a given buffer
|
||||
* @function
|
||||
* @private
|
||||
*
|
||||
* @param {Object} image - Image metadata
|
||||
* @param {Buffer} buffer - Buffer
|
||||
*
|
||||
* @example
|
||||
* parsePartitionTables(image, buffer);
|
||||
*
|
||||
* if (image.hasMBR || image.hasGPT) {
|
||||
* console.log('Partitions:', image.partitions);
|
||||
* }
|
||||
*/
|
||||
const parsePartitionTables = (image, buffer) => {
|
||||
const mbr = _.attempt(MBR.parse, buffer)
|
||||
let gpt = null
|
||||
|
||||
if (!_.isError(mbr)) {
|
||||
image.hasMBR = true
|
||||
gpt = detectGPT(buffer)
|
||||
image.hasGPT = !_.isNil(gpt)
|
||||
}
|
||||
|
||||
// As MBR and GPT partition entries have a different structure,
|
||||
// we normalize them here to make them easier to deal with and
|
||||
// avoid clutter in what's sent to analytics
|
||||
if (image.hasGPT) {
|
||||
image.partitions = _.map(gpt.partitions, (partition) => {
|
||||
return {
|
||||
type: partition.type.toString(),
|
||||
id: partition.guid.toString(),
|
||||
name: partition.name,
|
||||
firstLBA: partition.firstLBA,
|
||||
lastLBA: partition.lastLBA,
|
||||
extended: false
|
||||
}
|
||||
})
|
||||
} else if (image.hasMBR) {
|
||||
image.partitions = _.map(mbr.partitions, (partition) => {
|
||||
return {
|
||||
type: partition.type,
|
||||
id: null,
|
||||
name: null,
|
||||
firstLBA: partition.firstLBA,
|
||||
lastLBA: partition.lastLBA,
|
||||
extended: partition.extended
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Attempt to read the MBR and GPT from an imagestream
|
||||
* @function
|
||||
* @public
|
||||
* @description
|
||||
* This operation will consume the first `MAX_STREAM_BYTES`
|
||||
* of the stream and then destroy the stream.
|
||||
*
|
||||
* @param {Object} image - image metadata
|
||||
* @returns {Promise}
|
||||
* @fulfil {Object} image
|
||||
*
|
||||
* @example
|
||||
* parsePartitions(image)
|
||||
* .then((image) => {
|
||||
* console.log('MBR:', image.hasMBR);
|
||||
* console.log('GPT:', image.hasGPT);
|
||||
* console.log('Partitions:', image.partitions);
|
||||
* });
|
||||
*/
|
||||
module.exports = (image) => {
|
||||
return new Bluebird((resolve, reject) => {
|
||||
const chunks = []
|
||||
let length = INITIAL_LENGTH
|
||||
let destroyed = false
|
||||
|
||||
image.hasMBR = false
|
||||
image.hasGPT = false
|
||||
|
||||
let stream = image.stream.pipe(image.transform)
|
||||
|
||||
stream.on('error', reject)
|
||||
|
||||
// We need to use the "old" flowing mode here,
|
||||
// as some dependencies don't implement the "readable"
|
||||
// mode properly (e.g. bzip2)
|
||||
stream.on('data', (chunk) => {
|
||||
chunks.push(chunk)
|
||||
length += chunk.length
|
||||
|
||||
// Once we've read enough bytes, terminate the stream
|
||||
if (length >= MAX_STREAM_BYTES && !destroyed) {
|
||||
// Prefer close() over destroy(), as some streams
|
||||
// from dependencies exhibit quirky behavior when destroyed
|
||||
if (image.stream.close) {
|
||||
image.stream.close()
|
||||
} else {
|
||||
image.stream.destroy()
|
||||
}
|
||||
|
||||
// Remove references to the streams so they can be garbage collected
|
||||
image.stream = null
|
||||
image.transform = null
|
||||
stream = null
|
||||
destroyed = true
|
||||
|
||||
// Parse the MBR, GPT and partitions from the obtained buffer
|
||||
parsePartitionTables(image, Buffer.concat(chunks))
|
||||
resolve(image)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
@ -1,88 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* @summary Supported filename extensions
|
||||
* @description
|
||||
* NOTE: Extensions with type: 'compressed' will be stripped
|
||||
* from filenames to determine the format of the uncompressed image.
|
||||
* For details, see lib/image-stream/README.md
|
||||
* @const {Array}
|
||||
*/
|
||||
module.exports = [
|
||||
{
|
||||
extension: 'zip',
|
||||
type: 'archive'
|
||||
},
|
||||
{
|
||||
extension: 'etch',
|
||||
type: 'archive'
|
||||
},
|
||||
{
|
||||
extension: 'gz',
|
||||
type: 'compressed'
|
||||
},
|
||||
{
|
||||
extension: 'bz2',
|
||||
type: 'compressed'
|
||||
},
|
||||
{
|
||||
extension: 'xz',
|
||||
type: 'compressed'
|
||||
},
|
||||
{
|
||||
extension: 'img',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'iso',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'bin',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'dsk',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'hddimg',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'raw',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'dmg',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'sdcard',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'rpi-sdimg',
|
||||
type: 'image'
|
||||
},
|
||||
{
|
||||
extension: 'wic',
|
||||
type: 'image'
|
||||
}
|
||||
]
|
@ -1,89 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const Bluebird = require('bluebird')
|
||||
const fs = Bluebird.promisifyAll(require('fs'))
|
||||
const errors = require('../../shared/errors')
|
||||
|
||||
/**
|
||||
* @summary Read a buffer from an image file descriptor
|
||||
* @function
|
||||
* @private
|
||||
*
|
||||
* @param {Number} fileDescriptor - file descriptor
|
||||
* @param {Number} position - image position to start reading from
|
||||
* @param {Number} count - number of bytes to read
|
||||
* @fulfil {Buffer} - buffer
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* fs.openAsync('path/to/image.img', 'r').then((fileDescriptor) => {
|
||||
* return utils.readBufferFromImageFileDescriptor(fileDescriptor, 0, 512);
|
||||
* }).then((buffer) => {
|
||||
* console.log(buffer);
|
||||
* });
|
||||
*/
|
||||
exports.readBufferFromImageFileDescriptor = (fileDescriptor, position, count) => {
|
||||
const BUFFER_FILL_VALUE = 0
|
||||
const BUFFER_START_POSITION = 0
|
||||
const buffer = Buffer.alloc(count, BUFFER_FILL_VALUE)
|
||||
|
||||
return fs.readAsync(fileDescriptor, buffer, BUFFER_START_POSITION, count, position).tap((bytesRead) => {
|
||||
if (bytesRead !== count) {
|
||||
throw errors.createUserError({
|
||||
title: 'Looks like the image is truncated',
|
||||
description: `We tried to read ${count} bytes at ${position}, but got ${bytesRead} bytes instead`
|
||||
})
|
||||
}
|
||||
}).return(buffer)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Extract the data of a readable stream
|
||||
* @function
|
||||
* @public
|
||||
*
|
||||
* @description
|
||||
* You should be careful when using this function, since you can only
|
||||
* extract files that are not bigger than the available computer memory.
|
||||
*
|
||||
* @param {StreamReadable} stream - stream
|
||||
* @fulfil {Buffer} - data
|
||||
* @returns {Promise}
|
||||
*
|
||||
* @example
|
||||
* const stream = fs.createReadStream('./foo/bar');
|
||||
*
|
||||
* utils.extractStream(stream).then((data) => {
|
||||
* console.log(data.toString());
|
||||
* });
|
||||
*/
|
||||
exports.extractStream = (stream) => {
|
||||
return new Bluebird((resolve, reject) => {
|
||||
const chunks = []
|
||||
|
||||
stream.on('data', (chunk) => {
|
||||
chunks.push(chunk)
|
||||
})
|
||||
|
||||
stream.on('error', reject)
|
||||
stream.on('end', () => {
|
||||
resolve(Buffer.concat(chunks))
|
||||
})
|
||||
})
|
||||
}
|
@ -1,47 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const SDK = module.exports
|
||||
|
||||
/**
|
||||
* @summary Initialised adapters
|
||||
* @type {Object<String,Adapter>}
|
||||
* @constant
|
||||
*/
|
||||
SDK.adapters = require('./adapters')
|
||||
|
||||
/**
|
||||
* Adapter Scanner
|
||||
* @see scanner.js
|
||||
* @ignore
|
||||
*/
|
||||
SDK.Scanner = require('./scanner')
|
||||
|
||||
/**
|
||||
* @summary Create a new Scanner
|
||||
* @param {Object} [options] - options
|
||||
* @returns {SDK.Scanner}
|
||||
* @example
|
||||
* SDK.createScanner({
|
||||
* blockdevice: { ... },
|
||||
* usbboot: { ... }
|
||||
* })
|
||||
*/
|
||||
SDK.createScanner = (options) => {
|
||||
return new SDK.Scanner(options)
|
||||
}
|
@ -1,232 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const _ = require('lodash')
|
||||
const EventEmitter = require('events')
|
||||
const debug = require('debug')('etcher:sdk:scanner')
|
||||
const SDK = require('./')
|
||||
|
||||
debug.enabled = true
|
||||
|
||||
/* eslint-disable lodash/prefer-lodash-method */
|
||||
|
||||
/**
|
||||
* Adapter Scanner
|
||||
* @class Scanner
|
||||
* @memberOf SDK
|
||||
*/
|
||||
class Scanner extends EventEmitter {
|
||||
/**
|
||||
* @summary Adapter Scanner constructor
|
||||
* @param {Object<String,Object>} [options] - device adapter options
|
||||
* @param {Object} [options.adapters] - map of external device adapters
|
||||
* @example
|
||||
* new Scanner({
|
||||
* blockdevice: { ... },
|
||||
* usbboot: { ... }
|
||||
* })
|
||||
*/
|
||||
constructor (options = {}) {
|
||||
// Inherit from EventEmitter
|
||||
super()
|
||||
|
||||
this.options = options
|
||||
this.isScanning = false
|
||||
this.adapters = new Map()
|
||||
|
||||
// Bind event handlers to own context to facilitate
|
||||
// removing listeners by reference
|
||||
this.onDevices = this.onDevices.bind(this)
|
||||
this.onError = this.onError.bind(this)
|
||||
|
||||
this.init()
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Initialize adapters
|
||||
* @private
|
||||
* @example
|
||||
* // Only to be used internally
|
||||
* this.init()
|
||||
*/
|
||||
init () {
|
||||
debug('scanner:init', this)
|
||||
_.each(_.keys(this.options), (adapterId) => {
|
||||
const adapter = SDK.adapters[adapterId] ||
|
||||
_.get(this.options, [ 'adapters', adapterId ])
|
||||
|
||||
if (_.isNil(adapter)) {
|
||||
console.warn(`Unknown adapter "${adapterId}"`)
|
||||
return
|
||||
}
|
||||
|
||||
this.subscribe(adapter)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Event handler for adapter's "devices" events
|
||||
* @private
|
||||
* @example
|
||||
* adapter.on('devices', this.onDevices)
|
||||
*/
|
||||
onDevices () {
|
||||
const devices = []
|
||||
this.adapters.forEach((adapter) => {
|
||||
devices.push(...adapter.devices)
|
||||
})
|
||||
this.emit('devices', devices)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Event handler for adapter's "error" events
|
||||
* @param {Error} error - error
|
||||
* @private
|
||||
* @example
|
||||
* adapter.on('error', this.onError)
|
||||
*/
|
||||
onError (error) {
|
||||
this.emit('error', error)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Start scanning for devices
|
||||
* @public
|
||||
* @returns {Scanner}
|
||||
* @example
|
||||
* scanner.start()
|
||||
*/
|
||||
start () {
|
||||
debug('start', !this.isScanning)
|
||||
if (this.isScanning) {
|
||||
return this
|
||||
}
|
||||
|
||||
this.adapters.forEach((adapter) => {
|
||||
const options = this.options[adapter.id]
|
||||
|
||||
/**
|
||||
* @summary Run a scan with an adapter
|
||||
* @function
|
||||
* @private
|
||||
* @example
|
||||
* runScan()
|
||||
*/
|
||||
const runScan = () => {
|
||||
adapter.scan(options, () => {
|
||||
if (this.isScanning) {
|
||||
setTimeout(runScan, Scanner.MIN_SCAN_DELAY)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
adapter
|
||||
.on('devices', this.onDevices)
|
||||
.on('error', this.onError)
|
||||
|
||||
runScan()
|
||||
})
|
||||
|
||||
this.emit('start')
|
||||
this.isScanning = true
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Stop scanning for devices
|
||||
* @public
|
||||
* @returns {Scanner}
|
||||
* @example
|
||||
* scanner.stop()
|
||||
*/
|
||||
stop () {
|
||||
debug('stop', this.isScanning)
|
||||
if (!this.isScanning) {
|
||||
return this
|
||||
}
|
||||
|
||||
this.adapters.forEach((adapter) => {
|
||||
adapter.removeListener('devices', this.onDevices)
|
||||
adapter.removeListener('error', this.onError)
|
||||
})
|
||||
|
||||
this.isScanning = false
|
||||
this.emit('stop')
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Subscribe to an adapter
|
||||
* @public
|
||||
* @param {Adapter} adapter - device adapter
|
||||
* @returns {Scanner}
|
||||
* @example
|
||||
* scanner.subscribe(adapter)
|
||||
*/
|
||||
subscribe (adapter) {
|
||||
debug('subscribe', adapter)
|
||||
|
||||
if (this.adapters.get(adapter.id)) {
|
||||
throw new Error(`Scanner: Already subscribed to ${adapter.id}`)
|
||||
}
|
||||
|
||||
this.adapters.set(adapter.id, adapter)
|
||||
this.emit('subscribe', adapter)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Unsubscribe from an adapter
|
||||
* @public
|
||||
* @param {Adapter} adapter - device adapter
|
||||
* @returns {Scanner}
|
||||
* @example
|
||||
* scanner.unsubscribe(adapter)
|
||||
* // OR
|
||||
* scanner.unsubscribe('adapterName')
|
||||
*/
|
||||
unsubscribe (adapter) {
|
||||
debug('unsubscribe', adapter)
|
||||
const instance = _.isString(adapter) ? this.adapters.get(adapter) : this.adapters.get(adapter.id)
|
||||
|
||||
if (_.isNil(instance)) {
|
||||
// Not subscribed
|
||||
return this
|
||||
}
|
||||
|
||||
instance.removeListener('devices', this.onDevices)
|
||||
instance.removeListener('error', this.onError)
|
||||
|
||||
this.adapters.delete(instance.id)
|
||||
this.emit('unsubscribe', adapter)
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Minimum delay between scans in ms
|
||||
* @const
|
||||
* @type {Number}
|
||||
*/
|
||||
Scanner.MIN_SCAN_DELAY = 500
|
||||
|
||||
module.exports = Scanner
|
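The scanner only assumes that an adapter is an `EventEmitter` with an `id`, a `devices` array, and a `scan(options, callback)` method; a hypothetical adapter (for illustration only, not part of the removed code) can therefore be plugged in like this:

const EventEmitter = require('events')
const Scanner = require('./scanner')   // require path assumed

class FakeAdapter extends EventEmitter {
  constructor () {
    super()
    this.id = 'fake'
    this.devices = []
  }

  scan (options, callback) {
    setImmediate(() => {
      this.devices = [ { device: '/dev/fake0' } ]   // made-up device
      this.emit('devices', this.devices)
      callback()
    })
  }
}

const scanner = new Scanner()
scanner.subscribe(new FakeAdapter())

scanner.on('devices', (devices) => {
  console.log(devices)
  scanner.stop()
})

scanner.start()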
@ -1,7 +0,0 @@
|
||||
rules:
|
||||
no-eq-null: off
|
||||
no-magic-numbers: off
|
||||
no-param-reassign: off
|
||||
no-underscore-dangle: off
|
||||
lodash/prefer-lodash-method: off
|
||||
lodash/prefer-get: off
|
@ -1,238 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const stream = require('readable-stream')
|
||||
const fs = require('fs')
|
||||
const debug = require('debug')('etcher:writer:block-read-stream')
|
||||
const errors = require('./error-types')
|
||||
|
||||
const CHUNK_SIZE = 64 * 1024
|
||||
const MIN_CHUNK_SIZE = 512
|
||||
|
||||
/**
|
||||
* @summary I/O retry base timeout, in milliseconds
|
||||
* @constant
|
||||
* @type {Number}
|
||||
*/
|
||||
const RETRY_BASE_TIMEOUT = 100
|
||||
|
||||
/**
|
||||
* @summary BlockReadStream
|
||||
* @class
|
||||
*/
|
||||
class BlockReadStream extends stream.Readable {
|
||||
/**
|
||||
* @summary BlockReadStream constructor
|
||||
* @param {Object} [options] - options
|
||||
* @param {Number} [options.fd] - file descriptor
|
||||
* @param {String} [options.path] - file path
|
||||
* @param {String} [options.flags] - file open flags
|
||||
* @param {Number} [options.mode] - file mode
|
||||
* @param {Number} [options.start] - start offset in bytes
|
||||
* @param {Number} [options.end] - end offset in bytes
|
||||
* @param {Boolean} [options.autoClose] - automatically close the stream on end
|
||||
* @param {Number} [options.maxRetries] - maximum number of retries per read
|
||||
* @example
|
||||
* new BlockReadStream()
|
||||
*/
|
||||
constructor (options) {
|
||||
options = Object.assign({}, BlockReadStream.defaults, options)
|
||||
options.objectMode = true
|
||||
|
||||
debug('block-read-stream %j', options)
|
||||
|
||||
super(options)
|
||||
|
||||
this.fs = options.fs
|
||||
this.fd = options.fd
|
||||
this.path = options.path
|
||||
this.flags = options.flags
|
||||
this.mode = options.mode
|
||||
this.end = options.end || Infinity
|
||||
this.autoClose = options.autoClose
|
||||
this.maxRetries = options.maxRetries || 5
|
||||
|
||||
this.retries = 0
|
||||
this.position = options.start || 0
|
||||
this.bytesRead = 0
|
||||
|
||||
this.closed = false
|
||||
this.destroyed = false
|
||||
|
||||
this.once('end', function () {
|
||||
if (this.autoClose) {
|
||||
this.close()
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @summary onRead handler
|
||||
* @param {Error} error - error
|
||||
* @param {Number} bytesRead - bytes read
|
||||
* @param {Buffer} buffer - resulting buffer
|
||||
* @example
|
||||
* fs.read(fd, buffer, 0, length, position, onRead)
|
||||
*/
|
||||
this._onRead = (error, bytesRead, buffer) => {
|
||||
if (!error && bytesRead !== buffer.length) {
|
||||
error = new Error(`Bytes read mismatch: ${bytesRead} != ${buffer.length}`)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
const isTransient = errors.isTransientError(error)
|
||||
|
||||
if (isTransient && (this.retries < this.maxRetries)) {
|
||||
this.retries += 1
|
||||
setTimeout(() => {
|
||||
this._read()
|
||||
}, RETRY_BASE_TIMEOUT * this.retries)
|
||||
return
|
||||
} else if (isTransient) {
|
||||
error.code = 'EUNPLUGGED'
|
||||
}
|
||||
|
||||
if (this.autoClose) {
|
||||
this.destroy()
|
||||
}
|
||||
|
||||
this.emit('error', error)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
this.retries = 0
|
||||
this.bytesRead += bytesRead
|
||||
this.position += buffer.length
|
||||
this.push(buffer)
|
||||
}
|
||||
|
||||
this.open()
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Read a chunk from the source
|
||||
* @private
|
||||
* @example
|
||||
* // not to be called directly
|
||||
*/
|
||||
_read () {
|
||||
// Wait for file handle to be open
|
||||
if (this.fd == null) {
|
||||
this.once('open', () => {
|
||||
this._read()
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const toRead = this.end - this.position
|
||||
|
||||
if (toRead <= 0) {
|
||||
this.push(null)
|
||||
return
|
||||
}
|
||||
|
||||
const length = Math.min(CHUNK_SIZE, Math.max(MIN_CHUNK_SIZE, toRead))
|
||||
const buffer = Buffer.alloc(length)
|
||||
|
||||
this.fs.read(this.fd, buffer, 0, length, this.position, this._onRead)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Open a handle to the file
|
||||
* @private
|
||||
* @example
|
||||
* this.open()
|
||||
*/
|
||||
open () {
|
||||
debug('open')
|
||||
|
||||
if (this.fd != null) {
|
||||
this.emit('open', this.fd)
|
||||
return
|
||||
}
|
||||
|
||||
this.fs.open(this.path, this.flags, this.mode, (error, fd) => {
|
||||
if (error) {
|
||||
if (this.autoClose) {
|
||||
this.destroy()
|
||||
}
|
||||
this.emit('error', error)
|
||||
} else {
|
||||
this.fd = fd
|
||||
this.emit('open', fd)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Close the underlying resource
|
||||
* @param {Function} callback - callback(error)
|
||||
* @example
|
||||
* blockStream.close((error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
close (callback) {
|
||||
debug('close')
|
||||
|
||||
if (callback) {
|
||||
this.once('close', callback)
|
||||
}
|
||||
|
||||
if (this.closed || this.fd == null) {
|
||||
if (this.fd == null) {
|
||||
this.once('open', () => {
|
||||
this.close()
|
||||
})
|
||||
} else {
|
||||
process.nextTick(() => {
|
||||
this.emit('close')
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
this.closed = true
|
||||
|
||||
this.fs.close(this.fd, (error) => {
|
||||
if (error) {
|
||||
this.emit('error', error)
|
||||
} else {
|
||||
this.emit('close')
|
||||
}
|
||||
})
|
||||
|
||||
this.fd = null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Default options
|
||||
* @type {Object}
|
||||
* @constant
|
||||
*/
|
||||
BlockReadStream.defaults = {
|
||||
fs,
|
||||
fd: null,
|
||||
path: null,
|
||||
flags: 'r',
|
||||
mode: 0o666,
|
||||
autoClose: true
|
||||
}
|
||||
|
||||
module.exports = BlockReadStream
|
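A minimal usage sketch for the removed stream; the device path is hypothetical and reading a raw device typically requires elevated privileges:

const BlockReadStream = require('./block-read-stream')   // require path assumed

// Read the first 64 MiB of a raw device in 64 KiB chunks
const source = new BlockReadStream({
  path: '/dev/rdisk2',
  end: 64 * 1024 * 1024
})

source.on('data', (chunk) => {
  console.log(`read ${chunk.length} bytes`)
})

source.on('end', () => {
  console.log(`done, ${source.bytesRead} bytes total`)
})

source.on('error', console.error)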
@ -1,136 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const stream = require('readable-stream')
|
||||
const debug = require('debug')('etcher:writer:block-stream')
|
||||
|
||||
const MIN_BLOCK_SIZE = 512
|
||||
const CHUNK_SIZE = 64 * 1024
|
||||
|
||||
/**
|
||||
* @summary BlockStream class
|
||||
* @class
|
||||
*/
|
||||
class BlockStream extends stream.Transform {
|
||||
/**
|
||||
* @summary BlockStream constructor
|
||||
* @param {Object} [options] - options
|
||||
* @param {Number} [options.blockSize] - block size in bytes
|
||||
* @param {Number} [options.chunkSize] - chunk size in bytes
|
||||
* @example
|
||||
* new BlockStream(options)
|
||||
*/
|
||||
constructor (options) {
|
||||
options = Object.assign({}, BlockStream.defaults, options)
|
||||
options.readableObjectMode = true
|
||||
|
||||
super(options)
|
||||
|
||||
this.blockSize = options.blockSize
|
||||
this.chunkSize = options.chunkSize
|
||||
this.bytesRead = 0
|
||||
this.bytesWritten = 0
|
||||
|
||||
this._buffers = []
|
||||
this._bytes = 0
|
||||
|
||||
debug('new %j', options)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Internal write handler
|
||||
* @private
|
||||
* @param {Buffer} chunk - chunk buffer
|
||||
* @param {String} encoding - chunk encoding
|
||||
* @param {Function} next - callback(error, value)
|
||||
* @example
|
||||
* // Not to be called directly
|
||||
*/
|
||||
_transform (chunk, encoding, next) {
|
||||
this.bytesRead += chunk.length
|
||||
|
||||
if (this._bytes === 0 && chunk.length >= this.chunkSize) {
|
||||
if (chunk.length % this.blockSize === 0) {
|
||||
this.bytesWritten += chunk.length
|
||||
this.push(chunk)
|
||||
next()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
this._buffers.push(chunk)
|
||||
this._bytes += chunk.length
|
||||
|
||||
if (this._bytes >= this.chunkSize) {
|
||||
let block = Buffer.concat(this._buffers)
|
||||
const length = Math.floor(block.length / this.blockSize) * this.blockSize
|
||||
|
||||
this._buffers.length = 0
|
||||
this._bytes = 0
|
||||
|
||||
if (block.length !== length) {
|
||||
this._buffers.push(block.slice(length))
|
||||
this._bytes += block.length - length
|
||||
block = block.slice(0, length)
|
||||
}
|
||||
|
||||
this.bytesWritten += block.length
|
||||
this.push(block)
|
||||
}
|
||||
|
||||
next()
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Internal stream end handler
|
||||
* @private
|
||||
* @param {Function} done - callback(error, value)
|
||||
* @example
|
||||
* // Not to be called directly
|
||||
*/
|
||||
_flush (done) {
|
||||
if (!this._bytes) {
|
||||
done()
|
||||
return
|
||||
}
|
||||
|
||||
const length = Math.ceil(this._bytes / this.blockSize) * this.blockSize
|
||||
const block = Buffer.alloc(length)
|
||||
let offset = 0
|
||||
|
||||
for (let index = 0; index < this._buffers.length; index += 1) {
|
||||
this._buffers[index].copy(block, offset)
|
||||
offset += this._buffers[index].length
|
||||
}
|
||||
|
||||
this.push(block)
|
||||
done()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Default options
|
||||
* @type {Object}
|
||||
* @constant
|
||||
*/
|
||||
BlockStream.defaults = {
|
||||
blockSize: MIN_BLOCK_SIZE,
|
||||
chunkSize: CHUNK_SIZE
|
||||
}
|
||||
|
||||
module.exports = BlockStream
|
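A sketch of the re-chunking behaviour: arbitrary input chunks come out as block-aligned buffers. The input path is hypothetical:

const fs = require('fs')
const BlockStream = require('./block-stream')   // require path assumed

fs.createReadStream('./image.img')
  .pipe(new BlockStream({ blockSize: 512 }))
  .on('data', (block) => {
    // Every emitted buffer is a multiple of 512 bytes; the trailing
    // partial block, if any, is zero-padded by _flush()
    console.log(block.length)
  })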
@ -1,312 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const stream = require('readable-stream')
|
||||
const fs = require('fs')
|
||||
const speedometer = require('speedometer')
|
||||
const debug = require('debug')('etcher:writer:block-write-stream')
|
||||
const errors = require('./error-types')
|
||||
|
||||
const CHUNK_SIZE = 64 * 1024
|
||||
const UPDATE_INTERVAL_MS = 1000 / 60
|
||||
|
||||
/**
|
||||
* @summary I/O retry base timeout, in milliseconds
|
||||
* @constant
|
||||
* @type {Number}
|
||||
*/
|
||||
const RETRY_BASE_TIMEOUT = 100
|
||||
|
||||
/**
|
||||
* @summary BlockWriteStream
|
||||
* @class
|
||||
*/
|
||||
class BlockWriteStream extends stream.Writable {
|
||||
/**
|
||||
* @summary BlockWriteStream constructor
|
||||
* @param {Object} [options] - options
|
||||
* @param {Number} [options.fd] - file descriptor
|
||||
* @param {String} [options.path] - file path
|
||||
* @param {String} [options.flags] - file open flags
|
||||
* @param {Number} [options.mode] - file mode
|
||||
* @param {Boolean} [options.autoClose] - automatically close the stream on end
|
||||
* @param {Number} [options.maxRetries] - maximum number of retries per write
|
||||
* @example
|
||||
* new BlockWriteStream(options)
|
||||
*/
|
||||
constructor (options) {
|
||||
options = Object.assign({}, BlockWriteStream.defaults, options)
|
||||
options.objectMode = true
|
||||
|
||||
debug('block-write-stream %j', options)
|
||||
|
||||
super(options)
|
||||
|
||||
this._writableState.highWaterMark = 1
|
||||
|
||||
this.fs = options.fs
|
||||
this.fd = options.fd
|
||||
this.path = options.path
|
||||
this.flags = options.flags
|
||||
this.mode = options.mode
|
||||
this.autoClose = options.autoClose
|
||||
this.maxRetries = options.maxRetries || 5
|
||||
|
||||
this.position = 0
|
||||
this.bytesRead = 0
|
||||
this.blocksRead = 0
|
||||
this.bytesWritten = 0
|
||||
this.blocksWritten = 0
|
||||
this.retries = 0
|
||||
this.meter = speedometer()
|
||||
this.delta = 0
|
||||
this.speed = 0
|
||||
|
||||
this.clear = () => {
|
||||
clearInterval(this.timer)
|
||||
}
|
||||
|
||||
this.update = () => {
|
||||
this.speed = this.meter(this.delta)
|
||||
this.delta = 0
|
||||
}
|
||||
|
||||
this.once('end', this.clear)
|
||||
this.once('error', this.clear)
|
||||
|
||||
this.timer = setInterval(this.update, UPDATE_INTERVAL_MS)
|
||||
|
||||
this.closed = false
|
||||
this.destroyed = false
|
||||
|
||||
this.once('finish', function () {
|
||||
if (this.autoClose) {
|
||||
this.close()
|
||||
}
|
||||
})
|
||||
|
||||
this._flushing = false
|
||||
this._firstBlocks = []
|
||||
|
||||
this.open()
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Internal write handler
|
||||
* @private
|
||||
* @param {Buffer} chunk - chunk buffer
|
||||
* @param {String} encoding - chunk encoding
|
||||
* @param {Function} next - callback(error, value)
|
||||
* @example
|
||||
* // Not to be called directly
|
||||
*/
|
||||
_write (chunk, encoding, next) {
|
||||
debug('_write', chunk.length, chunk.position, chunk.address)
|
||||
|
||||
// Wait for file handle to be open
|
||||
if (this.fd == null) {
|
||||
this.once('open', () => {
|
||||
this._write(chunk, encoding, next)
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (this.retries === 0) {
|
||||
this.bytesRead += chunk.length
|
||||
this.blocksRead += 1
|
||||
}
|
||||
|
||||
if (chunk.position == null) {
|
||||
chunk.position = this.position
|
||||
}
|
||||
|
||||
if (!this._flushing && (chunk.position < CHUNK_SIZE)) {
|
||||
this._firstBlocks.push(chunk)
|
||||
this.position = chunk.position + chunk.length
|
||||
process.nextTick(next)
|
||||
return
|
||||
}
|
||||
|
||||
if (chunk.position !== this.position) {
|
||||
this.position = chunk.position
|
||||
}
|
||||
|
||||
fs.write(this.fd, chunk, 0, chunk.length, chunk.position, (error, bytesWritten) => {
|
||||
if (!error) {
|
||||
this.bytesWritten += bytesWritten
|
||||
this.delta += bytesWritten
|
||||
this.blocksWritten += 1
|
||||
this.position += bytesWritten
|
||||
this.retries = 0
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
const isTransient = errors.isTransientError(error)
|
||||
|
||||
if (isTransient && (this.retries < this.maxRetries)) {
|
||||
this.retries += 1
|
||||
setTimeout(() => {
|
||||
this._write(chunk, encoding, next)
|
||||
}, RETRY_BASE_TIMEOUT * this.retries)
|
||||
return
|
||||
} else if (isTransient) {
|
||||
error.code = 'EUNPLUGGED'
|
||||
}
|
||||
|
||||
next(error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Write buffered data before a stream ends
|
||||
* @private
|
||||
* @param {Function} done - callback
|
||||
* @example
|
||||
* // Called by stream internals
|
||||
*/
|
||||
_final (done) {
|
||||
debug('_final')
|
||||
|
||||
/**
|
||||
* @summary Write the next chunk of the buffered `_firstBlocks`
|
||||
* @param {Error} [error] - error
|
||||
* @example
|
||||
* writeNext()
|
||||
*/
|
||||
const writeNext = (error) => {
|
||||
if (error) {
|
||||
this.destroy(error)
|
||||
return
|
||||
}
|
||||
const chunk = this._firstBlocks.pop()
|
||||
if (!chunk) {
|
||||
done()
|
||||
return
|
||||
}
|
||||
this._write(chunk, null, writeNext)
|
||||
}
|
||||
|
||||
this._flushing = true
|
||||
writeNext()
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Destroy the stream, and emit the passed error
|
||||
* @private
|
||||
* @param {Error} [error] - error
|
||||
* @param {Function} done - callback
|
||||
* @example
|
||||
* stream.destroy()
|
||||
*/
|
||||
_destroy (error, done) {
|
||||
debug('_destroy', error)
|
||||
|
||||
if (this.autoClose) {
|
||||
this.close((closeError) => {
|
||||
done(error || closeError)
|
||||
})
|
||||
} else {
|
||||
done(error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Open a handle to the file
|
||||
* @private
|
||||
* @example
|
||||
* this.open()
|
||||
*/
|
||||
open () {
|
||||
debug('open')
|
||||
|
||||
if (this.fd != null) {
|
||||
this.emit('open', this.fd)
|
||||
return
|
||||
}
|
||||
|
||||
this.fs.open(this.path, this.flags, this.mode, (error, fd) => {
|
||||
if (error) {
|
||||
if (this.autoClose) {
|
||||
this.destroy()
|
||||
}
|
||||
this.emit('error', error)
|
||||
} else {
|
||||
this.fd = fd
|
||||
this.emit('open', fd)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Close the underlying resource
|
||||
* @param {Function} callback - callback(error)
|
||||
* @example
|
||||
* blockStream.close((error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
close (callback) {
|
||||
debug('close')
|
||||
|
||||
if (callback) {
|
||||
this.once('close', callback)
|
||||
}
|
||||
|
||||
if (this.closed || this.fd == null) {
|
||||
if (this.fd == null) {
|
||||
this.once('open', () => {
|
||||
this.close()
|
||||
})
|
||||
} else {
|
||||
process.nextTick(() => {
|
||||
this.emit('close')
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
this.closed = true
|
||||
|
||||
this.fs.close(this.fd, (error) => {
|
||||
if (error) {
|
||||
this.emit('error', error)
|
||||
} else {
|
||||
this.emit('close')
|
||||
}
|
||||
})
|
||||
|
||||
this.fd = null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Default options
|
||||
* @type {Object}
|
||||
* @constant
|
||||
*/
|
||||
BlockWriteStream.defaults = {
|
||||
fs,
|
||||
fd: null,
|
||||
path: null,
|
||||
flags: 'w',
|
||||
mode: 0o666,
|
||||
autoClose: true
|
||||
}
|
||||
|
||||
module.exports = BlockWriteStream
|
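How the removed streams fit together, as a hedged sketch; the paths are hypothetical and writing to a raw device requires elevated privileges:

const fs = require('fs')
const BlockStream = require('./block-stream')             // require paths assumed
const BlockWriteStream = require('./block-write-stream')

fs.createReadStream('./image.img')
  .pipe(new BlockStream())                                 // align chunks to 512-byte blocks
  .pipe(new BlockWriteStream({ path: '/dev/rdisk2', flags: 'w' }))
  .on('finish', function () {
    console.log(`wrote ${this.bytesWritten} bytes`)
  })
  .on('error', console.error)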
@ -1,141 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const stream = require('readable-stream')
|
||||
const crypto = require('crypto')
|
||||
const xxhash = require('xxhash')
|
||||
const _ = require('lodash')
|
||||
|
||||
/**
|
||||
* @summary Create an instance of ChecksumStream
|
||||
* @name ChecksumStream
|
||||
* @class
|
||||
*/
|
||||
class ChecksumStream extends stream.Transform {
|
||||
/**
|
||||
* @summary ChecksumStream constructor
|
||||
* @name ChecksumStream
|
||||
* @class
|
||||
* @param {Object} options - options
|
||||
* @param {String[]} options.algorithms - hash algorithms
|
||||
* @example
|
||||
* var checksum = new ChecksumStream({
|
||||
* algorithms: [ 'md5' ]
|
||||
* })
|
||||
*
|
||||
* checksum.once('checksum', (checksum) => {
|
||||
* // checksum: {
|
||||
* // md5: '55a4eb779e08f604c41ba1cbfff47ada'
|
||||
* // }
|
||||
* })
|
||||
*
|
||||
* fs.createReadStream( 'os-image.img' )
|
||||
* .pipe( checksum )
|
||||
* .pipe( fs.createWriteStream( '/dev/rdisk2' ) )
|
||||
* .once( 'finish', () => { ... })
|
||||
*/
|
||||
constructor (options = {}) {
|
||||
super(options)
|
||||
this.results = {}
|
||||
this.algorithms = options.algorithms || []
|
||||
this.hashes = _.map(this.algorithms, (algorithm) => {
|
||||
return this._createHash(algorithm)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Create & pipe to the Hash streams
|
||||
* @private
|
||||
* @param {String} algorithm - hash algorithm
|
||||
* @returns {Stream}
|
||||
* @example
|
||||
* const hash = this._createHash(algorithm)
|
||||
*/
|
||||
_createHash (algorithm) {
|
||||
let hash = null
|
||||
|
||||
if (algorithm === 'xxhash') {
|
||||
// Seed value 0x45544348 = ASCII "ETCH"
|
||||
const seed = 0x45544348
|
||||
const is64Bit = [ 'x64', 'arm64', 'ppc64' ].includes(process.arch)
|
||||
hash = new xxhash.Stream(seed, is64Bit ? 64 : 32, Buffer.allocUnsafe(is64Bit ? 8 : 4))
|
||||
} else {
|
||||
hash = _.attempt(crypto.createHash, algorithm)
|
||||
}
|
||||
|
||||
if (_.isError(hash)) {
|
||||
hash.message += ` "${algorithm}"`
|
||||
throw hash
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Check whether all checksums have been calculated
|
||||
* @private
|
||||
* @example
|
||||
* hash.once('end', check)
|
||||
*/
|
||||
const check = () => {
|
||||
if (_.keys(this.results).length === this.algorithms.length) {
|
||||
this.emit('checksum', _.clone(this.results))
|
||||
}
|
||||
}
|
||||
|
||||
hash.once('error', (error) => {
|
||||
return this.emit('error', error)
|
||||
})
|
||||
|
||||
hash.once('readable', () => {
|
||||
this.results[algorithm] = hash.read().toString('hex')
|
||||
check()
|
||||
})
|
||||
|
||||
return hash
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Pass through chunks
|
||||
* @private
|
||||
* @param {Buffer} chunk - chunk
|
||||
* @param {String} encoding - encoding
|
||||
* @param {Function} next - callback
|
||||
* @example
|
||||
* checksumStream.write(buffer)
|
||||
*/
|
||||
_transform (chunk, encoding, next) {
|
||||
for (let index = 0; index < this.hashes.length; index += 1) {
|
||||
this.hashes[index].write(chunk)
|
||||
}
|
||||
next(null, chunk)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary End the hash streams once this stream ends
|
||||
* @private
|
||||
* @param {Function} done - callback
|
||||
* @example
|
||||
* checksumStream.end()
|
||||
*/
|
||||
_flush (done) {
|
||||
for (let index = 0; index < this.hashes.length; index += 1) {
|
||||
this.hashes[index].end()
|
||||
}
|
||||
done()
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ChecksumStream
|
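Beyond the piped example in the class JSDoc above, the stream can also be used on its own to hash a file (input path hypothetical); note that the readable side has to be drained for data to keep flowing:

const fs = require('fs')
const ChecksumStream = require('./checksum-stream')   // require path assumed

const checksum = new ChecksumStream({ algorithms: [ 'md5', 'sha256' ] })

checksum.once('checksum', (results) => {
  console.log(results)   // e.g. { md5: '...', sha256: '...' }
})

checksum.on('error', console.error)
checksum.resume()   // drain the pass-through output

fs.createReadStream('./image.img').pipe(checksum)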
@ -1,45 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
|
||||
/**
|
||||
* @summary Determine whether an error is considered a
|
||||
* transient occurrence, in which case the operation should be retried.
|
||||
* Errors considered potentially temporary are:
|
||||
* - Mac OS: ENXIO, EBUSY
|
||||
* - Windows: ENOENT, UNKNOWN
|
||||
* - Linux: EIO, EBUSY
|
||||
* @private
|
||||
* @param {Error} error - Error
|
||||
* @returns {Boolean}
|
||||
* @example
|
||||
* errors.isTransientError(error)
|
||||
*/
|
||||
isTransientError (error) {
|
||||
if (process.platform === 'darwin') {
|
||||
return error.code === 'ENXIO' || error.code === 'EBUSY'
|
||||
} else if (process.platform === 'linux') {
|
||||
return error.code === 'EIO' || error.code === 'EBUSY'
|
||||
} else if (process.platform === 'win32') {
|
||||
return error.code === 'ENOENT' || error.code === 'UNKNOWN'
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
}
|
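The block streams above use `isTransientError()` to drive their retry logic; the same pattern in isolation looks roughly like this (the `withRetries` helper is hypothetical, not part of the removed code):

const errors = require('./error-types')   // require path assumed

const RETRY_BASE_TIMEOUT = 100
const MAX_RETRIES = 5

const withRetries = (operation, callback, retries = 0) => {
  operation((error, result) => {
    if (error && errors.isTransientError(error) && retries < MAX_RETRIES) {
      // Back off linearly and try again, as the streams do
      setTimeout(() => {
        withRetries(operation, callback, retries + 1)
      }, RETRY_BASE_TIMEOUT * (retries + 1))
      return
    }
    callback(error, result)
  })
}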
@ -1,832 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const os = require('os')
|
||||
const fs = require('fs')
|
||||
const EventEmitter = require('events').EventEmitter
|
||||
const mountutils = require('mountutils')
|
||||
const drivelist = require('drivelist')
|
||||
const stream = require('readable-stream')
|
||||
const Pipage = require('pipage')
|
||||
const BlockMap = require('blockmap')
|
||||
const BlockStream = require('./block-stream')
|
||||
const BlockWriteStream = require('./block-write-stream')
|
||||
const BlockReadStream = require('./block-read-stream')
|
||||
const ChecksumStream = require('./checksum-stream')
|
||||
const ProgressStream = require('./progress-stream')
|
||||
const imageStream = require('../image-stream')
|
||||
const diskpart = require('../../cli/diskpart')
|
||||
const constraints = require('../../shared/drive-constraints')
|
||||
const errors = require('../../shared/errors')
|
||||
const debug = require('debug')('etcher:writer')
|
||||
const _ = require('lodash')
|
||||
|
||||
/* eslint-disable prefer-reflect */
|
||||
/* eslint-disable callback-return */
|
||||
|
||||
/**
|
||||
* @summary Timeout, in milliseconds, to wait before unmounting on success
|
||||
* @constant
|
||||
* @type {Number}
|
||||
*/
|
||||
const UNMOUNT_ON_SUCCESS_TIMEOUT_MS = 2000
|
||||
|
||||
/**
|
||||
* @summary Helper function to run a set of async tasks in sequence
|
||||
* @private
|
||||
* @param {Array<Function>} tasks - set of tasks
|
||||
* @param {Function} callback - callback(error)
|
||||
* @example
|
||||
* runSeries([
|
||||
* (next) => first(next),
|
||||
* (next) => second(next),
|
||||
* ], (error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
const runSeries = (tasks, callback) => {
|
||||
/**
|
||||
* @summary Task runner
|
||||
* @param {Error} [error] - error
|
||||
* @example
|
||||
* run()
|
||||
*/
|
||||
const run = (error) => {
|
||||
const task = tasks.shift()
|
||||
if (error || task == null) {
|
||||
callback(error)
|
||||
return
|
||||
}
|
||||
task(run)
|
||||
}
|
||||
|
||||
run()
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Helper function to run a set of async tasks in parallel
|
||||
* @private
|
||||
* @param {Array<Function>} tasks - set of tasks
|
||||
* @param {Function} callback - callback(error)
|
||||
* @example
|
||||
* runParallel([
|
||||
* (next) => first(next),
|
||||
* (next) => second(next),
|
||||
* ], (error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
const runParallel = (tasks, callback) => {
|
||||
let count = tasks.length
|
||||
const resultErrors = new Array(count).fill(null)
|
||||
const results = new Array(count).fill(null)
|
||||
|
||||
tasks.forEach((task, index) => {
|
||||
task((error, result) => {
|
||||
count -= 1
|
||||
resultErrors[index] = error
|
||||
results[index] = result
|
||||
if (count === 0) {
|
||||
callback(resultErrors, results)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary ImageWriter class
|
||||
* @class
|
||||
*/
|
||||
class ImageWriter extends EventEmitter {
|
||||
/**
|
||||
* @summary ImageWriter constructor
|
||||
* @param {Object} options - options
|
||||
* @param {Boolean} options.verify - whether to verify the dest
|
||||
* @param {Boolean} options.unmountOnSuccess - whether to unmount the dest after flashing
|
||||
* @param {Array<String>} options.checksumAlgorithms - checksums to calculate
|
||||
* @example
|
||||
* new ImageWriter(options)
|
||||
*/
|
||||
constructor (options) {
|
||||
options = options || {}
|
||||
super()
|
||||
|
||||
debug('new', options)
|
||||
|
||||
this.unmountOnSuccess = Boolean(options.unmountOnSuccess)
|
||||
this.verifyChecksums = Boolean(options.verify)
|
||||
this.checksumAlgorithms = options.checksumAlgorithms || []
|
||||
|
||||
this.source = null
|
||||
this.pipeline = null
|
||||
this.destinations = new Map()
|
||||
|
||||
this.finished = false
|
||||
this.hadError = false
|
||||
|
||||
this.bytesRead = 0
|
||||
this.bytesWritten = 0
|
||||
this.checksum = {}
|
||||
|
||||
this.once('error', () => {
|
||||
this.hadError = true
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Verify that the selected destination devices exist
|
||||
* @param {Array<String>} paths - target device paths
|
||||
* @param {Function} callback - callback(error)
|
||||
* @private
|
||||
* @example
|
||||
* writer.getSelectedDevices(['/dev/disk2'], (error, destinations) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
getSelectedDevices (paths, callback) {
|
||||
debug('state:device-select', paths)
|
||||
drivelist.list((error, drives) => {
|
||||
debug('state:device-select', paths, error ? 'NOT OK' : 'OK')
|
||||
|
||||
if (error) {
|
||||
callback.call(this, error)
|
||||
return
|
||||
}
|
||||
|
||||
const results = paths.map((path) => {
|
||||
const destination = {
|
||||
fd: null,
|
||||
error: null,
|
||||
stream: null,
|
||||
finished: false,
|
||||
verified: false,
|
||||
device: _.find(drives, {
|
||||
device: path
|
||||
})
|
||||
}
|
||||
|
||||
if (!destination.device) {
|
||||
const selectionError = errors.createUserError({
|
||||
title: `The selected drive "${path}" was not found`,
|
||||
description: `We can't find "${path}" in your system. Did you unplug the drive?`,
|
||||
code: 'EUNPLUGGED'
|
||||
})
|
||||
debug('state:device-select', destination, 'NOT OK')
|
||||
destination.error = selectionError
|
||||
}
|
||||
|
||||
return destination
|
||||
})
|
||||
|
||||
callback.call(this, null, results)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Unmount the destination device
|
||||
* @param {Object} destination - destination object
|
||||
* @param {Function} callback - callback(error)
|
||||
* @private
|
||||
* @example
|
||||
* writer.unmountDevice(destination, (error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
unmountDevice (destination, callback) {
|
||||
if (os.platform() === 'win32') {
|
||||
callback.call(this)
|
||||
return
|
||||
}
|
||||
|
||||
debug('state:unmount', destination.device.device)
|
||||
|
||||
mountutils.unmountDisk(destination.device.device, (error) => {
|
||||
debug('state:unmount', destination.device.device, error ? 'NOT OK' : 'OK')
|
||||
destination.error = error
|
||||
callback.call(this, error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Clean a device's partition table
|
||||
* @param {Object} destination - destination object
|
||||
* @param {Function} callback - callback(error)
|
||||
* @private
|
||||
* @example
|
||||
* writer.removePartitionTable(destination, (error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
removePartitionTable (destination, callback) {
|
||||
if (os.platform() !== 'win32') {
|
||||
callback.call(this)
|
||||
return
|
||||
}
|
||||
|
||||
debug('state:clean', destination.device.device)
|
||||
|
||||
diskpart.clean(destination.device.device).asCallback((error) => {
|
||||
debug('state:clean', destination.device.device, error ? 'NOT OK' : 'OK')
|
||||
destination.error = error
|
||||
callback.call(this, error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Open the source for reading
|
||||
* @param {String} imagePath - path to source image
|
||||
* @param {Function} callback - callback(error)
|
||||
* @private
|
||||
* @example
|
||||
* writer.openSource('path/to/image.img', (error, source) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
openSource (imagePath, callback) {
|
||||
debug('state:source-open', imagePath)
|
||||
imageStream.getFromFilePath(imagePath).asCallback((error, image) => {
|
||||
debug('state:source-open', imagePath, error ? 'NOT OK' : 'OK')
|
||||
this.source = image
|
||||
callback.call(this, error, this.source)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Open the destination for writing
|
||||
* @param {Object} destination - destination object
|
||||
* @param {Function} callback - callback(error)
|
||||
* @private
|
||||
* @example
|
||||
* writer.openDestination(destination, (error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
openDestination (destination, callback) {
|
||||
debug('state:destination-open', destination.device.raw)
|
||||
|
||||
/* eslint-disable no-bitwise */
|
||||
const flags = fs.constants.O_RDWR |
|
||||
fs.constants.O_NONBLOCK |
|
||||
fs.constants.O_SYNC
|
||||
/* eslint-enable no-bitwise */
|
||||
|
||||
fs.open(destination.device.raw, flags, (error, fd) => {
|
||||
debug('state:destination-open', destination.device.raw, error ? 'NOT OK' : 'OK')
|
||||
destination.fd = fd
|
||||
destination.error = error
|
||||
callback.call(this, error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Check a destination against the drive constraints
|
||||
* @param {Object} destination - destination object
|
||||
* @param {Function} callback - callback(error)
|
||||
* @example
|
||||
* this.checkDriveConstraints(destination, (error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
checkDriveConstraints (destination, callback) {
|
||||
if (!constraints.isDriveLargeEnough(destination.device, this.source)) {
|
||||
destination.error = errors.createUserError({
|
||||
title: 'The image you selected is too big for this drive',
|
||||
description: 'Please connect a bigger drive and try again'
|
||||
})
|
||||
}
|
||||
|
||||
callback.call(this, destination.error)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Start the flashing process
|
||||
* @param {String} imagePath - path to source image
|
||||
* @param {Array<String>} destinationPaths - paths to target devices
|
||||
* @returns {ImageWriter} imageWriter
|
||||
* @example
|
||||
* imageWriter.write(imagePath, destinationPaths)
|
||||
* .on('error', reject)
|
||||
* .on('progress', onProgress)
|
||||
* .on('finish', resolve)
|
||||
*/
|
||||
write (imagePath, destinationPaths) {
|
||||
// Open the source image
|
||||
this.openSource(imagePath, (openError, source) => {
|
||||
if (openError) {
|
||||
this.emit('error', openError)
|
||||
return
|
||||
}
|
||||
|
||||
// Open & prepare target devices
|
||||
this.getSelectedDevices(destinationPaths, (error, destinations) => {
|
||||
if (error) {
|
||||
this.emit('error', error)
|
||||
return
|
||||
}
|
||||
|
||||
const notFound = _.find(destinations, (destination) => {
|
||||
return Boolean(destination.error)
|
||||
})
|
||||
|
||||
if (notFound) {
|
||||
this.emit('error', notFound.error)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate preparation tasks for all destinations
|
||||
const tasks = destinations.map((destination) => {
|
||||
destination.verified = !this.verifyChecksums
|
||||
this.destinations.set(destination.device.device, destination)
|
||||
return (next) => {
|
||||
runSeries([
|
||||
(done) => { this.checkDriveConstraints(destination, done) },
|
||||
(done) => { this.unmountDevice(destination, done) },
|
||||
(done) => { this.removePartitionTable(destination, done) },
|
||||
(done) => { this.openDestination(destination, done) }
|
||||
], () => {
|
||||
if (destination.error) {
|
||||
this.emit('fail', { device: destination.device.device, error: destination.error })
|
||||
}
|
||||
next(destination.error, destination)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Run the preparation tasks in parallel for each destination
|
||||
runParallel(tasks, (resultErrors, results) => {
|
||||
// We can start (theoretically) flashing now...
|
||||
debug('write:prep:done', resultErrors)
|
||||
if (_.every(resultErrors, _.identity)) {
|
||||
this.emit('error', resultErrors[0])
|
||||
} else {
|
||||
this._write()
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Internal progress state handler
|
||||
* @param {Object} state - progress state
|
||||
* @example
|
||||
* pipeline.on('progress', (state) => {
|
||||
* // ...
|
||||
* this._onProgress(state)
|
||||
* })
|
||||
*/
|
||||
_onProgress (state) {
|
||||
state.totalSpeed = 0
|
||||
state.active = 0
|
||||
|
||||
state.flashing = 0
|
||||
state.verifying = 0
|
||||
state.failed = 0
|
||||
state.successful = 0
|
||||
|
||||
this.destinations.forEach((dest) => {
|
||||
state.flashing += !dest.error && !dest.finished ? 1 : 0
|
||||
state.verifying += !dest.error && dest.finished && !dest.verified ? 1 : 0
|
||||
state.failed += dest.error ? 1 : 0
|
||||
state.successful += !dest.error && dest.finished && (dest.verified || !this.verifyChecksums) ? 1 : 0
|
||||
if (!(dest.finished && dest.verified) && !dest.error) {
|
||||
state.totalSpeed += state.type === 'write'
|
||||
? (dest.stream.speed || 0)
|
||||
: (dest.progress.state.speed || 0)
|
||||
state.active += 1
|
||||
}
|
||||
})
|
||||
|
||||
state.speed = state.active
|
||||
? state.totalSpeed / state.active
|
||||
: state.totalSpeed
|
||||
|
||||
state.eta = state.speed ? state.remaining / state.speed : 0
|
||||
|
||||
this.emit('progress', state)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Start the writing process
|
||||
* @returns {ImageWriter} imageWriter
|
||||
* @example
|
||||
* imageWriter.write()
|
||||
*/
|
||||
_write () {
|
||||
this.pipeline = this._createWritePipeline()
|
||||
|
||||
this.pipeline.on('checksum', (checksum) => {
|
||||
debug('write:checksum', checksum)
|
||||
this.checksum = checksum
|
||||
})
|
||||
|
||||
this.pipeline.on('error', (error) => {
|
||||
this.emit('error', error)
|
||||
})
|
||||
|
||||
this.pipeline.on('complete', (destination) => {
|
||||
this.bytesRead = this.source.bytesRead
|
||||
|
||||
let finishedCount = 0
|
||||
let errorCount = 0
|
||||
|
||||
this.destinations.forEach((dest) => {
|
||||
finishedCount += dest.finished ? 1 : 0
|
||||
errorCount += dest.error ? 1 : 0
|
||||
})
|
||||
|
||||
debug('write:finish', finishedCount, '/', this.destinations.size)
|
||||
|
||||
if (_.has(destination, [ 'stream' ])) {
|
||||
this.bytesWritten += destination.stream.bytesWritten
|
||||
}
|
||||
|
||||
if (finishedCount === this.destinations.size) {
|
||||
if (errorCount === this.destinations.size) {
|
||||
this.emit('error', destination.error)
|
||||
this._finish()
|
||||
} else if (this.verifyChecksums) {
|
||||
debug('write:verify')
|
||||
this.verify()
|
||||
} else {
|
||||
debug('write:finish')
|
||||
this._finish()
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Verify the data written to the destinations
|
||||
* @returns {ImageWriter} imageWriter
|
||||
* @example
|
||||
* imageWriter.verify()
|
||||
*/
|
||||
verify () {
|
||||
let bytesWritten = 0
|
||||
|
||||
// NOTE: We can't re-use `this.bytesWritten` here, as that will
|
||||
// include bytes from streams that may have errored part way through
|
||||
this.destinations.forEach((destination) => {
|
||||
// Don't count errored destinations
|
||||
if (destination.error || !destination.stream) {
|
||||
return
|
||||
}
|
||||
bytesWritten += destination.stream.bytesWritten
|
||||
})
|
||||
|
||||
const progressStream = new ProgressStream({
|
||||
length: bytesWritten,
|
||||
time: 500
|
||||
})
|
||||
|
||||
progressStream.resume()
|
||||
|
||||
progressStream.on('progress', (state) => {
|
||||
state.type = 'check'
|
||||
this._onProgress(state)
|
||||
})
|
||||
|
||||
this.destinations.forEach((destination) => {
|
||||
// Don't verify errored destinations
|
||||
if (destination.error || !destination.stream) {
|
||||
return
|
||||
}
|
||||
|
||||
const pipeline = this._createVerifyPipeline(destination)
|
||||
|
||||
pipeline.on('error', (error) => {
|
||||
// NOTE: As the `blockmap` module doesn't set our custom error codes,
|
||||
// we need to patch `EVALIDATION` into a range checksum error here
|
||||
if (error.message && error.message.startsWith('Invalid checksum for range')) {
|
||||
error.code = 'EVALIDATION'
|
||||
this.emit('fail', { device: destination.device.device, error })
|
||||
}
|
||||
this.emit('error', error)
|
||||
})
|
||||
|
||||
pipeline.on('checksum', (checksum) => {
|
||||
debug('verify:checksum', this.checksum, '==', checksum)
|
||||
destination.checksum = checksum
|
||||
if (!_.isEqual(this.checksum, checksum)) {
|
||||
const error = new Error(`Verification failed: ${JSON.stringify(this.checksum)} != ${JSON.stringify(checksum)}`)
|
||||
error.code = 'EVALIDATION'
|
||||
destination.error = error
|
||||
this.emit('fail', { device: destination.device.device, error })
|
||||
}
|
||||
})
|
||||
|
||||
pipeline.on('finish', () => {
|
||||
debug('verify:finish')
|
||||
|
||||
destination.verified = true
|
||||
destination.progress = null
|
||||
destination.stream = null
|
||||
|
||||
let finishedCount = 0
|
||||
|
||||
this.destinations.forEach((dest) => {
|
||||
finishedCount += (dest.error || dest.verified) ? 1 : 0
|
||||
})
|
||||
|
||||
if (finishedCount === this.destinations.size) {
|
||||
debug('verify:complete')
|
||||
progressStream.end()
|
||||
this._finish()
|
||||
}
|
||||
})
|
||||
|
||||
// NOTE: Normally we'd use `pipeline.pipe(progressStream)` here,
|
||||
// but that leads to degraded performance
|
||||
pipeline.on('readable', function () {
|
||||
let chunk = null
|
||||
while ((chunk = this.read())) {
|
||||
progressStream.write(chunk)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Abort the flashing process
|
||||
* @example
|
||||
* imageWriter.abort()
|
||||
*/
|
||||
abort () {
|
||||
if (this.source && this.source.stream) {
|
||||
this.source.stream.destroy()
|
||||
}
|
||||
this.emit('abort')
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Cleanup after writing; close file descriptors & unmount
|
||||
* @param {Function} callback - callback(error)
|
||||
* @private
|
||||
* @example
|
||||
* writer._cleanup((error) => {
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
_cleanup (callback) {
|
||||
debug('state:cleanup')
|
||||
const tasks = []
|
||||
|
||||
this.destinations.forEach((destination) => {
|
||||
tasks.push((next) => {
|
||||
runSeries([
|
||||
(done) => {
|
||||
if (destination.fd) {
|
||||
fs.close(destination.fd, done)
|
||||
destination.fd = null
|
||||
} else {
|
||||
done()
|
||||
}
|
||||
},
|
||||
(done) => {
|
||||
if (!this.unmountOnSuccess) {
|
||||
done()
|
||||
return
|
||||
}
|
||||
|
||||
// Closing a file descriptor on a drive containing mountable
|
||||
// partitions causes macOS to mount the drive. If we try to
|
||||
// unmount too quickly, then the drive might get re-mounted
|
||||
// right afterwards.
|
||||
setTimeout(() => {
|
||||
mountutils.unmountDisk(destination.device.device, (error) => {
|
||||
debug('state:cleanup', error ? 'NOT OK' : 'OK')
|
||||
done(error)
|
||||
})
|
||||
}, UNMOUNT_ON_SUCCESS_TIMEOUT_MS)
|
||||
}
|
||||
], next)
|
||||
})
|
||||
})
|
||||
|
||||
runParallel(tasks, (resultErrors, results) => {
|
||||
debug('state:cleanup', resultErrors)
|
||||
callback.call(this, resultErrors)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Emits the `finish` event with state metadata
|
||||
* @private
|
||||
* @example
|
||||
* this._finish()
|
||||
*/
|
||||
_finish () {
|
||||
this._cleanup(() => {
|
||||
const failures = []
|
||||
let successful = 0
|
||||
let failed = 0
|
||||
|
||||
this.finished = true
|
||||
|
||||
this.destinations.forEach((dest) => {
|
||||
successful += dest.finished && dest.verified && !dest.error ? 1 : 0
|
||||
failed += dest.error ? 1 : 0
|
||||
if (dest.error) {
|
||||
dest.error.device = dest.device.device
|
||||
failures.push(dest.error)
|
||||
}
|
||||
})
|
||||
|
||||
this.emit('finish', {
|
||||
devices: { successful, failed },
|
||||
bytesRead: this.bytesRead,
|
||||
bytesWritten: this.bytesWritten,
|
||||
checksum: this.checksum,
|
||||
errors: failures
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Creates a write pipeline from given options
|
||||
* @private
|
||||
* @returns {Pipage} pipeline
|
||||
* @example
|
||||
* this._createWritePipeline()
|
||||
*/
|
||||
_createWritePipeline () {
|
||||
const pipeline = new Pipage({
|
||||
readableObjectMode: true
|
||||
})
|
||||
|
||||
const progressOptions = {
|
||||
length: this.source.size.original,
|
||||
time: 500
|
||||
}
|
||||
|
||||
let progressStream = null
|
||||
|
||||
// If the final size is an estimation,
|
||||
// use the original source size for progress metering
|
||||
if (this.source.size.final.estimation) {
|
||||
progressStream = new ProgressStream(progressOptions)
|
||||
pipeline.append(progressStream)
|
||||
}
|
||||
|
||||
const isPassThrough = this.source.transform instanceof stream.PassThrough
|
||||
|
||||
// If the image transform is a pass-through,
|
||||
// ignore it to save on the overhead
|
||||
if (this.source.transform && !isPassThrough) {
|
||||
pipeline.append(this.source.transform)
|
||||
}
|
||||
|
||||
// If the final size is known precisely and we're not
|
||||
// using block maps, then use the final size for progress
|
||||
if (!this.source.size.final.estimation && !this.source.bmap) {
|
||||
progressOptions.length = this.source.size.final.value
|
||||
progressStream = new ProgressStream(progressOptions)
|
||||
pipeline.append(progressStream)
|
||||
}
|
||||
|
||||
if (this.source.bmap) {
|
||||
const blockMap = BlockMap.parse(this.source.bmap)
|
||||
debug('write:bmap', blockMap)
|
||||
progressStream = new ProgressStream(progressOptions)
|
||||
pipeline.append(progressStream)
|
||||
pipeline.append(new BlockMap.FilterStream(blockMap))
|
||||
} else {
|
||||
debug('write:blockstream')
|
||||
pipeline.append(new BlockStream())
|
||||
if (this.verifyChecksums) {
|
||||
const checksumStream = new ChecksumStream({
|
||||
objectMode: true,
|
||||
algorithms: this.checksumAlgorithms
|
||||
})
|
||||
pipeline.append(checksumStream)
|
||||
pipeline.bind(checksumStream, 'checksum')
|
||||
}
|
||||
}
|
||||
|
||||
this.destinations.forEach((destination) => {
|
||||
if (destination.error) {
|
||||
debug('pipeline:skip', destination.device.device)
|
||||
destination.finished = true
|
||||
return
|
||||
}
|
||||
|
||||
destination.stream = new BlockWriteStream({
|
||||
fd: destination.fd,
|
||||
autoClose: false
|
||||
})
|
||||
|
||||
destination.stream.on('finish', () => {
|
||||
debug('finish:unpipe', destination.device.device)
|
||||
destination.finished = true
|
||||
pipeline.emit('complete', destination)
|
||||
pipeline.unpipe(destination.stream)
|
||||
})
|
||||
|
||||
destination.stream.on('error', (error) => {
|
||||
debug('error:unpipe', destination.device.device)
|
||||
destination.error = error
|
||||
destination.finished = true
|
||||
pipeline.unpipe(destination.stream)
|
||||
this.emit('fail', { device: destination.device.device, error })
|
||||
pipeline.emit('complete', destination)
|
||||
})
|
||||
|
||||
pipeline.pipe(destination.stream)
|
||||
})
|
||||
|
||||
// Pipeline.bind(progressStream, 'progress');
|
||||
progressStream.on('progress', (state) => {
|
||||
state.type = 'write'
|
||||
this._onProgress(state)
|
||||
})
|
||||
|
||||
pipeline.bind(this.source.stream, 'error')
|
||||
this.source.stream.pipe(pipeline)
|
||||
|
||||
return pipeline
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Creates a verification pipeline from given options
|
||||
* @private
|
||||
* @param {Object} destination - the destination object
|
||||
* @returns {Pipage} pipeline
|
||||
* @example
|
||||
* this._createVerifyPipeline()
|
||||
*/
|
||||
_createVerifyPipeline (destination) {
|
||||
const pipeline = new Pipage()
|
||||
|
||||
let size = destination.stream.bytesWritten
|
||||
|
||||
if (!this.source.size.final.estimation) {
|
||||
size = Math.max(size, this.source.size.final.value)
|
||||
}
|
||||
|
||||
const progressStream = new ProgressStream({
|
||||
length: size,
|
||||
time: 500
|
||||
})
|
||||
|
||||
pipeline.append(progressStream)
|
||||
|
||||
if (this.source.bmap) {
|
||||
debug('verify:bmap')
|
||||
const blockMap = BlockMap.parse(this.source.bmap)
|
||||
const blockMapStream = new BlockMap.FilterStream(blockMap)
|
||||
pipeline.append(blockMapStream)
|
||||
|
||||
// NOTE: Because the blockMapStream checksums each range,
|
||||
// and doesn't emit a final "checksum" event, we artificially
|
||||
// raise one once the stream finishes
|
||||
blockMapStream.once('finish', () => {
|
||||
pipeline.emit('checksum', {})
|
||||
})
|
||||
} else {
|
||||
const checksumStream = new ChecksumStream({
|
||||
algorithms: this.checksumAlgorithms
|
||||
})
|
||||
pipeline.append(checksumStream)
|
||||
pipeline.bind(checksumStream, 'checksum')
|
||||
}
|
||||
|
||||
const source = new BlockReadStream({
|
||||
fd: destination.fd,
|
||||
autoClose: false,
|
||||
start: 0,
|
||||
end: size,
|
||||
highWaterMark: 1048576
|
||||
})
|
||||
|
||||
pipeline.bind(source, 'error')
|
||||
|
||||
destination.stream = source.pipe(pipeline)
|
||||
destination.progress = progressStream
|
||||
|
||||
return pipeline
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ImageWriter
|
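A hedged end-to-end sketch of the removed writer; the require path, image path, and drive are hypothetical, and flashing a real drive requires elevated privileges:

const ImageWriter = require('./lib/writer')   // require path assumed

const writer = new ImageWriter({
  verify: true,
  unmountOnSuccess: true,
  checksumAlgorithms: [ 'md5' ]
})

writer.write('./image.img', [ '/dev/disk2' ])
  .on('progress', (state) => {
    console.log(state.type, `${state.percentage.toFixed(0)}%`)
  })
  .on('fail', ({ device, error }) => {
    console.error(`${device}: ${error.message}`)
  })
  .on('error', console.error)
  .on('finish', (results) => {
    console.log(results.devices, results.checksum)
  })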
@ -1,117 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const Stream = require('stream')
|
||||
const speedometer = require('speedometer')
|
||||
|
||||
const PERCENT = 100
|
||||
const DEFAULT_TIME_MS = 500
|
||||
|
||||
/**
|
||||
* @class ProgressStream
|
||||
* @public
|
||||
*/
|
||||
class ProgressStream extends Stream.Transform {
|
||||
/**
|
||||
* @summary ProgressStream constructor
|
||||
* @param {Object} options - options
|
||||
* @param {Number} options.length - expected total length in bytes
|
||||
* @param {Number} [options.time] - time interval to report progress
|
||||
* @example
|
||||
* new ProgressStream({ length: 1024 * 1024 })
|
||||
* .on('progress', (state) => {
|
||||
* console.log( state.percentage.toFixed(0) + '%' )
|
||||
* })
|
||||
*/
|
||||
constructor (options) {
|
||||
super(options)
|
||||
|
||||
this.start = 0
|
||||
this.interval = options.time || DEFAULT_TIME_MS
|
||||
this.timer = null
|
||||
this.meter = speedometer()
|
||||
|
||||
this.delta = 0
|
||||
|
||||
this.state = {
|
||||
delta: 0,
|
||||
eta: 0,
|
||||
length: options.length,
|
||||
percentage: 0,
|
||||
remaining: 0,
|
||||
runtime: 0,
|
||||
speed: 0,
|
||||
totalSpeed: 0,
|
||||
transferred: 0
|
||||
}
|
||||
|
||||
this.clear = () => {
|
||||
clearInterval(this.timer)
|
||||
}
|
||||
|
||||
this.update = () => {
|
||||
this.state.delta = this.delta
|
||||
this.state.transferred += this.delta
|
||||
this.state.percentage = this.state.transferred / this.state.length * PERCENT
|
||||
this.state.remaining = this.state.length - this.state.transferred
|
||||
this.state.runtime = Date.now() - this.start
|
||||
this.state.speed = this.meter(this.state.delta)
|
||||
|
||||
// NOTE: We need to guard against this becoming Infinity,
|
||||
// because that value isn't transmitted properly over IPC and becomes `null`
|
||||
this.state.eta = this.state.speed ? this.state.remaining / this.state.speed : 0
|
||||
this.delta = 0
|
||||
this.emit('progress', this.state)
|
||||
}
|
||||
|
||||
this.once('end', this.clear)
|
||||
this.once('end', this.update)
|
||||
this.once('error', this.clear)
|
||||
|
||||
this.timer = setInterval(this.update, this.interval)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Transform function
|
||||
* @private
|
||||
* @param {Buffer} chunk - chunk
|
||||
* @param {String} _ - encoding
|
||||
* @param {Function} next - callback
|
||||
* @example
|
||||
* progressStream.write(buffer)
|
||||
*/
|
||||
_transform (chunk, _, next) {
|
||||
this.start = this.start || Date.now()
|
||||
this.delta += chunk.length
|
||||
next(null, chunk)
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Destroy handler
|
||||
* @param {Error} [error] - error
|
||||
* @param {Function} done - callback
|
||||
* @example
|
||||
* progressStream.destroy()
|
||||
*/
|
||||
_destroy (error, done) {
|
||||
this.clear()
|
||||
done(error)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ProgressStream
|
17
package.json
17
package.json
@ -53,7 +53,6 @@
|
||||
"angular-ui-bootstrap": "2.5.0",
|
||||
"angular-ui-router": "0.4.2",
|
||||
"bindings": "1.3.0",
|
||||
"blockmap": "2.0.2",
|
||||
"bluebird": "3.4.1",
|
||||
"bluebird-retry": "0.11.0",
|
||||
"bootstrap-sass": "3.3.6",
|
||||
@ -64,27 +63,19 @@
|
||||
"drivelist": "6.4.6",
|
||||
"electron-is-running-in-asar": "1.0.0",
|
||||
"etcher-sdk": "github:resin-io-modules/etcher-sdk#356e40b2190492cec55fd92d58f0cc218cae4ed2",
|
||||
"file-type": "4.1.0",
|
||||
"flexboxgrid": "6.3.0",
|
||||
"gpt": "1.0.0",
|
||||
"immutable": "3.8.1",
|
||||
"inactivity-timer": "1.0.0",
|
||||
"lodash": "4.17.10",
|
||||
"lzma-native": "3.0.8",
|
||||
"mbr": "1.1.2",
|
||||
"mime-types": "2.1.15",
|
||||
"mountutils": "1.3.16",
|
||||
"nan": "2.9.2",
|
||||
"node-ipc": "9.1.1",
|
||||
"node-stream-zip": "1.3.7",
|
||||
"path-is-inside": "1.0.2",
|
||||
"pipage": "1.0.2",
|
||||
"pretty-bytes": "1.0.4",
|
||||
"prop-types": "15.5.9",
|
||||
"react": "16.3.2",
|
||||
"react-dom": "16.3.2",
|
||||
"react2angular": "4.0.2",
|
||||
"readable-stream": "2.3.3",
|
||||
"redux": "3.5.2",
|
||||
"rendition": "4.41.1",
|
||||
"request": "2.81.0",
|
||||
@ -93,18 +84,12 @@
|
||||
"resin-corvus": "2.0.0",
|
||||
"roboto-fontface": "0.9.0",
|
||||
"semver": "5.1.1",
|
||||
"speedometer": "1.0.0",
|
||||
"styled-components": "3.2.3",
|
||||
"styled-system": "3.1.11",
|
||||
"sudo-prompt": "8.2.3",
|
||||
"udif": "0.13.0",
|
||||
"unbzip2-stream": "github:balena-io-modules/unbzip2-stream#core-streams",
|
||||
"usb": "github:balena-io/node-usb#1.3.5",
|
||||
"uuid": "3.0.1",
|
||||
"xml2js": "0.4.17",
|
||||
"xxhash": "git://github.com/balena-io-modules/node-xxhash.git#70ac31da1a41c6f8c53d931b5802c6c93f7b6b83",
|
||||
"yargs": "11.0.0",
|
||||
"yauzl": "2.6.0"
|
||||
"yargs": "11.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"winusb-driver-generator": "1.2.4"
|
||||
|
@ -1,429 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const os = require('os')
|
||||
const drivelist = require('drivelist')
|
||||
const driveScanner = require('../../../lib/gui/app/modules/drive-scanner')
|
||||
|
||||
describe('Browser: driveScanner', function () {
|
||||
describe('detected devices should be an array', function () {
|
||||
it('should emit an empty array', function (done) {
|
||||
const spy = m.sinon.spy()
|
||||
|
||||
driveScanner.once('devices', function (drives) {
|
||||
let error = null
|
||||
try {
|
||||
m.chai.expect(drives).to.be.an.instanceof(Array)
|
||||
m.chai.expect(spy).to.not.have.been.called
|
||||
} catch (exception) {
|
||||
error = exception
|
||||
}
|
||||
driveScanner.removeListener('error', spy)
|
||||
driveScanner.stop()
|
||||
done(error)
|
||||
})
|
||||
|
||||
driveScanner.on('error', spy)
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
|
||||
describe('given only system available drives', function () {
|
||||
beforeEach(function () {
|
||||
this.drivelistStub = m.sinon.stub(drivelist, 'list')
|
||||
this.drivelistStub.yields(null, [
|
||||
{
|
||||
device: '/dev/sda',
|
||||
description: 'WDC WD10JPVX-75J',
|
||||
size: '931.5G',
|
||||
mountpoints: [
|
||||
{
|
||||
path: '/'
|
||||
}
|
||||
],
|
||||
isSystem: true
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.drivelistStub.restore()
|
||||
})
|
||||
|
||||
it('should emit an empty array', function (done) {
|
||||
const spy = m.sinon.spy()
|
||||
|
||||
driveScanner.once('devices', function (drives) {
|
||||
let error = null
|
||||
try {
|
||||
m.chai.expect(drives).to.deep.equal([])
|
||||
m.chai.expect(spy).to.not.have.been.called
|
||||
} catch (exception) {
|
||||
error = exception
|
||||
}
|
||||
driveScanner.removeListener('error', spy)
|
||||
driveScanner.stop()
|
||||
done(error)
|
||||
})
|
||||
|
||||
driveScanner.on('error', spy)
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
|
||||
describe('given linux', function () {
|
||||
beforeEach(function () {
|
||||
this.osPlatformStub = m.sinon.stub(os, 'platform')
|
||||
this.osPlatformStub.returns('linux')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.osPlatformStub.restore()
|
||||
})
|
||||
|
||||
describe('given available drives', function () {
|
||||
beforeEach(function () {
|
||||
this.drivelistStub = m.sinon.stub(drivelist, 'list')
|
||||
this.drivelistStub.yields(null, [
|
||||
{
|
||||
device: '/dev/sda',
|
||||
displayName: '/dev/sda',
|
||||
description: 'WDC WD10JPVX-75J',
|
||||
size: '931.5G',
|
||||
mountpoints: [
|
||||
{
|
||||
path: '/'
|
||||
}
|
||||
],
|
||||
isSystem: true,
|
||||
isRemovable: false
|
||||
},
|
||||
{
|
||||
device: '/dev/sdb',
|
||||
displayName: '/dev/sdb',
|
||||
description: 'Foo',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: '/mnt/foo'
|
||||
}
|
||||
],
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
},
|
||||
{
|
||||
device: '/dev/sdc',
|
||||
displayName: '/dev/sdc',
|
||||
description: 'Bar',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: '/mnt/bar'
|
||||
}
|
||||
],
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.drivelistStub.restore()
|
||||
})
|
||||
|
||||
it('should emit the non removable drives', function (done) {
|
||||
const spy = m.sinon.spy()
|
||||
|
||||
driveScanner.once('devices', function (drives) {
|
||||
let error = null
|
||||
try {
|
||||
m.chai.expect(drives).to.deep.equal([
|
||||
{
|
||||
device: '/dev/sdb',
|
||||
displayName: '/dev/sdb',
|
||||
description: 'Foo',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: '/mnt/foo'
|
||||
}
|
||||
],
|
||||
adapter: 'blockdevice',
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
},
|
||||
{
|
||||
device: '/dev/sdc',
|
||||
displayName: '/dev/sdc',
|
||||
description: 'Bar',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: '/mnt/bar'
|
||||
}
|
||||
],
|
||||
adapter: 'blockdevice',
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
}
|
||||
])
|
||||
|
||||
m.chai.expect(spy).to.not.have.been.called
|
||||
} catch (exception) {
|
||||
error = exception
|
||||
}
|
||||
driveScanner.removeListener('error', spy)
|
||||
driveScanner.stop()
|
||||
done(error)
|
||||
})
|
||||
|
||||
driveScanner.on('error', spy)
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('given windows', function () {
|
||||
beforeEach(function () {
|
||||
this.osPlatformStub = m.sinon.stub(os, 'platform')
|
||||
this.osPlatformStub.returns('win32')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.osPlatformStub.restore()
|
||||
})
|
||||
|
||||
describe('given available drives', function () {
|
||||
beforeEach(function () {
|
||||
this.drivelistStub = m.sinon.stub(drivelist, 'list')
|
||||
this.drivelistStub.yields(null, [
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE1',
|
||||
displayName: 'C:',
|
||||
description: 'WDC WD10JPVX-75J',
|
||||
size: '931.5G',
|
||||
mountpoints: [
|
||||
{
|
||||
path: 'C:'
|
||||
}
|
||||
],
|
||||
isSystem: true,
|
||||
isRemovable: false
|
||||
},
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE2',
|
||||
displayName: '\\\\.\\PHYSICALDRIVE2',
|
||||
description: 'Foo',
|
||||
size: 14000000000,
|
||||
mountpoints: [],
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
},
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE3',
|
||||
displayName: 'F:',
|
||||
description: 'Bar',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: 'F:'
|
||||
}
|
||||
],
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.drivelistStub.restore()
|
||||
})
|
||||
|
||||
it('should emit the non removable drives', function (done) {
|
||||
const spy = m.sinon.spy()
|
||||
|
||||
driveScanner.once('devices', function (drives) {
|
||||
let error = null
|
||||
try {
|
||||
m.chai.expect(drives).to.deep.equal([
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE2',
|
||||
displayName: '\\\\.\\PHYSICALDRIVE2',
|
||||
description: 'Foo',
|
||||
size: 14000000000,
|
||||
mountpoints: [],
|
||||
adapter: 'blockdevice',
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
},
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE3',
|
||||
displayName: 'F:',
|
||||
description: 'Bar',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: 'F:'
|
||||
}
|
||||
],
|
||||
adapter: 'blockdevice',
|
||||
isSystem: false,
|
||||
isRemovable: false
|
||||
}
|
||||
])
|
||||
m.chai.expect(spy).to.not.have.been.called
|
||||
} catch (exception) {
|
||||
error = exception
|
||||
}
|
||||
driveScanner.removeListener('error', spy)
|
||||
driveScanner.stop()
|
||||
done(error)
|
||||
})
|
||||
|
||||
driveScanner.on('error', spy)
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
|
||||
describe('given a drive with a single drive letters', function () {
|
||||
beforeEach(function () {
|
||||
this.drivelistStub = m.sinon.stub(drivelist, 'list')
|
||||
this.drivelistStub.yields(null, [
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE3',
|
||||
raw: '\\\\.\\PHYSICALDRIVE3',
|
||||
description: 'Bar',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: 'F:'
|
||||
}
|
||||
],
|
||||
isSystem: false,
|
||||
isRemovable: true
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.drivelistStub.restore()
|
||||
})
|
||||
|
||||
it('should use the drive letter as the name', function (done) {
|
||||
const spy = m.sinon.spy()
|
||||
|
||||
driveScanner.once('devices', function (drives) {
|
||||
let error = null
|
||||
try {
|
||||
m.chai.expect(drives).to.have.length(1)
|
||||
m.chai.expect(drives[0].displayName).to.equal('F:')
|
||||
m.chai.expect(spy).to.not.have.been.called
|
||||
} catch (exception) {
|
||||
error = exception
|
||||
}
|
||||
driveScanner.removeListener('error', spy)
|
||||
driveScanner.stop()
|
||||
done(error)
|
||||
})
|
||||
|
||||
driveScanner.on('error', spy)
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
|
||||
describe('given a drive with multiple drive letters', function () {
|
||||
beforeEach(function () {
|
||||
this.drivesListStub = m.sinon.stub(drivelist, 'list')
|
||||
this.drivesListStub.yields(null, [
|
||||
{
|
||||
device: '\\\\.\\PHYSICALDRIVE3',
|
||||
raw: '\\\\.\\PHYSICALDRIVE3',
|
||||
description: 'Bar',
|
||||
size: 14000000000,
|
||||
mountpoints: [
|
||||
{
|
||||
path: 'F:'
|
||||
},
|
||||
{
|
||||
path: 'G:'
|
||||
},
|
||||
{
|
||||
path: 'H:'
|
||||
}
|
||||
],
|
||||
isSystem: false,
|
||||
isRemovable: true
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.drivesListStub.restore()
|
||||
})
|
||||
|
||||
it('should join all the mountpoints in `name`', function (done) {
|
||||
const spy = m.sinon.spy()
|
||||
|
||||
driveScanner.once('devices', function (drives) {
|
||||
let error = null
|
||||
try {
|
||||
m.chai.expect(drives).to.have.length(1)
|
||||
m.chai.expect(drives[0].displayName).to.equal('F:, G:, H:')
|
||||
m.chai.expect(spy).to.not.have.been.called
|
||||
} catch (exception) {
|
||||
error = exception
|
||||
}
|
||||
driveScanner.removeListener('error', spy)
|
||||
driveScanner.stop()
|
||||
done(error)
|
||||
})
|
||||
|
||||
driveScanner.on('error', spy)
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('given an error when listing the drives', function () {
|
||||
beforeEach(function () {
|
||||
this.drivesListStub = m.sinon.stub(drivelist, 'list')
|
||||
this.drivesListStub.yields(new Error('scan error'))
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.drivesListStub.restore()
|
||||
})
|
||||
|
||||
it('should emit the error', function (done) {
|
||||
driveScanner.once('error', function (error) {
|
||||
let assertionError = null
|
||||
try {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(error.message).to.equal('scan error')
|
||||
} catch (exception) {
|
||||
assertionError = exception
|
||||
}
|
||||
driveScanner.stop()
|
||||
done(assertionError)
|
||||
})
|
||||
|
||||
driveScanner.start()
|
||||
})
|
||||
})
|
||||
})
|
@ -1,128 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const path = require('path')
|
||||
const zipHooks = require('../../../lib/sdk/image-stream/archive-hooks/zip')
|
||||
const utils = require('../../../lib/sdk/image-stream/utils')
|
||||
const tester = require('../tester')
|
||||
const ZIP_PATH = path.join(__dirname, '..', 'data', 'zip')
|
||||
|
||||
describe('ImageStream: Archive hooks: ZIP', function () {
|
||||
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)
|
||||
|
||||
describe('.getEntries()', function () {
|
||||
describe('given an empty zip', function () {
|
||||
beforeEach(function () {
|
||||
this.zip = path.join(ZIP_PATH, 'zip-directory-empty.zip')
|
||||
})
|
||||
|
||||
it('should become an empty array', function () {
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
m.chai.expect(entries).to.deep.equal([])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('given a zip with multiple files in it', function () {
|
||||
beforeEach(function () {
|
||||
this.zip = path.join(ZIP_PATH, 'zip-directory-multiple-images.zip')
|
||||
})
|
||||
|
||||
it('should become all entries', function () {
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
m.chai.expect(entries).to.deep.equal([
|
||||
{
|
||||
name: 'multiple-images/edison-config.img',
|
||||
size: 16777216
|
||||
},
|
||||
{
|
||||
name: 'multiple-images/raspberrypi.img',
|
||||
size: 33554432
|
||||
}
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('given a zip with nested files in it', function () {
|
||||
beforeEach(function () {
|
||||
this.zip = path.join(ZIP_PATH, 'zip-directory-nested-misc.zip')
|
||||
})
|
||||
|
||||
it('should become all entries', function () {
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
m.chai.expect(entries).to.deep.equal([
|
||||
{
|
||||
name: 'zip-directory-nested-misc/foo',
|
||||
size: 4
|
||||
},
|
||||
{
|
||||
name: 'zip-directory-nested-misc/hello/there/bar',
|
||||
size: 4
|
||||
}
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('.extractFile()', function () {
|
||||
beforeEach(function () {
|
||||
this.zip = path.join(ZIP_PATH, 'zip-directory-nested-misc.zip')
|
||||
})
|
||||
|
||||
it('should be able to extract a top-level file', function () {
|
||||
const fileName = 'zip-directory-nested-misc/foo'
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
return zipHooks.extractFile(this.zip, entries, fileName)
|
||||
}).then(utils.extractStream).then((data) => {
|
||||
m.chai.expect(data.toString()).to.equal('foo\n')
|
||||
})
|
||||
})
|
||||
|
||||
it('should be able to extract a nested file', function () {
|
||||
const fileName = 'zip-directory-nested-misc/hello/there/bar'
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
return zipHooks.extractFile(this.zip, entries, fileName)
|
||||
}).then(utils.extractStream).then((data) => {
|
||||
m.chai.expect(data.toString()).to.equal('bar\n')
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw if the entry does not exist', function () {
|
||||
const fileName = 'zip-directory-nested-misc/xxxxxxxxxxxxxxxx'
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
return zipHooks.extractFile(this.zip, entries, fileName)
|
||||
}).catch((error) => {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(error.message).to.equal(`Invalid entry: ${fileName}`)
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw if the entry is a directory', function () {
|
||||
const fileName = 'zip-directory-nested-misc/hello'
|
||||
return zipHooks.getEntries(this.zip).then((entries) => {
|
||||
return zipHooks.extractFile(this.zip, entries, fileName)
|
||||
}).catch((error) => {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(error.message).to.equal(`Invalid entry: ${fileName}`)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,69 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const BZ2_PATH = path.join(DATA_PATH, 'bz2')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
const tester = require('./tester')
|
||||
|
||||
describe('ImageStream: BZ2', function () {
|
||||
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)
|
||||
|
||||
describe('compression method', function () {
|
||||
describe('bzip2 level 9', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(BZ2_PATH, 'etcher-test-9.img.bz2'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
|
||||
describe('bzip2 level 1', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(BZ2_PATH, 'etcher-test.img.bz2'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(BZ2_PATH, 'etcher-test.img.bz2')
|
||||
const expectedSize = fs.statSync(image).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'img',
|
||||
archiveExtension: 'bz2',
|
||||
size: {
|
||||
original: expectedSize,
|
||||
final: {
|
||||
estimation: true,
|
||||
value: expectedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: false,
|
||||
partitions: require('./data/images/etcher-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,26 +0,0 @@
|
||||
[
|
||||
{
|
||||
"type": "E3C9E316-0B5C-4DB8-817D-F92DF00215AE",
|
||||
"id": "F2020024-6D12-43A7-B0AA-0E243771ED00",
|
||||
"name": "Microsoft reserved partition",
|
||||
"firstLBA": 34,
|
||||
"lastLBA": 65569,
|
||||
"extended": false
|
||||
},
|
||||
{
|
||||
"type": "EBD0A0A2-B9E5-4433-87C0-68B6B72699C7",
|
||||
"id": "3B781D99-BEFA-41F7-85C7-01346507805C",
|
||||
"name": "Basic data partition",
|
||||
"firstLBA": 65664,
|
||||
"lastLBA": 163967,
|
||||
"extended": false
|
||||
},
|
||||
{
|
||||
"type": "EBD0A0A2-B9E5-4433-87C0-68B6B72699C7",
|
||||
"id": "EE0EAF80-24C1-4A41-949E-419676E89AD6",
|
||||
"name": "Basic data partition",
|
||||
"firstLBA": 163968,
|
||||
"lastLBA": 258175,
|
||||
"extended": false
|
||||
}
|
||||
]
|
Binary file not shown.
@ -1,34 +0,0 @@
|
||||
[
|
||||
{
|
||||
"type": 14,
|
||||
"id": null,
|
||||
"name": null,
|
||||
"firstLBA": 128,
|
||||
"lastLBA": 2176,
|
||||
"extended": false
|
||||
},
|
||||
{
|
||||
"type": 14,
|
||||
"id": null,
|
||||
"name": null,
|
||||
"firstLBA": 2176,
|
||||
"lastLBA": 4224,
|
||||
"extended": false
|
||||
},
|
||||
{
|
||||
"type": 0,
|
||||
"id": null,
|
||||
"name": null,
|
||||
"firstLBA": 0,
|
||||
"lastLBA": 0,
|
||||
"extended": false
|
||||
},
|
||||
{
|
||||
"type": 0,
|
||||
"id": null,
|
||||
"name": null,
|
||||
"firstLBA": 0,
|
||||
"lastLBA": 0,
|
||||
"extended": false
|
||||
}
|
||||
]
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,52 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const errors = require('../../lib/shared/errors')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
|
||||
describe('ImageStream: Directory', function () {
|
||||
describe('.getFromFilePath()', function () {
|
||||
describe('given a directory', function () {
|
||||
it('should be rejected with an error', function (done) {
|
||||
imageStream.getFromFilePath(IMAGES_PATH).catch((error) => {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(errors.getTitle(error)).to.equal('Invalid image')
|
||||
m.chai.expect(errors.getDescription(error)).to.equal('The image must be a file')
|
||||
m.chai.expect(errors.isUserError(error)).to.be.true
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should be rejected with an error', function (done) {
|
||||
imageStream.getImageMetadata(IMAGES_PATH).catch((error) => {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(errors.getTitle(error)).to.equal('Invalid image')
|
||||
m.chai.expect(errors.getDescription(error)).to.equal('The image must be a file')
|
||||
m.chai.expect(errors.isUserError(error)).to.be.true
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,141 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const DMG_PATH = path.join(DATA_PATH, 'dmg')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
const tester = require('./tester')
|
||||
|
||||
describe('ImageStream: DMG', function () {
|
||||
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)
|
||||
|
||||
describe('compression method', function () {
|
||||
describe('NONE', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-raw.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
|
||||
describe('UDCO (ADC)', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-adc.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
|
||||
describe('UDZO (ZLIB)', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-zlib.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
|
||||
describe('UDBZ (BZIP2)', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-bz2.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
|
||||
// NOTE: Skipped, as LZFSE is not supported by `udif` module yet
|
||||
describe.skip('ULFO (LZFSE)', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-lzfse.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
})
|
||||
|
||||
context('zlib compressed', function () {
|
||||
describe('.getFromFilePath()', function () {
|
||||
describe('given an dmg image', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-zlib.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(DMG_PATH, 'etcher-test-zlib.dmg')
|
||||
const compressedSize = fs.statSync(path.join(DMG_PATH, 'etcher-test-zlib.dmg')).size
|
||||
const uncompressedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'dmg',
|
||||
size: {
|
||||
original: compressedSize,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: uncompressedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: false,
|
||||
partitions: require('./data/images/etcher-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
context('uncompressed', function () {
|
||||
describe('.getFromFilePath()', function () {
|
||||
describe('given an dmg image', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(DMG_PATH, 'etcher-test-raw.dmg'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(DMG_PATH, 'etcher-test-raw.dmg')
|
||||
const compressedSize = fs.statSync(path.join(DMG_PATH, 'etcher-test-raw.dmg')).size
|
||||
const uncompressedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'dmg',
|
||||
size: {
|
||||
original: compressedSize,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: uncompressedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: false,
|
||||
partitions: require('./data/images/etcher-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
context('invalid', function () {
|
||||
describe('given an invalid dmg file', function () {
|
||||
tester.expectError(
|
||||
path.join(DATA_PATH, 'unrecognized', 'invalid.dmg'),
|
||||
'Invalid image', 'Invalid footer')
|
||||
})
|
||||
})
|
||||
})
|
@ -1,64 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const GZ_PATH = path.join(DATA_PATH, 'gz')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
const tester = require('./tester')
|
||||
|
||||
describe('ImageStream: GZ', function () {
|
||||
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)
|
||||
|
||||
describe('.getFromFilePath()', function () {
|
||||
describe('given a gz image', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(GZ_PATH, 'etcher-test.img.gz'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(GZ_PATH, 'etcher-test.img.gz')
|
||||
const uncompressedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size
|
||||
const compressedSize = fs.statSync(path.join(GZ_PATH, 'etcher-test.img.gz')).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'img',
|
||||
archiveExtension: 'gz',
|
||||
size: {
|
||||
original: compressedSize,
|
||||
final: {
|
||||
estimation: true,
|
||||
value: uncompressedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: false,
|
||||
partitions: require('./data/images/etcher-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,89 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
const tester = require('./tester')
|
||||
|
||||
describe('ImageStream: IMG', function () {
|
||||
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)
|
||||
|
||||
describe('.getFromFilePath()', function () {
|
||||
describe('given an img image', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
context('Master Boot Record', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(IMAGES_PATH, 'etcher-test.img')
|
||||
const expectedSize = fs.statSync(image).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'img',
|
||||
size: {
|
||||
original: expectedSize,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: expectedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: false,
|
||||
partitions: require('./data/images/etcher-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
context('GUID Partition Table', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(IMAGES_PATH, 'etcher-gpt-test.img.gz')
|
||||
const uncompressedSize = 134217728
|
||||
const expectedSize = fs.statSync(image).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'img',
|
||||
archiveExtension: 'gz',
|
||||
size: {
|
||||
original: expectedSize,
|
||||
final: {
|
||||
estimation: true,
|
||||
value: uncompressedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: true,
|
||||
partitions: require('./data/images/etcher-gpt-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,51 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const _ = require('lodash')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
|
||||
describe('ImageStream', function () {
|
||||
describe('.supportedFileTypes', function () {
|
||||
it('should be an array', function () {
|
||||
m.chai.expect(_.isArray(imageStream.supportedFileTypes)).to.be.true
|
||||
})
|
||||
|
||||
it('should not be empty', function () {
|
||||
m.chai.expect(_.isEmpty(imageStream.supportedFileTypes)).to.be.false
|
||||
})
|
||||
|
||||
it('should contain only strings', function () {
|
||||
m.chai.expect(_.every(_.map(imageStream.supportedFileTypes, function (fileType) {
|
||||
return _.isString(fileType.extension) && _.isString(fileType.type)
|
||||
}))).to.be.true
|
||||
})
|
||||
|
||||
it('should not contain empty strings', function () {
|
||||
m.chai.expect(_.every(_.map(imageStream.supportedFileTypes, function (fileType) {
|
||||
return !_.isEmpty(fileType.extension) && !_.isEmpty(fileType.type)
|
||||
}))).to.be.true
|
||||
})
|
||||
|
||||
it('should not contain a leading period in any file type extension', function () {
|
||||
m.chai.expect(_.every(_.map(imageStream.supportedFileTypes, function (fileType) {
|
||||
return _.first(fileType.extension) !== '.'
|
||||
}))).to.be.true
|
||||
})
|
||||
})
|
||||
})
|
@ -1,61 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
const tester = require('./tester')
|
||||
|
||||
describe('ImageStream: ISO', function () {
|
||||
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)
|
||||
|
||||
describe('.getFromFilePath()', function () {
|
||||
describe('given an iso image', function () {
|
||||
tester.extractFromFilePath(
|
||||
path.join(IMAGES_PATH, 'etcher-test.iso'),
|
||||
path.join(IMAGES_PATH, 'etcher-test.iso'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should return the correct metadata', function () {
|
||||
const image = path.join(IMAGES_PATH, 'etcher-test.iso')
|
||||
const expectedSize = fs.statSync(image).size
|
||||
|
||||
return imageStream.getImageMetadata(image).then((metadata) => {
|
||||
m.chai.expect(metadata).to.deep.equal({
|
||||
path: image,
|
||||
extension: 'iso',
|
||||
size: {
|
||||
original: expectedSize,
|
||||
final: {
|
||||
estimation: false,
|
||||
value: expectedSize
|
||||
}
|
||||
},
|
||||
hasMBR: true,
|
||||
hasGPT: false,
|
||||
partitions: require('./data/images/etcher-test-partitions.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,155 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, '..', 'data')
|
||||
const IMAGES_PATH = path.join(DATA_PATH, 'images')
|
||||
const ZIP_PATH = path.join(DATA_PATH, 'metadata', 'zip')
|
||||
const tester = require('../tester')
|
||||
const imageStream = require('../../../lib/sdk/image-stream/index')
|
||||
|
||||
const testMetadataProperty = (archivePath, propertyName, expectedValue) => {
|
||||
return imageStream.getFromFilePath(archivePath).then((image) => {
|
||||
m.chai.expect(image[propertyName]).to.deep.equal(expectedValue)
|
||||
|
||||
return imageStream.getImageMetadata(archivePath).then((metadata) => {
|
||||
m.chai.expect(metadata[propertyName]).to.deep.equal(expectedValue)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
describe('ImageStream: Metadata ZIP', function () {
|
||||
this.timeout(10000)
|
||||
|
||||
describe('given an archive with an invalid `manifest.json`', function () {
|
||||
tester.expectError(
|
||||
path.join(ZIP_PATH, 'etcher-test-invalid-manifest.zip'),
|
||||
'Invalid archive manifest.json')
|
||||
|
||||
describe('.getImageMetadata()', function () {
|
||||
it('should be rejected with an error', function () {
|
||||
const image = path.join(ZIP_PATH, 'etcher-test-invalid-manifest.zip')
|
||||
|
||||
return imageStream.getImageMetadata(image).catch((error) => {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(error.message).to.equal('Invalid archive manifest.json')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('given an archive with a `manifest.json`', function () {
|
||||
const archive = path.join(ZIP_PATH, 'etcher-test-with-manifest.zip')
|
||||
|
||||
tester.extractFromFilePath(
|
||||
archive,
|
||||
path.join(IMAGES_PATH, 'etcher-test.img'))
|
||||
|
||||
it('should read the manifest name property', function () {
|
||||
return testMetadataProperty(archive, 'name', 'Etcher Test')
|
||||
})
|
||||
|
||||
it('should read the manifest version property', function () {
|
||||
return testMetadataProperty(archive, 'version', '1.0.0')
|
||||
})
|
||||
|
||||
it('should read the manifest url property', function () {
|
||||
return testMetadataProperty(archive, 'url', 'https://www.example.com')
|
||||
})
|
||||
|
||||
it('should read the manifest supportUrl property', function () {
|
||||
const expectedValue = 'https://www.example.com/support/'
|
||||
return testMetadataProperty(archive, 'supportUrl', expectedValue)
|
||||
})
|
||||
|
||||
it('should read the manifest releaseNotesUrl property', function () {
|
||||
const expectedValue = 'http://downloads.example.com/release_notes.txt'
|
||||
return testMetadataProperty(archive, 'releaseNotesUrl', expectedValue)
|
||||
})
|
||||
|
||||
it('should read the manifest checksumType property', function () {
|
||||
return testMetadataProperty(archive, 'checksumType', 'md5')
|
||||
})
|
||||
|
||||
it('should read the manifest checksum property', function () {
|
||||
return testMetadataProperty(archive, 'checksum', 'add060b285d512f56c175b76b7ef1bee')
|
||||
})
|
||||
|
||||
it('should read the manifest bytesToZeroOutFromTheBeginning property', function () {
|
||||
return testMetadataProperty(archive, 'bytesToZeroOutFromTheBeginning', 512)
|
||||
})
|
||||
|
||||
it('should read the manifest recommendedDriveSize property', function () {
|
||||
return testMetadataProperty(archive, 'recommendedDriveSize', 4294967296)
|
||||
})
|
||||
})
|
||||
|
||||
describe('given an archive with a `logo.svg`', function () {
|
||||
const archive = path.join(ZIP_PATH, 'etcher-test-with-logo.zip')
|
||||
|
||||
const logo = [
|
||||
'<svg xmlns="http://www.w3.org/2000/svg">',
|
||||
' <text>Hello World</text>',
|
||||
'</svg>',
|
||||
''
|
||||
].join('\n')
|
||||
|
||||
it('should read the logo contents', function () {
|
||||
return testMetadataProperty(archive, 'logo', logo)
|
||||
})
|
||||
})
|
||||
|
||||
describe('given an archive with a bmap file', function () {
|
||||
const archive = path.join(ZIP_PATH, 'etcher-test-with-bmap.zip')
|
||||
|
||||
const bmap = [
|
||||
'<?xml version="1.0" ?>',
|
||||
'<bmap version="1.3">',
|
||||
' <ImageSize> 5242880 </ImageSize>',
|
||||
' <BlockSize> 4096 </BlockSize>',
|
||||
' <BlocksCount> 1280 </BlocksCount>',
|
||||
' <MappedBlocksCount> 1280 </MappedBlocksCount>',
|
||||
' <BmapFileSHA1> cc6f077565c73a46198777b259c231875df4e709 </BmapFileSHA1>',
|
||||
' <BlockMap>',
|
||||
' <Range sha1="7b7d6e1fc44ef224a8c57d3ec6ffc3717c428a14"> 0-1280 </Range>',
|
||||
' </BlockMap>',
|
||||
'</bmap>',
|
||||
''
|
||||
].join('\n')
|
||||
|
||||
it('should read the bmap contents', function () {
|
||||
return testMetadataProperty(archive, 'bmap', bmap)
|
||||
})
|
||||
})
|
||||
|
||||
describe('given an archive with instructions', function () {
|
||||
const archive = path.join(ZIP_PATH, 'etcher-test-with-instructions.zip')
|
||||
|
||||
const instructions = [
|
||||
'# Example Next Steps',
|
||||
'',
|
||||
'Lorem ipsum dolor sit amet.',
|
||||
''
|
||||
].join('\n')
|
||||
|
||||
it('should read the instruction contents', function () {
|
||||
return testMetadataProperty(archive, 'instructions', instructions)
|
||||
})
|
||||
})
|
||||
})
|
@ -1,103 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const path = require('path')
|
||||
const DATA_PATH = path.join(__dirname, 'data')
|
||||
const mime = require('../../lib/sdk/image-stream/mime')
|
||||
|
||||
describe('ImageStream: MIME', function () {
|
||||
describe('.getMimeTypeFromFileName()', function () {
|
||||
it('should resolve application/x-bzip2 for a bz2 archive', function () {
|
||||
const file = path.join(DATA_PATH, 'bz2', 'etcher-test.img.bz2')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-bzip2')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/x-xz for a xz archive', function () {
|
||||
const file = path.join(DATA_PATH, 'xz', 'etcher-test.img.xz')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-xz')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/gzip for a gz archive', function () {
|
||||
const file = path.join(DATA_PATH, 'gz', 'etcher-test.img.gz')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/gzip')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/zip for a zip archive', function () {
|
||||
const file = path.join(DATA_PATH, 'zip', 'zip-directory-etcher-only.zip')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/zip')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/octet-stream for an uncompressed image', function () {
|
||||
const file = path.join(DATA_PATH, 'images', 'etcher-test.img')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/octet-stream')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/x-iso9660-image for an uncompressed iso', function () {
|
||||
const file = path.join(DATA_PATH, 'images', 'etcher-test.iso')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-iso9660-image')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/x-apple-diskimage for a compressed Apple disk image', function () {
|
||||
const file = path.join(DATA_PATH, 'dmg', 'etcher-test-zlib.dmg')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-apple-diskimage')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/x-apple-diskimage for an uncompressed Apple disk image', function () {
|
||||
const file = path.join(DATA_PATH, 'dmg', 'etcher-test-raw.dmg')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-apple-diskimage')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve application/octet-stream for an unrecognized file type', function () {
|
||||
const file = path.join(DATA_PATH, 'unrecognized', 'random.rpi-sdcard')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/octet-stream')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve the correct MIME type given an invalid extension', function () {
|
||||
const file = path.join(DATA_PATH, 'unrecognized', 'xz-with-invalid-extension.foo')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-xz')
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve the correct MIME type given no extension', function () {
|
||||
const file = path.join(DATA_PATH, 'unrecognized', 'xz-without-extension')
|
||||
return mime.getMimeTypeFromFileName(file).then((type) => {
|
||||
m.chai.expect(type).to.equal('application/x-xz')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,90 +0,0 @@
|
||||
/*
|
||||
* Copyright 2016 resin.io
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const m = require('mochainon')
|
||||
const _ = require('lodash')
|
||||
const Bluebird = require('bluebird')
|
||||
const fs = Bluebird.promisifyAll(require('fs'))
|
||||
const path = require('path')
|
||||
const imageStream = require('../../lib/sdk/image-stream/index')
|
||||
|
||||
const doFilesContainTheSameData = (file1, file2) => {
|
||||
return Bluebird.props({
|
||||
file1: fs.readFileAsync(file1),
|
||||
file2: fs.readFileAsync(file2)
|
||||
}).then(function (data) {
|
||||
return _.isEqual(data.file1, data.file2)
|
||||
})
|
||||
}
|
||||
|
||||
exports.DEFAULT_IMAGE_TESTS_TIMEOUT = 20000
|
||||
|
||||
exports.expectError = function (file, errorMessage, errorDetail) {
|
||||
it('should be rejected with an error', function () {
|
||||
return imageStream.getFromFilePath(file).catch((error) => {
|
||||
m.chai.expect(error).to.be.an.instanceof(Error)
|
||||
m.chai.expect(error.message).to.equal(errorMessage)
|
||||
if (errorDetail) {
|
||||
m.chai.expect(error.description).to.contain(errorDetail)
|
||||
m.chai.expect(error.description).to.be.a.string
|
||||
m.chai.expect(error.description.length > 0).to.be.true
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.extractFromFilePath = function (file, image) {
|
||||
it('should be able to extract the image', function () {
|
||||
const dirname = path.join(__dirname, 'output')
|
||||
const output = path.join(dirname, path.basename(file))
|
||||
|
||||
return fs.mkdirAsync(dirname)
|
||||
.catch({ code: 'EEXIST' }, _.noop)
|
||||
.then(function () {
|
||||
return imageStream.getFromFilePath(file)
|
||||
})
|
||||
.then(function (results) {
|
||||
m.chai.expect(results.path).to.equal(file)
|
||||
m.chai.expect(_.isString(results.extension)).to.be.true
|
||||
m.chai.expect(_.isEmpty(_.trim(results.extension))).to.be.false
|
||||
|
||||
if (!_.some([
|
||||
results.size.original === fs.statSync(file).size,
|
||||
results.size.original === fs.statSync(image).size
|
||||
])) {
|
||||
throw new Error(`Invalid size: ${results.size.original}`)
|
||||
}
|
||||
|
||||
const stream = results.stream
|
||||
.pipe(results.transform)
|
||||
.pipe(fs.createWriteStream(output))
|
||||
|
||||
return new Bluebird((resolve, reject) => {
|
||||
stream.on('error', reject)
|
||||
stream.on('close', resolve)
|
||||
})
|
||||
}).then(function () {
|
||||
return doFilesContainTheSameData(image, output)
|
||||
}).then(function (areEqual) {
|
||||
m.chai.expect(areEqual).to.be.true
|
||||
}).finally(function () {
|
||||
return fs.unlinkAsync(output)
|
||||
.catch({ code: 'ENOENT' }, _.noop)
|
||||
})
|
||||
})
|
||||
}
|
@ -1,65 +0,0 @@
/*
* Copyright 2016 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict'

const m = require('mochainon')
const StreamReadable = require('stream').Readable
const utils = require('../../lib/sdk/image-stream/utils')

describe('ImageStream: Utils', function () {
describe('.extractStream()', function () {
describe('given a stream that emits data', function () {
beforeEach(function () {
this.stream = new StreamReadable()

/* eslint-disable no-underscore-dangle */
this.stream._read = function () {
/* eslint-enable no-underscore-dangle */
this.push(Buffer.from('Hello', 'utf8'))
this.push(Buffer.from(' ', 'utf8'))
this.push(Buffer.from('World', 'utf8'))
this.push(null)
}
})

it('should yield the stream data', function () {
return utils.extractStream(this.stream).then((data) => {
m.chai.expect(data.toString()).to.equal('Hello World')
})
})
})

describe('given a stream that throws an error', function () {
beforeEach(function () {
this.stream = new StreamReadable()

/* eslint-disable no-underscore-dangle */
this.stream._read = function () {
/* eslint-enable no-underscore-dangle */
this.emit('error', new Error('stream error'))
}
})

it('should be rejected with the error', function () {
return utils.extractStream(this.stream).catch((error) => {
m.chai.expect(error).to.be.an.instanceof(Error)
m.chai.expect(error.message).to.equal('stream error')
})
})
})
})
})
@ -1,64 +0,0 @@
/*
* Copyright 2016 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict'

const m = require('mochainon')
const fs = require('fs')
const path = require('path')
const DATA_PATH = path.join(__dirname, 'data')
const IMAGES_PATH = path.join(DATA_PATH, 'images')
const XZ_PATH = path.join(DATA_PATH, 'xz')
const imageStream = require('../../lib/sdk/image-stream/index')
const tester = require('./tester')

describe('ImageStream: XZ', function () {
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)

describe('.getFromFilePath()', function () {
describe('given a xz image', function () {
tester.extractFromFilePath(
path.join(XZ_PATH, 'etcher-test.img.xz'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})
})

describe('.getImageMetadata()', function () {
it('should return the correct metadata', function () {
const image = path.join(XZ_PATH, 'etcher-test.img.xz')
const compressedSize = fs.statSync(image).size
const uncompressedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size

return imageStream.getImageMetadata(image).then((metadata) => {
m.chai.expect(metadata).to.deep.equal({
path: image,
extension: 'img',
archiveExtension: 'xz',
size: {
original: compressedSize,
final: {
estimation: false,
value: uncompressedSize
}
},
hasMBR: true,
hasGPT: false,
partitions: require('./data/images/etcher-test-partitions.json')
})
})
})
})
})
@ -1,127 +0,0 @@
/*
* Copyright 2016 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict'

const m = require('mochainon')
const fs = require('fs')
const path = require('path')
const DATA_PATH = path.join(__dirname, 'data')
const IMAGES_PATH = path.join(DATA_PATH, 'images')
const ZIP_PATH = path.join(DATA_PATH, 'zip')
const imageStream = require('../../lib/sdk/image-stream/index')
const tester = require('./tester')

describe('ImageStream: ZIP', function () {
this.timeout(tester.DEFAULT_IMAGE_TESTS_TIMEOUT)

describe('.getFromFilePath()', function () {
describe('given an empty zip directory', function () {
tester.expectError(
path.join(ZIP_PATH, 'zip-directory-empty.zip'),
'Invalid archive image')
})

describe('given a zip directory containing only misc files', function () {
tester.expectError(
path.join(ZIP_PATH, 'zip-directory-no-image-only-misc.zip'),
'Invalid archive image')
})

describe('given a zip with an unsupported compression method', function () {
tester.expectError(
path.join(ZIP_PATH, 'zip-deflate64.zip'),
'unsupported compression method: 9')
})

describe('given a zip directory containing multiple images', function () {
tester.expectError(
path.join(ZIP_PATH, 'zip-directory-multiple-images.zip'),
'Invalid archive image')
})

describe('given a zip directory containing only an image', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-directory-etcher-test-only.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})

describe('given a zip directory containing an image and other misc files', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-directory-etcher-test-and-misc.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})
})

describe('compression method', function () {
context('DEFLATE', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-deflate.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})

// NOTE: These tests are intentionally skipped, as the
// zip library we're currently using only supports deflate
context.skip('DEFLATE64', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-deflate64.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})

context.skip('PPMD', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-ppmd.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})

context.skip('BZIP2', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-bzip2.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})

context.skip('LZMA', function () {
tester.extractFromFilePath(
path.join(ZIP_PATH, 'zip-lzma.zip'),
path.join(IMAGES_PATH, 'etcher-test.img'))
})
})

describe('.getImageMetadata()', function () {
it('should return the correct metadata', function () {
const image = path.join(ZIP_PATH, 'zip-directory-etcher-test-only.zip')
const expectedSize = fs.statSync(path.join(IMAGES_PATH, 'etcher-test.img')).size

return imageStream.getImageMetadata(image).then((metadata) => {
m.chai.expect(metadata).to.deep.equal({
path: image,
extension: 'img',
archiveExtension: 'zip',
size: {
original: expectedSize,
final: {
estimation: false,
value: expectedSize
}
},
hasMBR: true,
hasGPT: false,
partitions: require('./data/images/etcher-test-partitions.json')
})
})
})
})
})
@ -124,7 +124,7 @@ const etcherConfig = _.assign({
// on the tree (for testing purposes) or inside a generated
// bundle (for production purposes), by translating
// relative require paths within the bundle.
if (/\/(etcher-sdk|sdk|shared)/i.test(request) || /package\.json$/.test(request)) {
if (/\/(etcher-sdk|shared)/i.test(request) || /package\.json$/.test(request)) {
const output = path.join(__dirname, 'generated')
const dirname = path.join(context, request)
const relative = path.relative(output, dirname)
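For context, the regex in this hunk gates which require requests get rewritten as bundle-relative externals; with lib/sdk removed, the `sdk` alternative is dropped while `etcher-sdk` and `shared` are kept. The following is a minimal illustrative sketch (not part of this commit) of how a function-style webpack external could use that check; the callback shape and the `commonjs` return value are assumptions for illustration only.

// Sketch only: assumes webpack's function-style externals API.
externals: [
  (context, request, callback) => {
    // Requests into the etcher-sdk/shared tree (and package.json) are
    // rewritten as paths relative to the generated bundle directory.
    if (/\/(etcher-sdk|shared)/i.test(request) || /package\.json$/.test(request)) {
      const output = path.join(__dirname, 'generated')
      const dirname = path.join(context, request)
      const relative = path.relative(output, dirname)
      return callback(null, `commonjs ${path.join('.', relative)}`)
    }
    // Everything else is bundled normally.
    return callback()
  }
]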