Mirror of https://github.com/balena-io/balena-cli.git

Merge pull request #1076 from balena-io/add-livepush

Add livepush to balena push

Commit 69db3c0171
@@ -1390,18 +1390,23 @@ Docker host TLS key file

 ## push <applicationOrDevice>

-This command can be used to start an image build on the remote balenaCloud build
-servers, or on a local-mode balena device.
+This command can be used to start a build on the remote balena cloud builders,
+or a local mode balena device.

 When building on the balenaCloud servers, the given source directory will be
 sent to the remote server. This can be used as a drop-in replacement for the
 "git push" deployment method.

-When building on a local-mode device, the given source directory will be
+When building on a local mode device, the given source directory will be
 built on the device, and the resulting containers will be run on the device.
 Logs will be streamed back from the device as part of the same invocation.
 The web dashboard can be used to switch a device to local mode:
 https://www.balena.io/docs/learn/develop/local-mode/
+Note that local mode requires a supervisor version of at least v7.21.0.
+
+It is also possible to run a push to a local mode device in live mode.
+This will watch for changes in the source directory and perform an
+in-place build in the running containers [BETA].

 The --registry-secrets option specifies a JSON or YAML file containing private
 Docker registry usernames and passwords to be used when pulling base images.
@@ -1445,6 +1450,17 @@ Don't use cache when building this project

 Path to a local YAML or JSON file containing Docker registry passwords used to pull base images

+#### --live, -l
+
+Note this feature is in beta.
+
+Start a live session with the containers pushed to a local-mode device.
+The project source folder is watched for filesystem events, and changes
+to files and folders are automatically synchronized to the running
+containers. The synchronisation is only in one direction, from this machine to
+the device, and changes made on the device itself may be overwritten.
+This feature requires a device running supervisor version v9.7.0 or greater.
+
 # Settings

 ## settings
@@ -106,6 +106,7 @@ export const push: CommandDefinition<
     emulated: boolean;
     nocache: boolean;
     'registry-secrets': string;
+    live: boolean;
   }
> = {
   signature: 'push <applicationOrDevice>',
@@ -113,18 +114,23 @@ export const push: CommandDefinition<
   description:
     'Start a remote build on the balena cloud build servers or a local mode device',
   help: stripIndent`
-    This command can be used to start an image build on the remote balenaCloud build
-    servers, or on a local-mode balena device.
+    This command can be used to start a build on the remote balena cloud builders,
+    or a local mode balena device.

     When building on the balenaCloud servers, the given source directory will be
     sent to the remote server. This can be used as a drop-in replacement for the
     "git push" deployment method.

-    When building on a local-mode device, the given source directory will be
+    When building on a local mode device, the given source directory will be
     built on the device, and the resulting containers will be run on the device.
     Logs will be streamed back from the device as part of the same invocation.
     The web dashboard can be used to switch a device to local mode:
     https://www.balena.io/docs/learn/develop/local-mode/
+    Note that local mode requires a supervisor version of at least v7.21.0.
+
+    It is also possible to run a push to a local mode device in live mode.
+    This will watch for changes in the source directory and perform an
+    in-place build in the running containers [BETA].

     ${registrySecretsHelp.split('\n').join('\n\t\t')}

@@ -165,6 +171,20 @@ export const push: CommandDefinition<
       description: stripIndent`
         Path to a local YAML or JSON file containing Docker registry passwords used to pull base images`,
     },
+    {
+      signature: 'live',
+      alias: 'l',
+      boolean: true,
+      description: stripIndent`
+        Note this feature is in beta.
+
+        Start a live session with the containers pushed to a local-mode device.
+        The project source folder is watched for filesystem events, and changes
+        to files and folders are automatically synchronized to the running
+        containers. The synchronisation is only in one direction, from this machine to
+        the device, and changes made on the device itself may be overwritten.
+        This feature requires a device running supervisor version v9.7.0 or greater.`,
+    },
   ],
   async action(params, options, done) {
     const sdk = (await import('balena-sdk')).fromSharedOptions();
@@ -194,6 +214,13 @@ export const push: CommandDefinition<
     const buildTarget = getBuildTarget(appOrDevice);
     switch (buildTarget) {
       case BuildTarget.Cloud:
+        // Ensure that the live argument has not been passed to a cloud build
+        if (options.live) {
+          exitWithExpectedError(
+            'The --live flag is only valid when pushing to a local device.',
+          );
+        }
+
         const app = appOrDevice;
         await exitIfNotLoggedIn();
         await Bluebird.join(
@@ -229,6 +256,7 @@ export const push: CommandDefinition<
           deviceHost: device,
           registrySecrets,
           nocache: options.nocache || false,
+          live: options.live || false,
         }),
       )
         .catch(BuildError, e => {
@@ -15,6 +15,7 @@
 * limitations under the License.
 */
 import * as Bluebird from 'bluebird';
+import * as _ from 'lodash';
 import * as request from 'request';
 import * as Stream from 'stream';

@@ -33,6 +34,29 @@ export interface DeviceInfo {
   arch: string;
 }

+export interface Status {
+  appState: 'applied' | 'applying';
+  overallDownloadProgress: null | number;
+  containers: Array<{
+    status: string;
+    serviceName: string;
+    appId: number;
+    imageId: number;
+    serviceId: number;
+    containerId: string;
+    createdAt: string;
+  }>;
+  images: Array<{
+    name: string;
+    appId: number;
+    serviceName: string;
+    imageId: number;
+    dockerImageId: string;
+    status: string;
+    downloadProgress: null | number;
+  }>;
+}
+
 const deviceEndpoints = {
   setTargetState: 'v2/local/target-state',
   getTargetState: 'v2/local/target-state',
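For reference, a value satisfying the new Status interface could look like the sketch below. The IDs, names and timestamps are invented; only the field shapes come from the interface above, and the image name format follows the generateImageName helper that appears later in this diff.

import { Status } from './api';

// Invented example data; shapes match the Status interface added above.
const exampleStatus: Status = {
  appState: 'applied',
  overallDownloadProgress: null,
  containers: [
    {
      status: 'Running',
      serviceName: 'main',
      appId: 1,
      imageId: 10,
      serviceId: 100,
      containerId: 'abcdef123456',
      createdAt: '2019-03-20T12:00:00.000Z',
    },
  ],
  images: [
    {
      name: 'local_image_main:latest',
      appId: 1,
      serviceName: 'main',
      imageId: 10,
      dockerImageId: 'sha256:0123456789ab',
      status: 'Downloaded',
      downloadProgress: null,
    },
  ],
};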
@@ -40,6 +64,7 @@ const deviceEndpoints = {
   logs: 'v2/local/logs',
   ping: 'ping',
   version: 'v2/version',
+  status: 'v2/state/status',
 };

 export class DeviceAPI {
@@ -126,6 +151,23 @@ export class DeviceAPI {
     });
   }

+  public getStatus(): Promise<Status> {
+    const url = this.getUrlForAction('status');
+
+    return DeviceAPI.promisifiedRequest(request.get, {
+      url,
+      json: true,
+    }).then(body => {
+      if (body.status !== 'success') {
+        throw new ApiErrors.DeviceAPIError(
+          'Non-successful response from supervisor status endpoint',
+        );
+      }
+
+      return _.omit(body, 'status') as Status;
+    });
+  }
+
   public getLogStream(): Bluebird<Stream.Readable> {
     const url = this.getUrlForAction('logs');

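A minimal sketch of how a caller might use the new getStatus() method to wait for the supervisor to settle, mirroring the awaitDeviceStateSettle loop in the new live.ts further down. The helper name is made up; the 1000 ms interval matches DEVICE_STATUS_SETTLE_CHECK_INTERVAL in live.ts.

import { DeviceAPI } from './api';

// Sketch only: poll v2/state/status until the reported app state is 'applied'.
async function waitForDeviceToSettle(api: DeviceAPI): Promise<void> {
  const status = await api.getStatus();
  if (status.appState === 'applied') {
    return;
  }
  // Not settled yet; retry after the same 1000 ms interval that live.ts uses.
  await new Promise(resolve => setTimeout(resolve, 1000));
  return waitForDeviceToSettle(api);
}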
@@ -160,8 +202,6 @@ export class DeviceAPI {
     opts: T,
     logger?: Logger,
   ): Promise<any> {
-    const _ = await import('lodash');
-
     interface ObjectWithUrl {
       url?: string;
     }
@@ -30,8 +30,9 @@ import { Readable } from 'stream';

 import { makeBuildTasks } from '../compose_ts';
 import Logger = require('../logger');
-import { DeviceInfo } from './api';
+import { DeviceAPI, DeviceInfo } from './api';
 import * as LocalPushErrors from './errors';
+import LivepushManager from './live';
 import { displayBuildLog } from './logs';

 // Define the logger here so the debug output
@@ -44,6 +45,7 @@ export interface DeviceDeployOptions {
   devicePort?: number;
   registrySecrets: RegistrySecrets;
   nocache: boolean;
+  live: boolean;
 }

 async function checkSource(source: string): Promise<boolean> {
@@ -55,7 +57,6 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
   const { loadProject, tarDirectory } = await import('../compose');
   const { exitWithExpectedError } = await import('../patterns');

-  const { DeviceAPI } = await import('./api');
   const { displayDeviceLogs } = await import('./logs');

   if (!(await checkSource(opts.source))) {
@@ -66,6 +67,7 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {

   // First check that we can access the device with a ping
   try {
+    globalLogger.logDebug('Checking we can access device');
     await api.ping();
   } catch (e) {
     exitWithExpectedError(
@@ -82,9 +84,15 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {

   try {
     const version = await api.getVersion();
+    globalLogger.logDebug(`Checking device version: ${version}`);
     if (!semver.satisfies(version, '>=7.21.4')) {
       exitWithExpectedError(versionError);
     }
+    if (opts.live && !semver.satisfies(version, '>=9.7.0')) {
+      exitWithExpectedError(
+        new Error('Using livepush requires a supervisor >= v9.7.0'),
+      );
+    }
   } catch {
     exitWithExpectedError(versionError);
   }
@@ -104,13 +112,18 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
   // Try to detect the device information
   const deviceInfo = await api.getDeviceInformation();

-  await performBuilds(
+  let buildLogs: Dictionary<string> | undefined;
+  if (opts.live) {
+    buildLogs = {};
+  }
+  const buildTasks = await performBuilds(
     project.composition,
     tarStream,
     docker,
     deviceInfo,
     globalLogger,
     opts,
+    buildLogs,
   );

   globalLogger.logDebug('Setting device state...');
@@ -133,7 +146,29 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
   // Now all we need to do is stream back the logs
   const logStream = await api.getLogStream();

-  await displayDeviceLogs(logStream, globalLogger);
+  // Now that we've set the target state, the device will do its thing,
+  // so we can either just display the logs, or start a livepush session
+  // (whilst also displaying logs)
+  if (opts.live) {
+    const livepush = new LivepushManager({
+      api,
+      buildContext: opts.source,
+      buildTasks,
+      docker,
+      logger: globalLogger,
+      composition: project.composition,
+      buildLogs: buildLogs!,
+      deployOpts: opts,
+    });
+
+    globalLogger.logLivepush('Watching for file changes...');
+    await Promise.all([
+      livepush.init(),
+      displayDeviceLogs(logStream, globalLogger),
+    ]);
+  } else {
+    await displayDeviceLogs(logStream, globalLogger);
+  }
 }

 function connectToDocker(host: string, port: number): Docker {
@@ -151,7 +186,8 @@ export async function performBuilds(
   deviceInfo: DeviceInfo,
   logger: Logger,
   opts: DeviceDeployOptions,
-): Promise<void> {
+  buildLogs?: Dictionary<string>,
+): Promise<BuildTask[]> {
   const multibuild = await import('resin-multibuild');

   const buildTasks = await makeBuildTasks(
@@ -165,7 +201,7 @@ export async function performBuilds(
   await assignDockerBuildOpts(docker, buildTasks, opts);

   logger.logDebug('Starting builds...');
-  await assignOutputHandlers(buildTasks, logger);
+  await assignOutputHandlers(buildTasks, logger, buildLogs);
   const localImages = await multibuild.performBuilds(buildTasks, docker);

   // Check for failures
@@ -184,9 +220,59 @@ export async function performBuilds(
       await image.remove({ force: true });
     }
   });
+
+  return buildTasks;
 }

-function assignOutputHandlers(buildTasks: BuildTask[], logger: Logger) {
+// Rebuild a single container, execute it on device, and
+// return the build logs
+export async function rebuildSingleTask(
+  serviceName: string,
+  docker: Docker,
+  logger: Logger,
+  deviceInfo: DeviceInfo,
+  composition: Composition,
+  source: string,
+  opts: DeviceDeployOptions,
+): Promise<string> {
+  const { tarDirectory } = await import('../compose');
+  const multibuild = await import('resin-multibuild');
+  // First we run the build task, to get the new image id
+  const buildLogs: Dictionary<string> = {};
+
+  const tarStream = await tarDirectory(source);
+
+  const task = _.find(
+    await makeBuildTasks(composition, tarStream, deviceInfo, logger),
+    { serviceName },
+  );
+
+  if (task == null) {
+    throw new Error(`Could not find build task for service ${serviceName}`);
+  }
+
+  await assignDockerBuildOpts(docker, [task], opts);
+  await assignOutputHandlers([task], logger, buildLogs);
+
+  const [localImage] = await multibuild.performBuilds([task], docker);
+
+  if (!localImage.successful) {
+    throw new LocalPushErrors.BuildError([
+      {
+        error: localImage.error!,
+        serviceName,
+      },
+    ]);
+  }
+
+  return buildLogs[task.serviceName];
+}
+
+function assignOutputHandlers(
+  buildTasks: BuildTask[],
+  logger: Logger,
+  buildLogs?: Dictionary<string>,
+) {
   _.each(buildTasks, task => {
     if (task.external) {
       task.progressHook = progressObj => {
@@ -196,6 +282,9 @@ function assignOutputHandlers(buildTasks: BuildTask[], logger: Logger) {
         );
       };
     } else {
+      if (buildLogs) {
+        buildLogs[task.serviceName] = '';
+      }
       task.streamHook = stream => {
         stream.on('data', (buf: Buffer) => {
           const str = _.trimEnd(buf.toString());
@@ -204,6 +293,12 @@ function assignOutputHandlers(buildTasks: BuildTask[], logger: Logger) {
              { serviceName: task.serviceName, message: str },
              logger,
            );
+
+           if (buildLogs) {
+             buildLogs[task.serviceName] = `${
+               buildLogs[task.serviceName]
+             }\n${str}`;
+           }
          }
        });
      };
@@ -254,7 +349,7 @@ function generateImageName(serviceName: string): string {
   return `local_image_${serviceName}:latest`;
 }

-function generateTargetState(
+export function generateTargetState(
   currentTargetState: any,
   composition: Composition,
 ): any {
@@ -22,6 +22,15 @@ export class BuildError extends TypedError {
     });
     return str;
   }
+
+  public getServiceError(serviceName: string): string {
+    const failure = _.find(this.failures, f => f.serviceName === serviceName);
+    if (failure == null) {
+      return 'Unknown build failure';
+    }
+
+    return failure.error.message;
+  }
 }

 export class DeviceAPIError extends TypedError {}
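A small usage sketch for the new getServiceError method. The failure shape matches what rebuildSingleTask throws in deploy.ts above; the error text itself is invented.

import * as LocalPushErrors from './errors';

// Sketch: report a single failed service rebuild and read its message back.
const buildError = new LocalPushErrors.BuildError([
  { serviceName: 'main', error: new Error('npm install failed') },
]);

console.log(buildError.getServiceError('main')); // 'npm install failed'
console.log(buildError.getServiceError('other')); // 'Unknown build failure'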
lib/utils/device/live.ts (new file, 409 lines)
@@ -0,0 +1,409 @@
+import * as Bluebird from 'bluebird';
+import * as chokidar from 'chokidar';
+import * as Dockerode from 'dockerode';
+import Livepush from 'livepush';
+import * as _ from 'lodash';
+import * as path from 'path';
+import { Composition } from 'resin-compose-parse';
+import { BuildTask } from 'resin-multibuild';
+
+import Logger = require('../logger');
+
+import DeviceAPI, { DeviceInfo, Status } from './api';
+import {
+  DeviceDeployOptions,
+  generateTargetState,
+  rebuildSingleTask,
+} from './deploy';
+import { BuildError } from './errors';
+
+// How often do we want to check the device state
+// engine has settled (delay in ms)
+const DEVICE_STATUS_SETTLE_CHECK_INTERVAL = 1000;
+
+interface MonitoredContainer {
+  context: string;
+  livepush: Livepush;
+  monitor: chokidar.FSWatcher;
+  containerId: string;
+}
+
+interface ContextEvent {
+  type: 'add' | 'change' | 'unlink';
+  filename: string;
+  serviceName: string;
+}
+
+type BuildLogs = Dictionary<string>;
+type StageImageIDs = Dictionary<string[]>;
+
+export interface LivepushOpts {
+  buildContext: string;
+  composition: Composition;
+  buildTasks: BuildTask[];
+  docker: Dockerode;
+  api: DeviceAPI;
+  logger: Logger;
+  buildLogs: BuildLogs;
+  deployOpts: DeviceDeployOptions;
+}
+
+export class LivepushManager {
+  private lastDeviceStatus: Status | null = null;
+  private containers: Dictionary<MonitoredContainer> = {};
+  private dockerfilePaths: Dictionary<string[]> = {};
+  private deviceInfo: DeviceInfo;
+  private deployOpts: DeviceDeployOptions;
+
+  private buildContext: string;
+  private composition: Composition;
+  private buildTasks: BuildTask[];
+  private docker: Dockerode;
+  private api: DeviceAPI;
+  private logger: Logger;
+  private imageIds: StageImageIDs;
+
+  public constructor(opts: LivepushOpts) {
+    this.buildContext = opts.buildContext;
+    this.composition = opts.composition;
+    this.buildTasks = opts.buildTasks;
+    this.docker = opts.docker;
+    this.api = opts.api;
+    this.logger = opts.logger;
+    this.deployOpts = opts.deployOpts;
+    this.imageIds = LivepushManager.getMultistageImageIDs(opts.buildLogs);
+  }
+
+  public async init(): Promise<void> {
+    this.deviceInfo = await this.api.getDeviceInformation();
+    this.logger.logLivepush('Waiting for device state to settle...');
+    // The first thing we need to do is let the state 'settle',
+    // so that all of the containers are running and ready to
+    // be livepush'd into
+    await this.awaitDeviceStateSettle();
+    // Split the composition into a load of different paths which we can
+    // create livepush instances for
+
+    for (const serviceName of _.keys(this.composition.services)) {
+      const service = this.composition.services[serviceName];
+      const buildTask = _.find(this.buildTasks, { serviceName });
+
+      if (buildTask == null) {
+        throw new Error(
+          `Could not find a build task for service: ${serviceName}`,
+        );
+      }
+
+      // We only care about builds
+      if (service.build != null) {
+        const context = path.join(this.buildContext, service.build.context);
+        const dockerfile = buildTask.dockerfile;
+        if (dockerfile == null) {
+          throw new Error(
+            `Could not detect dockerfile for service: ${serviceName}`,
+          );
+        }
+
+        if (buildTask.dockerfilePath == null) {
+          // this is a bit of a hack as resin-bundle-resolve
+          // does not always export the dockerfilePath, this
+          // only happens when the dockerfile path is
+          // specified differently - this should be patched
+          // in resin-bundle-resolve
+          this.dockerfilePaths[
+            buildTask.serviceName
+          ] = this.getDockerfilePathFromTask(buildTask);
+        } else {
+          this.dockerfilePaths[buildTask.serviceName] = [
+            buildTask.dockerfilePath,
+          ];
+        }
+
+        // Find the containerId from the device state
+        const container = _.find(this.lastDeviceStatus!.containers, {
+          serviceName,
+        });
+        if (container == null) {
+          throw new Error(
+            `Could not find a container on device for service: ${serviceName}`,
+          );
+        }
+
+        const log = (msg: string) => {
+          this.logger.logLivepush(`[service ${serviceName}] ${msg}`);
+        };
+
+        const livepush = await Livepush.init(
+          dockerfile,
+          context,
+          container.containerId,
+          this.imageIds[serviceName],
+          this.docker,
+        );
+
+        livepush.on('commandExecute', command =>
+          log(`Executing command: \`${command.command}\``),
+        );
+        livepush.on('commandOutput', output =>
+          log(`  ${output.output.data.toString()}`),
+        );
+
+        // TODO: Memoize this for containers which share a context
+        const monitor = chokidar.watch('.', {
+          cwd: context,
+          ignoreInitial: true,
+        });
+        monitor.on('add', (changedPath: string) =>
+          this.handleFSEvent({
+            filename: changedPath,
+            type: 'add',
+            serviceName,
+          }),
+        );
+        monitor.on('change', (changedPath: string) =>
+          this.handleFSEvent({
+            filename: changedPath,
+            type: 'change',
+            serviceName,
+          }),
+        );
+        monitor.on('unlink', (changedPath: string) =>
+          this.handleFSEvent({
+            filename: changedPath,
+            type: 'unlink',
+            serviceName,
+          }),
+        );
+        this.containers[serviceName] = {
+          livepush,
+          context,
+          monitor,
+          containerId: container.containerId,
+        };
+      }
+    }
+
+    // Set up cleanup handlers for the device
+
+    // This is necessary because the `exit-hook` module is used by several
+    // dependencies, and will exit without calling the following handler.
+    // Once https://github.com/balena-io/balena-cli/issues/867 has been solved,
+    // we are free to (and definitely should) remove the below line
+    process.removeAllListeners('SIGINT');
+    process.on('SIGINT', async () => {
+      this.logger.logLivepush('Cleaning up device...');
+      await Promise.all(
+        _.map(this.containers, container => {
+          container.livepush.cleanupIntermediateContainers();
+        }),
+      );
+
+      process.exit(0);
+    });
+  }
+
+  private static getMultistageImageIDs(buildLogs: BuildLogs): StageImageIDs {
+    const stageIds: StageImageIDs = {};
+    _.each(buildLogs, (log, serviceName) => {
+      stageIds[serviceName] = [];
+
+      const lines = log.split(/\r?\n/);
+      let lastArrowMessage: string | undefined;
+      for (const line of lines) {
+        // If this was a from line, take the last found
+        // image id and save it
+        if (
+          /step \d+(?:\/\d+)?\s*:\s*FROM/i.test(line) &&
+          lastArrowMessage != null
+        ) {
+          stageIds[serviceName].push(lastArrowMessage);
+        } else {
+          const msg = LivepushManager.extractDockerArrowMessage(line);
+          if (msg != null) {
+            lastArrowMessage = msg;
+          }
+        }
+      }
+    });
+
+    return stageIds;
+  }
+
+  private async awaitDeviceStateSettle(): Promise<void> {
+    // Cache the state to avoid unnecessary calls
+    this.lastDeviceStatus = await this.api.getStatus();
+
+    if (this.lastDeviceStatus.appState === 'applied') {
+      return;
+    }
+
+    this.logger.logDebug(
+      `Device state not settled, retrying in ${DEVICE_STATUS_SETTLE_CHECK_INTERVAL}ms`,
+    );
+    await Bluebird.delay(DEVICE_STATUS_SETTLE_CHECK_INTERVAL);
+    await this.awaitDeviceStateSettle();
+  }
+
+  private async handleFSEvent(fsEvent: ContextEvent): Promise<void> {
+    this.logger.logDebug(
+      `Got a filesystem event for service: ${
+        fsEvent.serviceName
+      }. Event: ${JSON.stringify(fsEvent)}`,
+    );
+
+    // First we detect if the file changed is the Dockerfile
+    // used to build the service
+    if (
+      _.some(
+        this.dockerfilePaths[fsEvent.serviceName],
+        name => name === fsEvent.filename,
+      )
+    ) {
+      if (fsEvent.type !== 'change') {
+        throw new Error(`Deletion or addition of Dockerfiles not supported`);
+      }
+
+      this.logger.logLivepush(
+        `Detected Dockerfile change, performing full rebuild of service ${
+          fsEvent.serviceName
+        }`,
+      );
+      await this.handleServiceRebuild(fsEvent.serviceName);
+      return;
+    }
+
+    let updates: string[] = [];
+    let deletes: string[] = [];
+    switch (fsEvent.type) {
+      case 'add':
+      case 'change':
+        updates = [fsEvent.filename];
+        break;
+      case 'unlink':
+        deletes = [fsEvent.filename];
+        break;
+      default:
+        throw new Error(`Unknown event: ${fsEvent.type}`);
+    }
+
+    // Work out if we need to perform any changes on this container
+    const livepush = this.containers[fsEvent.serviceName].livepush;
+
+    this.logger.logLivepush(
+      `Detected changes for container ${fsEvent.serviceName}, updating...`,
+    );
+    await livepush.performLivepush(updates, deletes);
+  }
+
+  private async handleServiceRebuild(serviceName: string): Promise<void> {
+    try {
+      const buildTask = _.find(this.buildTasks, { serviceName });
+      if (buildTask == null) {
+        throw new Error(
+          `Could not find a build task for service ${serviceName}`,
+        );
+      }
+
+      let buildLog: string;
+      try {
+        buildLog = await rebuildSingleTask(
+          serviceName,
+          this.docker,
+          this.logger,
+          this.deviceInfo,
+          this.composition,
+          this.buildContext,
+          this.deployOpts,
+        );
+      } catch (e) {
+        if (!(e instanceof BuildError)) {
+          throw e;
+        }
+
+        this.logger.logError(
+          `Rebuild of service ${serviceName} failed!\n  Error: ${e.getServiceError(
+            serviceName,
+          )}`,
+        );
+        return;
+      }
+
+      // TODO: The code below is quite roundabout, and instead
+      // we'd prefer just to call a supervisor endpoint which
+      // recreates a container, but that doesn't exist yet
+
+      // First we request the current target state
+      const currentState = await this.api.getTargetState();
+
+      // Then we generate a target state without the service
+      // we rebuilt
+      const comp = _.cloneDeep(this.composition);
+      delete comp.services[serviceName];
+      const intermediateState = generateTargetState(currentState, comp);
+      await this.api.setTargetState(intermediateState);
+
+      // Now we wait for the device state to settle
+      await this.awaitDeviceStateSettle();
+
+      // And re-set the target state
+      await this.api.setTargetState(
+        generateTargetState(currentState, this.composition),
+      );
+
+      await this.awaitDeviceStateSettle();
+
+      const instance = this.containers[serviceName];
+      // Get the new container
+      const container = _.find(this.lastDeviceStatus!.containers, {
+        serviceName,
+      });
+      if (container == null) {
+        throw new Error(
+          `Could not find new container for service ${serviceName}`,
+        );
+      }
+
+      const buildLogs: Dictionary<string> = {};
+      buildLogs[serviceName] = buildLog;
+      const stageImages = LivepushManager.getMultistageImageIDs(buildLogs);
+
+      instance.livepush = await Livepush.init(
+        buildTask.dockerfile!,
+        buildTask.context!,
+        container.containerId,
+        stageImages[serviceName],
+        this.docker,
+      );
+    } catch (e) {
+      this.logger.logError(`There was an error rebuilding the service: ${e}`);
+    }
+  }
+
+  private static extractDockerArrowMessage(
+    outputLine: string,
+  ): string | undefined {
+    const arrowTest = /^.*\s*-+>\s*(.+)/i;
+    const match = arrowTest.exec(outputLine);
+    if (match != null) {
+      return match[1];
+    }
+  }
+
+  private getDockerfilePathFromTask(task: BuildTask): string[] {
+    switch (task.projectType) {
+      case 'Standard Dockerfile':
+        return ['Dockerfile'];
+      case 'Dockerfile.template':
+        return ['Dockerfile.template'];
+      case 'Architecture-specific Dockerfile':
+        return [
+          `Dockerfile.${this.deviceInfo.arch}`,
+          `Dockerfile.${this.deviceInfo.deviceType}`,
+        ];
+      default:
+        return [];
+    }
+  }
+}
+
+export default LivepushManager;
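To illustrate how getMultistageImageIDs in the new live.ts interprets a build log: it records the last " ---> <id>" seen before each subsequent "Step N : FROM ..." line, i.e. the final image of every earlier stage of a multistage Dockerfile. The builder output below is made up.

// Made-up classic docker builder output for one service.
const sampleLog = [
  'Step 1/5 : FROM node:10 AS build',
  ' ---> 1111111111aa',
  'Step 2/5 : RUN npm ci && npm run build',
  ' ---> 2222222222bb',
  'Step 3/5 : FROM node:10-alpine',
  ' ---> 3333333333cc',
  'Step 4/5 : COPY --from=build /usr/src/app/build ./build',
  ' ---> 4444444444dd',
].join('\n');

// Fed in as { main: sampleLog }, getMultistageImageIDs would return
// { main: ['2222222222bb'] }: only the image id immediately preceding the
// second FROM belongs to a completed earlier stage, and those are the stage
// images passed to Livepush.init() for in-place rebuilds.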
@@ -28,6 +28,7 @@ class Logger {
     warn: NodeJS.ReadWriteStream;
     error: NodeJS.ReadWriteStream;
     logs: NodeJS.ReadWriteStream;
+    livepush: NodeJS.ReadWriteStream;
   };

   public formatMessage: (name: string, message: string) => string;
@@ -41,6 +42,7 @@ class Logger {
     logger.addPrefix('warn', chalk.yellow('[Warn]'));
     logger.addPrefix('error', chalk.red('[Error]'));
     logger.addPrefix('logs', chalk.green('[Logs]'));
+    logger.addPrefix('live', chalk.yellow('[Live]'));

     this.streams = {
       build: logger.createLogStream('build'),
@@ -50,6 +52,7 @@ class Logger {
       warn: logger.createLogStream('warn'),
       error: logger.createLogStream('error'),
       logs: logger.createLogStream('logs'),
+      livepush: logger.createLogStream('live'),
     };

     _.forEach(this.streams, function(stream, key) {
@@ -88,6 +91,10 @@ class Logger {
   public logLogs(msg: string) {
     return this.streams.logs.write(msg + eol);
   }
+
+  public logLivepush(msg: string) {
+    return this.streams.livepush.write(msg + eol);
+  }
 }

 export = Logger;
@ -65,6 +65,7 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/archiver": "2.1.2",
|
"@types/archiver": "2.1.2",
|
||||||
"@types/bluebird": "3.5.21",
|
"@types/bluebird": "3.5.21",
|
||||||
|
"@types/chokidar": "^1.7.5",
|
||||||
"@types/common-tags": "1.4.0",
|
"@types/common-tags": "1.4.0",
|
||||||
"@types/dockerode": "2.5.5",
|
"@types/dockerode": "2.5.5",
|
||||||
"@types/es6-promise": "0.0.32",
|
"@types/es6-promise": "0.0.32",
|
||||||
@ -119,6 +120,7 @@
|
|||||||
"body-parser": "^1.14.1",
|
"body-parser": "^1.14.1",
|
||||||
"capitano": "^1.9.0",
|
"capitano": "^1.9.0",
|
||||||
"chalk": "^2.3.0",
|
"chalk": "^2.3.0",
|
||||||
|
"chokidar": "^2.0.4",
|
||||||
"cli-truncate": "^1.1.0",
|
"cli-truncate": "^1.1.0",
|
||||||
"coffeescript": "^1.12.6",
|
"coffeescript": "^1.12.6",
|
||||||
"color-hash": "^1.0.3",
|
"color-hash": "^1.0.3",
|
||||||
@ -143,6 +145,7 @@
|
|||||||
"is-root": "^1.0.0",
|
"is-root": "^1.0.0",
|
||||||
"js-yaml": "^3.10.0",
|
"js-yaml": "^3.10.0",
|
||||||
"klaw": "^3.0.0",
|
"klaw": "^3.0.0",
|
||||||
|
"livepush": "^1.1.3",
|
||||||
"lodash": "^4.17.4",
|
"lodash": "^4.17.4",
|
||||||
"minimatch": "^3.0.4",
|
"minimatch": "^3.0.4",
|
||||||
"mixpanel": "^0.10.1",
|
"mixpanel": "^0.10.1",
|
||||||
|
typings/global.d.ts (new file, vendored, 3 lines)
@@ -0,0 +1,3 @@
+interface Dictionary<T> {
+  [key: string]: T;
+}