Add livepush ability to balena push
Change-type: minor
Signed-off-by: Cameron Diver <cameron@balena.io>
parent cf2ad66955
commit 6a9a9e1fdb
@@ -1445,6 +1445,11 @@ Don't use cache when building this project

Path to a local YAML or JSON file containing Docker registry passwords used to pull base images

#### --live, -l

Start a live session after the push, which will wait for code changes, and synchronise them with
running containers. This mode is only valid when pushing to a local device.

# Settings

## settings
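As a usage sketch (the device address below is hypothetical): `balena push 192.168.1.42 --live` builds the project on the local device and then keeps watching the project directory, streaming file changes into the running containers, whereas a plain `balena push 192.168.1.42` only streams back the build and device logs.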
@@ -106,6 +106,7 @@ export const push: CommandDefinition<
emulated: boolean;
nocache: boolean;
'registry-secrets': string;
live: boolean;
}
> = {
signature: 'push <applicationOrDevice>',
@@ -165,6 +166,14 @@ export const push: CommandDefinition<
description: stripIndent`
Path to a local YAML or JSON file containing Docker registry passwords used to pull base images`,
},
{
signature: 'live',
alias: 'l',
boolean: true,
description: stripIndent`
Start a live session after the push, which will wait for code changes, and synchronise them with
running containers. This mode is only valid when pushing to a local device.`,
},
],
async action(params, options, done) {
const sdk = (await import('balena-sdk')).fromSharedOptions();
@@ -194,6 +203,13 @@ export const push: CommandDefinition<
const buildTarget = getBuildTarget(appOrDevice);
switch (buildTarget) {
case BuildTarget.Cloud:
// Ensure that the live argument has not been passed to a cloud build
if (options.live) {
exitWithExpectedError(
'The --live flag is only valid when pushing to a local device.',
);
}

const app = appOrDevice;
await exitIfNotLoggedIn();
await Bluebird.join(
@@ -229,6 +245,7 @@ export const push: CommandDefinition<
deviceHost: device,
registrySecrets,
nocache: options.nocache || false,
live: options.live || false,
}),
)
.catch(BuildError, e => {
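In other words (application name and device address hypothetical), `balena push myApp --live` now fails fast with the error above, since a cloud build can never host a live session, while `balena push 192.168.1.42 --live` continues down the local-device path and forwards `live: true` in the device deploy options shown in the last hunk.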
@@ -32,6 +32,7 @@ import { makeBuildTasks } from '../compose_ts';
import Logger = require('../logger');
import { DeviceInfo } from './api';
import * as LocalPushErrors from './errors';
import LivepushManager from './live';
import { displayBuildLog } from './logs';

// Define the logger here so the debug output
@@ -44,6 +45,7 @@ export interface DeviceDeployOptions {
devicePort?: number;
registrySecrets: RegistrySecrets;
nocache: boolean;
live: boolean;
}

async function checkSource(source: string): Promise<boolean> {
@@ -66,6 +68,7 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {

// First check that we can access the device with a ping
try {
globalLogger.logDebug('Checking we can access device');
await api.ping();
} catch (e) {
exitWithExpectedError(
@@ -82,9 +85,18 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {

try {
const version = await api.getVersion();
globalLogger.logDebug(`Checking device version: ${version}`);
if (!semver.satisfies(version, '>=7.21.4')) {
exitWithExpectedError(versionError);
}
// FIXME: DO NOT MERGE until this version number has been updated
// with the version which the following PR ends up in the supervisor
// https://github.com/balena-io/balena-supervisor/pull/828
if (opts.live && !semver.satisfies(version, '>=1.0.0')) {
exitWithExpectedError(
new Error('Using livepush requires a supervisor >= v1.0.0'),
);
}
} catch {
exitWithExpectedError(versionError);
}
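For illustration, the gate above is a plain semver range check; the version strings below are made up, and '>=1.0.0' is the placeholder range the FIXME refers to:

```typescript
import * as semver from 'semver';

// Hypothetical supervisor versions reported by the device API:
semver.satisfies('7.21.3', '>=7.21.4'); // false -> exitWithExpectedError(versionError)
semver.satisfies('7.25.0', '>=7.21.4'); // true  -> continue, then check the livepush gate
semver.satisfies('0.9.0', '>=1.0.0');   // false -> 'Using livepush requires a supervisor >= v1.0.0'
```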
@@ -104,13 +116,18 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
// Try to detect the device information
const deviceInfo = await api.getDeviceInformation();

await performBuilds(
let buildLogs: Dictionary<string> | undefined;
if (opts.live) {
buildLogs = {};
}
const buildTasks = await performBuilds(
project.composition,
tarStream,
docker,
deviceInfo,
globalLogger,
opts,
buildLogs,
);

globalLogger.logDebug('Setting device state...');
@@ -133,6 +150,23 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
// Now all we need to do is stream back the logs
const logStream = await api.getLogStream();

// Now that we've set the target state, the device will do its thing
// so we can either just display the logs, or start a livepush session
// (whilst also displaying logs)
if (opts.live) {
const livepush = new LivepushManager({
api,
buildContext: opts.source,
buildTasks,
docker,
logger: globalLogger,
composition: project.composition,
buildLogs: buildLogs!,
});

globalLogger.logLivepush('Watching for file changes...');
await livepush.init();
}
await displayDeviceLogs(logStream, globalLogger);
}

@@ -151,7 +185,8 @@ export async function performBuilds(
deviceInfo: DeviceInfo,
logger: Logger,
opts: DeviceDeployOptions,
): Promise<void> {
buildLogs?: Dictionary<string>,
): Promise<BuildTask[]> {
const multibuild = await import('resin-multibuild');

const buildTasks = await makeBuildTasks(
@@ -165,7 +200,7 @@ export async function performBuilds(
await assignDockerBuildOpts(docker, buildTasks, opts);

logger.logDebug('Starting builds...');
await assignOutputHandlers(buildTasks, logger);
await assignOutputHandlers(buildTasks, logger, buildLogs);
const localImages = await multibuild.performBuilds(buildTasks, docker);

// Check for failures
@@ -184,9 +219,15 @@ export async function performBuilds(
await image.remove({ force: true });
}
});

return buildTasks;
}

function assignOutputHandlers(buildTasks: BuildTask[], logger: Logger) {
function assignOutputHandlers(
buildTasks: BuildTask[],
logger: Logger,
buildLogs?: Dictionary<string>,
) {
_.each(buildTasks, task => {
if (task.external) {
task.progressHook = progressObj => {
@@ -196,6 +237,9 @@ function assignOutputHandlers(buildTasks: BuildTask[], logger: Logger) {
);
};
} else {
if (buildLogs) {
buildLogs[task.serviceName] = '';
}
task.streamHook = stream => {
stream.on('data', (buf: Buffer) => {
const str = _.trimEnd(buf.toString());
@@ -204,6 +248,12 @@ function assignOutputHandlers(buildTasks: BuildTask[], logger: Logger) {
{ serviceName: task.serviceName, message: str },
logger,
);

if (buildLogs) {
buildLogs[task.serviceName] = `${
buildLogs[task.serviceName]
}\n${str}`;
}
}
});
};
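To make the new `buildLogs` plumbing concrete: when `--live` is used, the streamHook above accumulates each non-external service's raw Docker build output keyed by service name, so after the builds it might look roughly like this (service name and output lines invented for illustration):

```typescript
// Illustrative only; real content comes from the streamHook above.
const buildLogs: Dictionary<string> = {
  main: [
    'Step 1/3 : FROM balenalib/raspberrypi3-node:10',
    ' ---> 1a2b3c4d5e6f',
    'Step 2/3 : COPY . /usr/src/app',
    ' ---> 6f5e4d3c2b1a',
  ].join('\n'),
};
```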
lib/utils/device/live.ts (new file, 244 lines)
@@ -0,0 +1,244 @@
import * as Bluebird from 'bluebird';
import * as chokidar from 'chokidar';
import * as Dockerode from 'dockerode';
import Livepush from 'livepush';
import * as _ from 'lodash';
import * as path from 'path';
import { Composition } from 'resin-compose-parse';
import { BuildTask } from 'resin-multibuild';

import Logger = require('../logger');

import DeviceAPI, { Status } from './api';

// How often we want to check that the device state has settled (delay in ms)
const DEVICE_STATUS_SETTLE_CHECK_INTERVAL = 500;

interface MonitoredContainer {
context: string;
livepush: Livepush;
monitor: chokidar.FSWatcher;
}

interface ContextEvent {
type: 'add' | 'change' | 'unlink';
filename: string;
serviceName: string;
}

type BuildLogs = Dictionary<string>;
type StageImageIDs = Dictionary<string[]>;

export interface LivepushOpts {
buildContext: string;
composition: Composition;
buildTasks: BuildTask[];
docker: Dockerode;
api: DeviceAPI;
logger: Logger;
buildLogs: BuildLogs;
}

export class LivepushManager {
private lastDeviceStatus: Status | null = null;
private containers: Dictionary<MonitoredContainer> = {};

private buildContext: string;
private composition: Composition;
private buildTasks: BuildTask[];
private docker: Dockerode;
private api: DeviceAPI;
private logger: Logger;
private imageIds: StageImageIDs;

public constructor(opts: LivepushOpts) {
this.buildContext = opts.buildContext;
this.composition = opts.composition;
this.buildTasks = opts.buildTasks;
this.docker = opts.docker;
this.api = opts.api;
this.logger = opts.logger;
this.imageIds = LivepushManager.getMultistageImageIDs(opts.buildLogs);
}

public async init(): Promise<void> {
this.logger.logLivepush('Waiting for device state to settle...');
// The first thing we need to do is let the state 'settle',
// so that all of the containers are running and ready to
// be livepush'd into
await this.awaitDeviceStateSettle();
// Split the composition into a load of different paths which we can
// create livepush instances for

for (const serviceName of _.keys(this.composition.services)) {
const service = this.composition.services[serviceName];
const buildTask = _.find(this.buildTasks, { serviceName });

if (buildTask == null) {
throw new Error(
`Could not find a build task for service: ${serviceName}`,
);
}

// We only care about builds
if (service.build != null) {
const context = path.join(this.buildContext, service.build.context);
const dockerfile = buildTask.dockerfile;
if (dockerfile == null) {
throw new Error(
`Could not detect dockerfile for service: ${serviceName}`,
);
}

// Find the containerId from the device state
const container = _.find(this.lastDeviceStatus!.containers, {
serviceName,
});
if (container == null) {
throw new Error(
`Could not find a container on device for service: ${serviceName}`,
);
}

const log = (msg: string) => {
this.logger.logLivepush(`[service ${serviceName}] ${msg}`);
};

const livepush = await Livepush.init(
dockerfile,
context,
container.containerId,
this.imageIds[serviceName],
this.docker,
);

livepush.on('commandExecute', command =>
log(`Executing command: \`${command.command}\``),
);
livepush.on('commandOutput', output =>
log(` ${output.output.data.toString()}`),
);

// TODO: Memoize this for containers which share a context
const monitor = chokidar.watch('.', {
cwd: context,
ignoreInitial: true,
});
monitor.on('add', (changedPath: string) =>
this.handleFSEvent({
filename: changedPath,
type: 'add',
serviceName,
}),
);
monitor.on('change', (changedPath: string) =>
this.handleFSEvent({
filename: changedPath,
type: 'change',
serviceName,
}),
);
monitor.on('unlink', (changedPath: string) =>
this.handleFSEvent({
filename: changedPath,
type: 'unlink',
serviceName,
}),
);
this.containers[serviceName] = {
livepush,
context,
monitor,
};
}
}
}

private static getMultistageImageIDs(buildLogs: BuildLogs): StageImageIDs {
const stageIds: StageImageIDs = {};
_.each(buildLogs, (log, serviceName) => {
stageIds[serviceName] = [];

const lines = log.split(/\r?\n/);
let lastArrowMessage: string | undefined;
for (const line of lines) {
// If this was a FROM line, take the last found
// image id and save it
if (
/step \d+(?:\/\d+)?\s*:\s*FROM/i.test(line) &&
lastArrowMessage != null
) {
stageIds[serviceName].push(lastArrowMessage);
} else {
const msg = LivepushManager.extractDockerArrowMessage(line);
if (msg != null) {
lastArrowMessage = msg;
}
}
}
});

return stageIds;
}

private async awaitDeviceStateSettle(): Promise<void> {
// Cache the state to avoid unnecessary calls
this.lastDeviceStatus = await this.api.getStatus();

if (this.lastDeviceStatus.appState === 'applied') {
return;
}

this.logger.logDebug(
`Device state not settled, retrying in ${DEVICE_STATUS_SETTLE_CHECK_INTERVAL}ms`,
);
await Bluebird.delay(DEVICE_STATUS_SETTLE_CHECK_INTERVAL);
await this.awaitDeviceStateSettle();
}

private async handleFSEvent(fsEvent: ContextEvent): Promise<void> {
// TODO: If there's a dockerfile event, we must perform a rebuild
this.logger.logDebug(
`Got a filesystem event for service: ${
fsEvent.serviceName
}. Event: ${JSON.stringify(fsEvent)}`,
);

let updates: string[] = [];
let deletes: string[] = [];
switch (fsEvent.type) {
case 'add':
updates = [fsEvent.filename];
break;
case 'change':
updates = [fsEvent.filename];
break;
case 'unlink':
deletes = [fsEvent.filename];
break;
default:
throw new Error(`Unknown event: ${fsEvent.type}`);
}

// Work out if we need to perform any changes on this container
const livepush = this.containers[fsEvent.serviceName].livepush;

this.logger.logLivepush(
`Detected changes for container ${fsEvent.serviceName}, updating...`,
);
await livepush.performLivepush(updates, deletes);
}

private static extractDockerArrowMessage(
outputLine: string,
): string | undefined {
const arrowTest = /^.*\s*-+>\s*(.+)/i;
const match = arrowTest.exec(outputLine);
if (match != null) {
return match[1];
}
}
}

export default LivepushManager;
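A quick worked example of the stage-ID extraction in the new file above (log lines and image IDs invented): `extractDockerArrowMessage` keeps track of the most recent `--->` hash, and each time a new `Step N/M : FROM ...` line appears, the hash belonging to the just-finished stage is recorded, so only intermediate stage image IDs are collected and the final stage's image ID is left out.

```typescript
// Hypothetical multistage build output for a service called 'main':
const sampleLogs: Dictionary<string> = {
  main: [
    'Step 1/4 : FROM node:10 AS build', // first FROM: nothing recorded yet
    ' ---> aaa111aaa111',
    'Step 2/4 : RUN npm ci',
    ' ---> bbb222bbb222',
    'Step 3/4 : FROM node:10-slim', // records 'bbb222bbb222' (end of stage 1)
    ' ---> ccc333ccc333',
    'Step 4/4 : COPY --from=build /app /app',
    ' ---> ddd444ddd444', // final image, not recorded here
  ].join('\n'),
};
// getMultistageImageIDs(sampleLogs) would yield roughly:
// { main: ['bbb222bbb222'] }
```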
@@ -65,6 +65,7 @@
"devDependencies": {
"@types/archiver": "2.1.2",
"@types/bluebird": "3.5.21",
"@types/chokidar": "^1.7.5",
"@types/common-tags": "1.4.0",
"@types/dockerode": "2.5.5",
"@types/es6-promise": "0.0.32",
@@ -119,6 +120,7 @@
"body-parser": "^1.14.1",
"capitano": "^1.9.0",
"chalk": "^2.3.0",
"chokidar": "^2.0.4",
"cli-truncate": "^1.1.0",
"coffeescript": "^1.12.6",
"color-hash": "^1.0.3",
@@ -143,6 +145,7 @@
"is-root": "^1.0.0",
"js-yaml": "^3.10.0",
"klaw": "^3.0.0",
"livepush": "^1.1.3",
"lodash": "^4.17.4",
"minimatch": "^3.0.4",
"mixpanel": "^0.10.1",
typings/global.d.ts (new vendored file, 3 lines)
@@ -0,0 +1,3 @@
interface Dictionary<T> {
[key: string]: T;
}
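This ambient helper type is what the new `buildLogs` and `stageIds` maps above rely on; a minimal illustration (variable names arbitrary):

```typescript
// Any string-keyed map can now be typed without an import:
const buildLogs: Dictionary<string> = {};
buildLogs['main'] = 'Step 1/3 : FROM balenalib/raspberrypi3-node:10';

const stageIds: Dictionary<string[]> = { main: ['1a2b3c4d5e6f'] };
```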