Merge pull request #2069 from balena-io/1053-buildArgs-compose

Fix `--buildArg` with compose projects; Convert `buildProject` to TypeScript
bulldozer-balena[bot] 2020-10-23 10:04:53 +00:00 committed by GitHub
commit 7534042519
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 825 additions and 566 deletions

View File

@ -22,8 +22,8 @@ import * as compose from '../utils/compose';
import type { Application, ApplicationType, BalenaSDK } from 'balena-sdk';
import { dockerignoreHelp, registrySecretsHelp } from '../utils/messages';
import type { ComposeCliFlags, ComposeOpts } from '../utils/compose-types';
import { composeCliFlags } from '../utils/compose_ts';
import type { DockerCliFlags } from '../utils/docker';
import { buildProject, composeCliFlags } from '../utils/compose_ts';
import type { BuildOpts, DockerCliFlags } from '../utils/docker';
import { dockerCliFlags } from '../utils/docker';
interface FlagsDef extends ComposeCliFlags, DockerCliFlags {
@ -219,7 +219,7 @@ ${dockerignoreHelp}
arch: string;
deviceType: string;
buildEmulated: boolean;
buildOpts: any;
buildOpts: BuildOpts;
},
) {
const { loadProject } = await import('../utils/compose_ts');
@ -238,21 +238,21 @@ ${dockerignoreHelp}
);
}
await compose.buildProject(
await buildProject({
docker,
logger,
project.path,
project.name,
project.composition,
opts.arch,
opts.deviceType,
opts.buildEmulated,
opts.buildOpts,
composeOpts.inlineLogs,
composeOpts.convertEol,
composeOpts.dockerfilePath,
composeOpts.nogitignore,
composeOpts.multiDockerignore,
);
projectPath: project.path,
projectName: project.name,
composition: project.composition,
arch: opts.arch,
deviceType: opts.deviceType,
emulated: opts.buildEmulated,
buildOpts: opts.buildOpts,
inlineLogs: composeOpts.inlineLogs,
convertEol: composeOpts.convertEol,
dockerfilePath: composeOpts.dockerfilePath,
nogitignore: composeOpts.nogitignore,
multiDockerignore: composeOpts.multiDockerignore,
});
}
}

View File

@ -16,14 +16,24 @@
*/
import { flags } from '@oclif/command';
import type { ImageDescriptor } from 'resin-compose-parse';
import Command from '../command';
import { ExpectedError } from '../errors';
import { getBalenaSdk, getChalk } from '../utils/lazy';
import { dockerignoreHelp, registrySecretsHelp } from '../utils/messages';
import * as compose from '../utils/compose';
import type { ComposeCliFlags, ComposeOpts } from '../utils/compose-types';
import type {
BuiltImage,
ComposeCliFlags,
ComposeOpts,
} from '../utils/compose-types';
import type { DockerCliFlags } from '../utils/docker';
import { composeCliFlags } from '../utils/compose_ts';
import {
buildProject,
composeCliFlags,
isBuildConfig,
} from '../utils/compose_ts';
import { dockerCliFlags } from '../utils/docker';
import type { Application, ApplicationType, DeviceType } from 'balena-sdk';
@ -214,22 +224,21 @@ ${dockerignoreHelp}
}
// find which services use images that already exist locally
let servicesToSkip = await Promise.all(
project.descriptors.map(async function (d: any) {
let servicesToSkip: string[] = await Promise.all(
project.descriptors.map(async function (d: ImageDescriptor) {
// unconditionally build (or pull) if explicitly requested
if (opts.shouldPerformBuild) {
return d;
return '';
}
try {
await docker
.getImage(
(typeof d.image === 'string' ? d.image : d.image.tag) || '',
)
.getImage((isBuildConfig(d.image) ? d.image.tag : d.image) || '')
.inspect();
return d.serviceName;
} catch {
// Ignore
return '';
}
}),
);
@ -243,35 +252,35 @@ ${dockerignoreHelp}
compositionToBuild.services,
servicesToSkip,
);
let builtImagesByService: Dictionary<any> = {};
let builtImagesByService: Dictionary<BuiltImage> = {};
if (_.size(compositionToBuild.services) === 0) {
logger.logInfo(
'Everything is up to date (use --build to force a rebuild)',
);
} else {
const builtImages = await compose.buildProject(
const builtImages = await buildProject({
docker,
logger,
project.path,
project.name,
compositionToBuild,
opts.app.arch,
(opts.app?.is_for__device_type as DeviceType[])?.[0].slug,
opts.buildEmulated,
opts.buildOpts,
composeOpts.inlineLogs,
composeOpts.convertEol,
composeOpts.dockerfilePath,
composeOpts.nogitignore,
composeOpts.multiDockerignore,
);
projectPath: project.path,
projectName: project.name,
composition: compositionToBuild,
arch: opts.app.arch,
deviceType: (opts.app?.is_for__device_type as DeviceType[])?.[0].slug,
emulated: opts.buildEmulated,
buildOpts: opts.buildOpts,
inlineLogs: composeOpts.inlineLogs,
convertEol: composeOpts.convertEol,
dockerfilePath: composeOpts.dockerfilePath,
nogitignore: composeOpts.nogitignore,
multiDockerignore: composeOpts.multiDockerignore,
});
builtImagesByService = _.keyBy(builtImages, 'serviceName');
}
const images = project.descriptors.map(
const images: BuiltImage[] = project.descriptors.map(
(d) =>
builtImagesByService[d.serviceName] ?? {
serviceName: d.serviceName,
name: typeof d.image === 'string' ? d.image : d.image.tag,
name: (isBuildConfig(d.image) ? d.image.tag : d.image) || '',
logs: 'Build skipped; image for service already exists.',
props: {},
},

View File

@ -30,6 +30,8 @@ export interface BuiltImage {
dockerfile?: string;
projectType?: string;
size?: number;
startTime?: Date;
endTime?: Date;
};
serviceName: string;
}
@ -64,7 +66,7 @@ export interface ComposeCliFlags {
'multi-dockerignore': boolean;
nogitignore: boolean;
'noparent-check': boolean;
'registry-secrets'?: string | RegistrySecrets;
'registry-secrets'?: RegistrySecrets;
'convert-eol': boolean;
'noconvert-eol': boolean;
projectName?: string;

View File

@ -85,23 +85,6 @@ export function createProject(composePath, composeStr, projectName = null) {
};
}
/**
* Create a tar stream out of the local filesystem at the given directory,
* while optionally applying file filters such as '.dockerignore' and
* optionally converting text file line endings (CRLF to LF).
* @param {string} dir Source directory
* @param {import('./compose-types').TarDirectoryOptions} param
* @returns {Promise<import('stream').Readable>}
*/
export function tarDirectory(dir, param) {
let { nogitignore = false } = param;
if (nogitignore) {
return require('./compose_ts').tarDirectory(dir, param);
} else {
return originalTarDirectory(dir, param);
}
}
/**
* This is the CLI v10 / v11 "original" tarDirectory function. It is still
* around for the benefit of the `--gitignore` option, but is expected to be
@ -110,7 +93,7 @@ export function tarDirectory(dir, param) {
* @param {import('./compose-types').TarDirectoryOptions} param
* @returns {Promise<import('stream').Readable>}
*/
function originalTarDirectory(dir, param) {
export async function originalTarDirectory(dir, param) {
let {
preFinalizeCallback = null,
convertEol = false,
@ -185,265 +168,6 @@ function originalTarDirectory(dir, param) {
});
}
/**
* @param {string} str
* @param {number} len
* @returns {string}
*/
const truncateString = function (str, len) {
if (str.length < len) {
return str;
}
str = str.slice(0, len);
// return everything up to the last line. this is a cheeky way to avoid
// having to deal with splitting the string midway through some special
// character sequence.
return str.slice(0, str.lastIndexOf('\n'));
};
const LOG_LENGTH_MAX = 512 * 1024; // 512KB
export function buildProject(
docker,
logger,
projectPath,
projectName,
composition,
arch,
deviceType,
emulated,
buildOpts,
inlineLogs,
convertEol,
dockerfilePath,
nogitignore,
multiDockerignore,
) {
const Bluebird = require('bluebird');
const _ = require('lodash');
const humanize = require('humanize');
const compose = require('resin-compose-parse');
const builder = require('resin-multibuild');
const transpose = require('docker-qemu-transpose');
const { BALENA_ENGINE_TMP_PATH } = require('../config');
const {
checkBuildSecretsRequirements,
makeBuildTasks,
} = require('./compose_ts');
const qemu = require('./qemu');
const { toPosixPath } = builder.PathUtils;
logger.logInfo(`Building for ${arch}/${deviceType}`);
const imageDescriptors = compose.parse(composition);
const imageDescriptorsByServiceName = _.keyBy(
imageDescriptors,
'serviceName',
);
let renderer;
if (inlineLogs) {
renderer = new BuildProgressInline(
logger.streams['build'],
imageDescriptors,
);
} else {
const tty = require('./tty')(process.stdout);
renderer = new BuildProgressUI(tty, imageDescriptors);
}
renderer.start();
return Bluebird.resolve(checkBuildSecretsRequirements(docker, projectPath))
.then(() => qemu.installQemuIfNeeded(emulated, logger, arch, docker))
.tap(function (needsQemu) {
if (!needsQemu) {
return;
}
logger.logInfo('Emulation is enabled');
// Copy qemu into all build contexts
return Promise.all(
imageDescriptors.map(function (d) {
if (typeof d.image === 'string' || d.image.context == null) {
return;
}
// external image
return qemu.copyQemu(path.join(projectPath, d.image.context), arch);
}),
);
})
.then((
needsQemu, // Tar up the directory, ready for the build stream
) =>
Bluebird.resolve(
tarDirectory(projectPath, {
composition,
convertEol,
multiDockerignore,
nogitignore,
}),
)
.then((tarStream) =>
makeBuildTasks(
composition,
tarStream,
{ arch, deviceType },
logger,
projectName,
),
)
.map(function (/** @type {any} */ task) {
const d = imageDescriptorsByServiceName[task.serviceName];
// multibuild parses the composition internally so any tags we've
// set before are lost; re-assign them here
task.tag ??= [projectName, task.serviceName].join('_').toLowerCase();
if (typeof d.image !== 'string' && d.image.context != null) {
d.image.tag = task.tag;
}
// configure build opts appropriately
task.dockerOpts ??= {};
_.merge(task.dockerOpts, buildOpts, { t: task.tag });
if (typeof d.image !== 'string') {
/** @type {any} */
const context = d.image.context;
if (context?.args != null) {
task.dockerOpts.buildargs ??= {};
_.merge(task.dockerOpts.buildargs, context.args);
}
}
// Get the service-specific log stream
// Caveat: `multibuild.BuildTask` defines no `logStream` property
// but it's convenient to store it there; it's JS ultimately.
task.logStream = renderer.streams[task.serviceName];
task.logBuffer = [];
// Setup emulation if needed
if (task.external || !needsQemu) {
return [task, null];
}
const binPath = qemu.qemuPathInContext(
path.join(projectPath, task.context ?? ''),
);
if (task.buildStream == null) {
throw new Error(`No buildStream for task '${task.tag}'`);
}
return transpose
.transposeTarStream(
task.buildStream,
{
hostQemuPath: toPosixPath(binPath),
containerQemuPath: `/tmp/${qemu.QEMU_BIN_NAME}`,
qemuFileMode: 0o555,
},
dockerfilePath || undefined,
)
.then((/** @type {any} */ stream) => {
task.buildStream = stream;
})
.return([task, binPath]);
}),
)
.map(function ([task, qemuPath]) {
const captureStream = buildLogCapture(task.external, task.logBuffer);
if (task.external) {
// External image -- there's no build to be performed,
// just follow pull progress.
captureStream.pipe(task.logStream);
task.progressHook = pullProgressAdapter(captureStream);
} else {
task.streamHook = function (stream) {
let rawStream;
stream = createLogStream(stream);
if (qemuPath != null) {
const buildThroughStream = transpose.getBuildThroughStream({
hostQemuPath: toPosixPath(qemuPath),
containerQemuPath: `/tmp/${qemu.QEMU_BIN_NAME}`,
});
rawStream = stream.pipe(buildThroughStream);
} else {
rawStream = stream;
}
// `stream` sends out raw strings in contrast to `task.progressHook`
// where we're given objects. capture these strings as they come
// before we parse them.
return rawStream
.pipe(dropEmptyLinesStream())
.pipe(captureStream)
.pipe(buildProgressAdapter(inlineLogs))
.pipe(task.logStream);
};
}
return task;
})
.then(function (tasks) {
logger.logDebug('Prepared tasks; building...');
return builder
.performBuilds(tasks, docker, BALENA_ENGINE_TMP_PATH)
.then(function (builtImages) {
return Promise.all(
builtImages.map(function (builtImage) {
if (!builtImage.successful) {
/** @type {Error & {serviceName?: string}} */
const error = builtImage.error ?? new Error();
error.serviceName = builtImage.serviceName;
throw error;
}
const d = imageDescriptorsByServiceName[builtImage.serviceName];
const task = _.find(tasks, {
serviceName: builtImage.serviceName,
});
const image = {
serviceName: d.serviceName,
name: typeof d.image === 'string' ? d.image : d.image.tag,
logs: truncateString(task.logBuffer.join('\n'), LOG_LENGTH_MAX),
props: {
dockerfile: builtImage.dockerfile,
projectType: builtImage.projectType,
},
};
// Times here are timestamps, so test whether they're null
// before creating a date out of them, as `new Date(null)`
// creates a date representing UNIX time 0.
if (builtImage.startTime) {
image.props.startTime = new Date(builtImage.startTime);
}
if (builtImage.endTime) {
image.props.endTime = new Date(builtImage.endTime);
}
return docker
.getImage(image.name)
.inspect()
.get('Size')
.then((size) => {
image.props.size = size;
})
.return(image);
}),
);
})
.then(function (images) {
const summary = _(images)
.map(({ serviceName, props }) => [
serviceName,
`Image size: ${humanize.filesize(props.size)}`,
])
.fromPairs()
.value();
renderer.end(summary);
return images;
});
})
.finally(renderer.end);
}
/**
* @param {string} apiEndpoint
* @param {string} auth
@ -641,8 +365,7 @@ export const pushAndUpdateServiceImages = function (
Promise.all(
images.map(({ serviceImage, localImage, props, logs }, index) =>
Promise.all([
// @ts-ignore
localImage.inspect().get('Size'),
localImage.inspect().then((img) => img.Size),
retry(
// @ts-ignore
() => progress.push(localImage.name, reporters[index], opts),
@ -708,102 +431,7 @@ var pushProgressRenderer = function (tty, prefix) {
return fn;
};
var createLogStream = function (input) {
const split = require('split');
const stripAnsi = require('strip-ansi-stream');
return input.pipe(stripAnsi()).pipe(split());
};
var dropEmptyLinesStream = function () {
const through = require('through2');
return through(function (data, _enc, cb) {
const str = data.toString('utf-8');
if (str.trim()) {
this.push(str);
}
return cb();
});
};
var buildLogCapture = function (objectMode, buffer) {
const through = require('through2');
return through({ objectMode }, function (data, _enc, cb) {
// data from pull stream
if (data.error) {
buffer.push(`${data.error}`);
} else if (data.progress && data.status) {
buffer.push(`${data.progress}% ${data.status}`);
} else if (data.status) {
buffer.push(`${data.status}`);
// data from build stream
} else {
buffer.push(data);
}
return cb(null, data);
});
};
var buildProgressAdapter = function (inline) {
const through = require('through2');
const stepRegex = /^\s*Step\s+(\d+)\/(\d+)\s*: (.+)$/;
let step = null;
let numSteps = null;
let progress;
return through({ objectMode: true }, function (str, _enc, cb) {
if (str == null) {
return cb(null, str);
}
if (inline) {
return cb(null, { status: str });
}
if (/^Successfully tagged /.test(str)) {
progress = undefined;
} else {
const match = stepRegex.exec(str);
if (match) {
step = match[1];
numSteps ??= match[2];
str = match[3];
}
if (step != null) {
str = `Step ${step}/${numSteps}: ${str}`;
progress = Math.floor(
(parseInt(step, 10) * 100) / parseInt(numSteps, 10),
);
}
}
return cb(null, { status: str, progress });
});
};
var pullProgressAdapter = (outStream) =>
function ({ status, id, percentage, error, errorDetail }) {
if (status != null) {
status = status.replace(/^Status: /, '');
}
if (id != null) {
status = `${id}: ${status}`;
}
if (percentage === 100) {
percentage = undefined;
}
return outStream.write({
status,
progress: percentage,
error: errorDetail?.message ?? error,
});
};
class BuildProgressUI {
export class BuildProgressUI {
constructor(tty, descriptors) {
this._handleEvent = this._handleEvent.bind(this);
this._handleInterrupt = this._handleInterrupt.bind(this);
@ -978,7 +606,7 @@ class BuildProgressUI {
}
}
class BuildProgressInline {
export class BuildProgressInline {
constructor(outStream, descriptors) {
this.start = this.start.bind(this);
this.end = this.end.bind(this);
@ -1029,7 +657,7 @@ class BuildProgressInline {
if (summary != null) {
this._services.forEach((service) => {
this._renderEvent(service, summary[service]);
this._renderEvent(service, { status: summary[service] });
});
}

View File

@ -14,17 +14,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { flags } from '@oclif/command';
import { BalenaSDK } from 'balena-sdk';
import type { TransposeOptions } from 'docker-qemu-transpose';
import type * as Dockerode from 'dockerode';
import * as _ from 'lodash';
import { promises as fs } from 'fs';
import * as path from 'path';
import type { Composition } from 'resin-compose-parse';
import type {
BuildConfig,
Composition,
ImageDescriptor,
} from 'resin-compose-parse';
import type * as MultiBuild from 'resin-multibuild';
import type { Readable } from 'stream';
import type { Duplex, Readable } from 'stream';
import type { Pack } from 'tar-stream';
import { ExpectedError } from '../errors';
import { getBalenaSdk, getChalk, stripIndent } from './lazy';
import {
BuiltImage,
ComposeCliFlags,
@ -34,16 +40,9 @@ import {
TaggedImage,
TarDirectoryOptions,
} from './compose-types';
import { DeviceInfo } from './device/api';
import type { DeviceInfo } from './device/api';
import { getBalenaSdk, getChalk, stripIndent } from './lazy';
import Logger = require('./logger');
import { flags } from '@oclif/command';
export interface RegistrySecrets {
[registryAddress: string]: {
username: string;
password: string;
};
}
const exists = async (filename: string) => {
try {
@ -54,8 +53,8 @@ const exists = async (filename: string) => {
}
};
const LOG_LENGTH_MAX = 512 * 1024; // 512KB
const compositionFileNames = ['docker-compose.yml', 'docker-compose.yaml'];
const hr =
'----------------------------------------------------------------------';
@ -131,6 +130,372 @@ async function resolveProject(
return [composeFileName, composeFileContents];
}
interface BuildTaskPlus extends MultiBuild.BuildTask {
logBuffer?: string[];
}
interface Renderer {
start: () => void;
end: (buildSummaryByService?: Dictionary<string>) => void;
streams: Dictionary<NodeJS.ReadWriteStream>;
}
export async function buildProject(opts: {
docker: Dockerode;
logger: Logger;
projectPath: string;
projectName: string;
composition: Composition;
arch: string;
deviceType: string;
emulated: boolean;
buildOpts: import('./docker').BuildOpts;
inlineLogs?: boolean;
convertEol: boolean;
dockerfilePath?: string;
nogitignore: boolean;
multiDockerignore: boolean;
}): Promise<BuiltImage[]> {
const { logger, projectName } = opts;
logger.logInfo(`Building for ${opts.arch}/${opts.deviceType}`);
let buildSummaryByService: Dictionary<string> | undefined;
const compose = await import('resin-compose-parse');
const imageDescriptors = compose.parse(opts.composition);
const imageDescriptorsByServiceName = _.keyBy(
imageDescriptors,
'serviceName',
);
const renderer = await startRenderer({ imageDescriptors, ...opts });
try {
await checkBuildSecretsRequirements(opts.docker, opts.projectPath);
const needsQemu = await installQemuIfNeeded({ ...opts, imageDescriptors });
const tarStream = await tarDirectory(opts.projectPath, opts);
const tasks: BuildTaskPlus[] = await makeBuildTasks(
opts.composition,
tarStream,
opts,
logger,
projectName,
);
setTaskAttributes({ tasks, imageDescriptorsByServiceName, ...opts });
const transposeOptArray: Array<
TransposeOptions | undefined
> = await Promise.all(
tasks.map((task) => {
// Setup emulation if needed
if (needsQemu && !task.external) {
return qemuTransposeBuildStream({ task, ...opts });
}
}),
);
await Promise.all(
// transposeOptions may be undefined. That's OK.
transposeOptArray.map((transposeOptions, index) =>
setTaskProgressHooks({
task: tasks[index],
renderer,
transposeOptions,
...opts,
}),
),
);
logger.logDebug('Prepared tasks; building...');
const { BALENA_ENGINE_TMP_PATH } = await import('../config');
const builder = await import('resin-multibuild');
const builtImages = await builder.performBuilds(
tasks,
opts.docker,
BALENA_ENGINE_TMP_PATH,
);
const [images, summaryMsgByService] = await inspectBuiltImages({
builtImages,
imageDescriptorsByServiceName,
tasks,
...opts,
});
buildSummaryByService = summaryMsgByService;
return images;
} finally {
renderer.end(buildSummaryByService);
}
}
async function startRenderer({
imageDescriptors,
inlineLogs,
logger,
}: {
imageDescriptors: ImageDescriptor[];
inlineLogs?: boolean;
logger: Logger;
}): Promise<Renderer> {
let renderer: Renderer;
if (inlineLogs) {
renderer = new (await import('./compose')).BuildProgressInline(
logger.streams['build'],
imageDescriptors,
);
} else {
const tty = (await import('./tty'))(process.stdout);
renderer = new (await import('./compose')).BuildProgressUI(
tty,
imageDescriptors,
);
}
renderer.start();
return renderer;
}
async function installQemuIfNeeded({
arch,
docker,
emulated,
imageDescriptors,
logger,
projectPath,
}: {
arch: string;
docker: Dockerode;
emulated: boolean;
imageDescriptors: ImageDescriptor[];
logger: Logger;
projectPath: string;
}): Promise<boolean> {
const qemu = await import('./qemu');
const needsQemu = await qemu.installQemuIfNeeded(
emulated,
logger,
arch,
docker,
);
if (needsQemu) {
logger.logInfo('Emulation is enabled');
// Copy qemu into all build contexts
await Promise.all(
imageDescriptors.map(function (d) {
if (isBuildConfig(d.image)) {
return qemu.copyQemu(
path.join(projectPath, d.image.context || '.'),
arch,
);
}
}),
);
}
return needsQemu;
}
function setTaskAttributes({
tasks,
buildOpts,
imageDescriptorsByServiceName,
projectName,
}: {
tasks: BuildTaskPlus[];
buildOpts: import('./docker').BuildOpts;
imageDescriptorsByServiceName: Dictionary<ImageDescriptor>;
projectName: string;
}) {
for (const task of tasks) {
const d = imageDescriptorsByServiceName[task.serviceName];
// multibuild (splitBuildStream) parses the composition internally so
// any tags we've set before are lost; re-assign them here
task.tag ??= [projectName, task.serviceName].join('_').toLowerCase();
if (isBuildConfig(d.image)) {
d.image.tag = task.tag;
}
// reassign task.args so that the `--buildArg` flag takes precedence
// over assignments in the docker-compose.yml file (service.build.args)
task.args = {
...task.args,
...buildOpts.buildargs,
};
// Docker image build options
task.dockerOpts ??= {};
if (task.args && Object.keys(task.args).length) {
task.dockerOpts.buildargs = {
...task.dockerOpts.buildargs,
...task.args,
};
}
_.merge(task.dockerOpts, buildOpts, { t: task.tag });
}
}
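To illustrate the precedence rule above (argument names are made up for this sketch): later spread properties overwrite earlier ones, so `--buildArg` values win over same-named `service.build.args` entries from the docker-compose.yml file.
// Hypothetical values for illustration only:
const composeFileArgs = { COMPOSE_ARG: 'A', SHARED: 'from-compose-file' };
const cliBuildArgs = { SHARED: 'from-cli', EXTRA: 'e' }; // parsed from -B/--buildArg
// Object spread: later properties overwrite earlier ones,
// so the CLI values take precedence:
const merged = { ...composeFileArgs, ...cliBuildArgs };
// => { COMPOSE_ARG: 'A', SHARED: 'from-cli', EXTRA: 'e' }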
async function qemuTransposeBuildStream({
task,
dockerfilePath,
projectPath,
}: {
task: BuildTaskPlus;
dockerfilePath?: string;
projectPath: string;
}): Promise<TransposeOptions> {
const qemu = await import('./qemu');
const binPath = qemu.qemuPathInContext(
path.join(projectPath, task.context ?? ''),
);
if (task.buildStream == null) {
throw new Error(`No buildStream for task '${task.tag}'`);
}
const transpose = await import('docker-qemu-transpose');
const { toPosixPath } = (await import('resin-multibuild')).PathUtils;
const transposeOptions: TransposeOptions = {
hostQemuPath: toPosixPath(binPath),
containerQemuPath: `/tmp/${qemu.QEMU_BIN_NAME}`,
qemuFileMode: 0o555,
};
task.buildStream = (await transpose.transposeTarStream(
task.buildStream,
transposeOptions,
dockerfilePath || undefined,
)) as Pack;
return transposeOptions;
}
async function setTaskProgressHooks({
inlineLogs,
renderer,
task,
transposeOptions,
}: {
inlineLogs?: boolean;
renderer: Renderer;
task: BuildTaskPlus;
transposeOptions?: import('docker-qemu-transpose').TransposeOptions;
}) {
const transpose = await import('docker-qemu-transpose');
// Get the service-specific log stream
const logStream = renderer.streams[task.serviceName];
task.logBuffer = [];
const captureStream = buildLogCapture(task.external, task.logBuffer);
if (task.external) {
// External image -- there's no build to be performed,
// just follow pull progress.
captureStream.pipe(logStream);
task.progressHook = pullProgressAdapter(captureStream);
} else {
task.streamHook = function (stream) {
let rawStream;
stream = createLogStream(stream);
if (transposeOptions) {
const buildThroughStream = transpose.getBuildThroughStream(
transposeOptions,
);
rawStream = stream.pipe(buildThroughStream);
} else {
rawStream = stream;
}
// `stream` sends out raw strings in contrast to `task.progressHook`
// where we're given objects. capture these strings as they come
// before we parse them.
return rawStream
.pipe(dropEmptyLinesStream())
.pipe(captureStream)
.pipe(buildProgressAdapter(!!inlineLogs))
.pipe(logStream);
};
}
}
async function inspectBuiltImages({
builtImages,
docker,
imageDescriptorsByServiceName,
tasks,
}: {
builtImages: MultiBuild.LocalImage[];
docker: Dockerode;
imageDescriptorsByServiceName: Dictionary<ImageDescriptor>;
tasks: BuildTaskPlus[];
}): Promise<[BuiltImage[], Dictionary<string>]> {
const images: BuiltImage[] = await Promise.all(
builtImages.map((builtImage: MultiBuild.LocalImage) =>
inspectBuiltImage({
builtImage,
docker,
imageDescriptorsByServiceName,
tasks,
}),
),
);
const humanize = require('humanize');
const summaryMsgByService: { [serviceName: string]: string } = {};
for (const image of images) {
summaryMsgByService[image.serviceName] = `Image size: ${humanize.filesize(
image.props.size,
)}`;
}
return [images, summaryMsgByService];
}
async function inspectBuiltImage({
builtImage,
docker,
imageDescriptorsByServiceName,
tasks,
}: {
builtImage: MultiBuild.LocalImage;
docker: Dockerode;
imageDescriptorsByServiceName: Dictionary<ImageDescriptor>;
tasks: BuildTaskPlus[];
}): Promise<BuiltImage> {
if (!builtImage.successful) {
const error: Error & { serviceName?: string } =
builtImage.error ?? new Error();
error.serviceName = builtImage.serviceName;
throw error;
}
const d = imageDescriptorsByServiceName[builtImage.serviceName];
const task = _.find(tasks, {
serviceName: builtImage.serviceName,
});
const image: BuiltImage = {
serviceName: d.serviceName,
name: (isBuildConfig(d.image) ? d.image.tag : d.image) || '',
logs: truncateString(task?.logBuffer?.join('\n') || '', LOG_LENGTH_MAX),
props: {
dockerfile: builtImage.dockerfile,
projectType: builtImage.projectType,
},
};
// Times here are timestamps, so test whether they're null
// before creating a date out of them, as `new Date(null)`
// creates a date representing UNIX time 0.
if (builtImage.startTime) {
image.props.startTime = new Date(builtImage.startTime);
}
if (builtImage.endTime) {
image.props.endTime = new Date(builtImage.endTime);
}
image.props.size = (await docker.getImage(image.name).inspect()).Size;
return image;
}
/**
* Load the ".balena/balena.yml" file (or resin.yml, or yaml or json),
* which contains "build metadata" for features like "build secrets" and
@ -207,9 +572,9 @@ async function getServiceDirsFromComposition(
const relPrefix = '.' + path.sep;
for (const [serviceName, service] of Object.entries(composition.services)) {
let dir =
typeof service.build === 'string'
(typeof service.build === 'string'
? service.build
: service.build?.context || '.';
: service.build?.context) || '.';
// Convert forward slashes to backslashes on Windows
dir = path.normalize(dir);
// Make sure the path is relative to the project directory
@ -230,15 +595,58 @@ async function getServiceDirsFromComposition(
return serviceDirs;
}
/**
* Return true if `image` is actually a docker-compose.yml `services.service.build`
* configuration object, rather than an "external image" (`services.service.image`).
*
The `image` argument may therefore refer to either a `build` or `image` property
of a service in a docker-compose.yml file, which is a bit confusing, but it matches
* the `ImageDescriptor.image` property as defined by `resin-compose-parse`.
*
* Note that `resin-compose-parse` "normalizes" the docker-compose.yml file such
* that, if `services.service.build` is a string, it is converted to a BuildConfig
* object with the string value assigned to `services.service.build.context`:
* https://github.com/balena-io-modules/resin-compose-parse/blob/v2.1.3/src/compose.ts#L166-L167
* This is why this implementation works when `services.service.build` is defined
* as a string in the docker-compose.yml file.
*
* @param image The `ImageDescriptor.image` attribute parsed with `resin-compose-parse`
*/
export function isBuildConfig(
image: string | BuildConfig,
): image is BuildConfig {
return image != null && typeof image !== 'string';
}
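A minimal sketch of using the type guard at a call site; `imageNameOrTag` is a hypothetical helper, but the narrowing mirrors the `d.image` handling elsewhere in this diff (`BuildConfig` is already imported in this file):
function imageNameOrTag(image: string | BuildConfig): string {
	if (isBuildConfig(image)) {
		// narrowed to BuildConfig: a service with a `build` section
		return image.tag || '';
	}
	// narrowed to string: an external image reference (`services.service.image`)
	return image;
}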
/**
* Create a tar stream out of the local filesystem at the given directory,
* while optionally applying file filters such as '.dockerignore' and
* optionally converting text file line endings (CRLF to LF).
* @param dir Source directory
* @param param Options
* @returns {Promise<import('stream').Readable>}
* @returns Readable stream
*/
export async function tarDirectory(
dir: string,
param: TarDirectoryOptions,
): Promise<import('stream').Readable> {
const { nogitignore = false } = param;
if (nogitignore) {
return newTarDirectory(dir, param);
} else {
return (await import('./compose')).originalTarDirectory(dir, param);
}
}
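A hedged usage sketch of the dispatching function above; the option names are taken from the call sites in this diff, and `projectPath` and `composition` are assumed to be in scope:
const tarStream = await tarDirectory(projectPath, {
	composition,
	convertEol: true, // convert CRLF line endings to LF in text files
	multiDockerignore: false, // use a single project-root .dockerignore
	nogitignore: true, // take the newer, dockerignore-only filtering path
});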
/**
* Create a tar stream out of the local filesystem at the given directory,
* while optionally applying file filters such as '.dockerignore' and
* optionally converting text file line endings (CRLF to LF).
* @param dir Source directory
* @param param Options
* @returns Readable stream
*/
async function newTarDirectory(
dir: string,
{
composition,
@ -441,7 +849,7 @@ export async function checkBuildSecretsRequirements(
export async function getRegistrySecrets(
sdk: BalenaSDK,
inputFilename?: string,
): Promise<RegistrySecrets> {
): Promise<MultiBuild.RegistrySecrets> {
if (inputFilename != null) {
return await parseRegistrySecrets(inputFilename);
}
@ -464,7 +872,7 @@ export async function getRegistrySecrets(
async function parseRegistrySecrets(
secretsFilename: string,
): Promise<RegistrySecrets> {
): Promise<MultiBuild.RegistrySecrets> {
try {
let isYaml = false;
if (/.+\.ya?ml$/i.test(secretsFilename)) {
@ -661,7 +1069,7 @@ async function validateSpecifiedDockerfile(
export interface ProjectValidationResult {
dockerfilePath: string;
registrySecrets: RegistrySecrets;
registrySecrets: MultiBuild.RegistrySecrets;
}
/**
@ -797,7 +1205,7 @@ async function pushServiceImages(
export async function deployProject(
docker: import('docker-toolbelt'),
logger: Logger,
composition: import('resin-compose-parse').Composition,
composition: Composition,
images: BuiltImage[],
appId: number,
userId: number,
@ -907,6 +1315,123 @@ export function createRunLoop(tick: (...args: any[]) => void) {
return runloop;
}
function createLogStream(input: Readable) {
const split = require('split') as typeof import('split');
const stripAnsi = require('strip-ansi-stream');
return input.pipe<Duplex>(stripAnsi()).pipe(split());
}
function dropEmptyLinesStream() {
const through = require('through2') as typeof import('through2');
return through(function (data, _enc, cb) {
const str = data.toString('utf-8');
if (str.trim()) {
this.push(str);
}
return cb();
});
}
function buildLogCapture(objectMode: boolean, buffer: string[]) {
const through = require('through2') as typeof import('through2');
return through({ objectMode }, function (data, _enc, cb) {
// data from pull stream
if (data.error) {
buffer.push(`${data.error}`);
} else if (data.progress && data.status) {
buffer.push(`${data.progress}% ${data.status}`);
} else if (data.status) {
buffer.push(`${data.status}`);
// data from build stream
} else {
buffer.push(data);
}
return cb(null, data);
});
}
function buildProgressAdapter(inline: boolean) {
const through = require('through2') as typeof import('through2');
const stepRegex = /^\s*Step\s+(\d+)\/(\d+)\s*: (.+)$/;
let step = '';
let numSteps = '';
let progress: number | undefined;
return through({ objectMode: true }, function (str, _enc, cb) {
if (str == null) {
return cb(null, str);
}
if (inline) {
return cb(null, { status: str });
}
if (!/^Successfully tagged /.test(str)) {
const match = stepRegex.exec(str);
if (match) {
step = match[1];
numSteps = numSteps || match[2];
str = match[3];
}
if (step) {
str = `Step ${step}/${numSteps}: ${str}`;
progress = Math.floor(
(parseInt(step, 10) * 100) / parseInt(numSteps, 10),
);
}
} else {
progress = undefined;
}
return cb(null, { status: str, progress });
});
}
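For example, feeding a typical Docker status line through the adapter in non-inline mode yields a status/progress object:
const adapter = buildProgressAdapter(false); // non-inline mode
adapter.on('data', (obj) => console.log(obj));
adapter.write('Step 2/4 : RUN apt-get update');
// logs: { status: 'Step 2/4: RUN apt-get update', progress: 50 }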
function pullProgressAdapter(outStream: Duplex) {
return function ({
status,
id,
percentage,
error,
errorDetail,
}: {
status: string;
id: string;
percentage: number | undefined;
error: Error;
errorDetail: Error;
}) {
if (status != null) {
status = status.replace(/^Status: /, '');
}
if (id != null) {
status = `${id}: ${status}`;
}
if (percentage === 100) {
percentage = undefined;
}
return outStream.write({
status,
progress: percentage,
error: errorDetail?.message ?? error,
});
};
}
function truncateString(str: string, len: number): string {
if (str.length < len) {
return str;
}
str = str.slice(0, len);
// return everything up to the last line. this is a cheeky way to avoid
// having to deal with splitting the string midway through some special
// character sequence.
return str.slice(0, str.lastIndexOf('\n'));
}
export const composeCliFlags: flags.Input<ComposeCliFlags> = {
emulated: flags.boolean({
description:

View File

@ -30,13 +30,18 @@ const getBuilderLogPushEndpoint = function (baseUrl, buildId, owner, app) {
return `https://builder.${baseUrl}/v1/pushLogs?${args}`;
};
/**
* @param {import('docker-toolbelt')} docker
* @param {string} imageId
* @param {string} bufferFile
*/
const bufferImage = function (docker, imageId, bufferFile) {
const streamUtils = require('./streams');
const image = docker.getImage(imageId);
const imageMetadata = image.inspect();
const sizePromise = image.inspect().then((img) => img.Size);
return Promise.all([image.get(), imageMetadata.get('Size')]).then(
return Promise.all([image.get(), sizePromise]).then(
([imageStream, imageSize]) =>
streamUtils.buffer(imageStream, bufferFile).then((bufferedStream) => {
// @ts-ignore adding an extra property
@ -150,14 +155,17 @@ const uploadLogs = function (logs, token, url, buildId, username, appName) {
});
};
/*
opts must be a hash with the following keys:
- appName: the name of the app to deploy to
- imageName: the name of the image to deploy
- buildLogs: a string with build output
- shouldUploadLogs
*/
/**
* @param {import('docker-toolbelt')} docker
* @param {import('./logger')} logger
* @param {string} token
* @param {string} username
* @param {string} url
* @param {{appName: string; imageName: string; buildLogs: string; shouldUploadLogs: boolean}} opts
* - appName: the name of the app to deploy to
* - imageName: the name of the image to deploy
* - buildLogs: a string with build output
*/
export const deployLegacy = async function (
docker,
logger,

View File

@ -32,6 +32,7 @@ import {
checkBuildSecretsRequirements,
loadProject,
makeBuildTasks,
tarDirectory,
} from '../compose_ts';
import Logger = require('../logger');
import { DeviceAPI, DeviceInfo } from './api';
@ -121,7 +122,6 @@ async function environmentFromInput(
}
export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
const { tarDirectory } = await import('../compose');
const { exitWithExpectedError } = await import('../../errors');
const { displayDeviceLogs } = await import('./logs');
@ -400,7 +400,6 @@ export async function rebuildSingleTask(
// this should provide the following callback
containerIdCb?: (id: string) => void,
): Promise<string> {
const { tarDirectory } = await import('../compose');
const multibuild = await import('resin-multibuild');
// First we run the build task, to get the new image id
let buildLogs = '';

View File

@ -91,48 +91,6 @@ const generateConnectOpts = async function (opts) {
return connectOpts;
};
const parseBuildArgs = function (args) {
if (!Array.isArray(args)) {
args = [args];
}
const buildArgs = {};
args.forEach(function (arg) {
// note: [^] matches any character, including line breaks
const pair = /^([^\s]+?)=([^]*)$/.exec(arg);
if (pair != null) {
buildArgs[pair[1]] = pair[2] ?? '';
} else {
throw new ExpectedError(`Could not parse build argument: '${arg}'`);
}
});
return buildArgs;
};
export function generateBuildOpts(options) {
const opts = {};
if (options.tag != null) {
opts.t = options.tag;
}
if (options.nocache != null) {
opts.nocache = true;
}
if (options['cache-from']?.trim()) {
opts.cachefrom = options['cache-from'].split(',').filter((i) => !!i.trim());
}
if (options.pull != null) {
opts.pull = true;
}
if (options.squash != null) {
opts.squash = true;
}
if (options.buildArg != null) {
opts.buildargs = parseBuildArgs(options.buildArg);
}
if (!_.isEmpty(options['registry-secrets'])) {
opts.registryconfig = options['registry-secrets'];
}
return opts;
}
/**
* @param {{
* ca?: string; // path to ca (Certificate Authority) file (TLS)

View File

@ -17,6 +17,8 @@
import type * as dockerode from 'dockerode';
import { flags } from '@oclif/command';
import { ExpectedError } from '../errors';
import { parseAsInteger } from './validation';
export * from './docker-js';
@ -98,6 +100,70 @@ Implements the same feature as the "docker build --cache-from" option.`,
...dockerConnectionCliFlags,
};
export interface BuildOpts {
buildargs?: Dictionary<string>;
cachefrom?: string[];
nocache?: boolean;
pull?: boolean;
registryconfig?: import('resin-multibuild').RegistrySecrets;
squash?: boolean;
t?: string;
}
function parseBuildArgs(args: string[]): Dictionary<string> {
if (!Array.isArray(args)) {
args = [args];
}
const buildArgs: Dictionary<string> = {};
args.forEach(function (arg) {
// note: [^] matches any character, including line breaks
const pair = /^([^\s]+?)=([^]*)$/.exec(arg);
if (pair != null) {
buildArgs[pair[1]] = pair[2] ?? '';
} else {
throw new ExpectedError(`Could not parse build argument: '${arg}'`);
}
});
return buildArgs;
}
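Worked examples (hypothetical inputs): the non-greedy key group splits on the first `=` only, so values may themselves contain `=` characters:
parseBuildArgs(['FOO=bar', 'URL=https://example.test?a=1']);
// => { FOO: 'bar', URL: 'https://example.test?a=1' }
parseBuildArgs(['NOEQUALS']);
// throws ExpectedError: Could not parse build argument: 'NOEQUALS'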
export function generateBuildOpts(options: {
buildArg?: string[];
'cache-from'?: string;
nocache: boolean;
pull?: boolean;
'registry-secrets'?: import('resin-multibuild').RegistrySecrets;
squash: boolean;
tag?: string;
}): BuildOpts {
const opts: BuildOpts = {};
if (options.buildArg != null) {
opts.buildargs = parseBuildArgs(options.buildArg);
}
if (options['cache-from']?.trim()) {
opts.cachefrom = options['cache-from'].split(',').filter((i) => !!i.trim());
}
if (options.nocache != null) {
opts.nocache = true;
}
if (options.pull != null) {
opts.pull = true;
}
if (
options['registry-secrets'] &&
Object.keys(options['registry-secrets']).length
) {
opts.registryconfig = options['registry-secrets'];
}
if (options.squash != null) {
opts.squash = true;
}
if (options.tag != null) {
opts.t = options.tag;
}
return opts;
}
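A worked example of the flag-to-options mapping (all values hypothetical):
generateBuildOpts({
	buildArg: ['MY_VAR=1'],
	'cache-from': 'my/img1,my/img2',
	nocache: true,
	squash: true,
	tag: 'myapp_service1',
});
// => {
//   buildargs: { MY_VAR: '1' },
//   cachefrom: ['my/img1', 'my/img2'],
//   nocache: true,
//   squash: true,
//   t: 'myapp_service1',
// }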
export async function isBalenaEngine(docker: dockerode): Promise<boolean> {
// dockerVersion.Engine should equal 'balena-engine' for the current/latest
// version of balenaEngine, but it was at one point (mis)spelt 'balaena':

View File

@ -24,7 +24,7 @@ import type { Pack } from 'tar-stream';
import { ExpectedError } from '../errors';
import { exitWithExpectedError } from '../errors';
import { tarDirectory } from './compose';
import { tarDirectory } from './compose_ts';
import { getVisuals, stripIndent } from './lazy';
import Logger = require('./logger');

View File

@ -16,6 +16,7 @@
*/
import { expect } from 'chai';
import * as _ from 'lodash';
import mock = require('mock-require');
import { promises as fs } from 'fs';
import * as path from 'path';
@ -45,13 +46,16 @@ const commonResponseLines: { [key: string]: string[] } = {
const commonQueryParams = {
t: '${tag}',
buildargs: '{}',
buildargs: {},
labels: '',
};
const commonComposeQueryParams = {
t: '${tag}',
buildargs: '{"MY_VAR_1":"This is a variable","MY_VAR_2":"Also a variable"}',
buildargs: {
MY_VAR_1: 'This is a variable',
MY_VAR_2: 'Also a variable',
},
labels: '',
};
@ -375,19 +379,26 @@ describe('balena build', function () {
'utf8',
);
const expectedQueryParamsByService = {
service1: Object.entries({
...commonComposeQueryParams,
buildargs:
'{"BARG1":"b1","barg2":"B2","MY_VAR_1":"This is a variable","MY_VAR_2":"Also a variable","SERVICE1_VAR":"This is a service specific variable"}',
cachefrom: '["my/img1","my/img2"]',
}),
service2: Object.entries({
...commonComposeQueryParams,
buildargs:
'{"BARG1":"b1","barg2":"B2","MY_VAR_1":"This is a variable","MY_VAR_2":"Also a variable"}',
cachefrom: '["my/img1","my/img2"]',
dockerfile: 'Dockerfile-alt',
}),
service1: Object.entries(
_.merge({}, commonComposeQueryParams, {
buildargs: {
COMPOSE_ARG: 'A',
barg: 'b',
SERVICE1_VAR: 'This is a service specific variable',
},
cachefrom: ['my/img1', 'my/img2'],
}),
),
service2: Object.entries(
_.merge({}, commonComposeQueryParams, {
buildargs: {
COMPOSE_ARG: 'A',
barg: 'b',
},
cachefrom: ['my/img1', 'my/img2'],
dockerfile: 'Dockerfile-alt',
}),
),
};
const expectedResponseLines: string[] = [
...commonResponseLines[responseFilename],
@ -417,7 +428,7 @@ describe('balena build', function () {
}
docker.expectGetInfo({});
await testDockerBuildStream({
commandLine: `build ${projectPath} --deviceType nuc --arch amd64 --convert-eol -G -B BARG1=b1 -B barg2=B2 --cache-from my/img1,my/img2`,
commandLine: `build ${projectPath} --deviceType nuc --arch amd64 --convert-eol -G -B COMPOSE_ARG=A -B barg=b --cache-from my/img1,my/img2`,
dockerMock: docker,
expectedFilesByService,
expectedQueryParamsByService,
@ -464,15 +475,19 @@ describe('balena build', function () {
'utf8',
);
const expectedQueryParamsByService = {
service1: Object.entries({
...commonComposeQueryParams,
buildargs:
'{"MY_VAR_1":"This is a variable","MY_VAR_2":"Also a variable","SERVICE1_VAR":"This is a service specific variable"}',
}),
service2: Object.entries({
...commonComposeQueryParams,
dockerfile: 'Dockerfile-alt',
}),
service1: Object.entries(
_.merge({}, commonComposeQueryParams, {
buildargs: { SERVICE1_VAR: 'This is a service specific variable' },
}),
),
service2: Object.entries(
_.merge({}, commonComposeQueryParams, {
buildargs: {
COMPOSE_ARG: 'an argument defined in the docker-compose.yml file',
},
dockerfile: 'Dockerfile-alt',
}),
),
};
const expectedResponseLines: string[] = [
...commonResponseLines[responseFilename],

View File

@ -17,6 +17,7 @@
import { expect } from 'chai';
import { promises as fs } from 'fs';
import * as _ from 'lodash';
import * as path from 'path';
import * as sinon from 'sinon';
@ -53,14 +54,14 @@ const commonQueryParams = [
['labels', ''],
];
const commonComposeQueryParams = [
['t', '${tag}'],
[
'buildargs',
'{"MY_VAR_1":"This is a variable","MY_VAR_2":"Also a variable"}',
],
['labels', ''],
];
const commonComposeQueryParams = {
t: '${tag}',
buildargs: {
MY_VAR_1: 'This is a variable',
MY_VAR_2: 'Also a variable',
},
labels: '',
};
const hr =
'----------------------------------------------------------------------';
@ -268,15 +269,19 @@ describe('balena deploy', function () {
'utf8',
);
const expectedQueryParamsByService = {
service1: [
['t', '${tag}'],
[
'buildargs',
'{"MY_VAR_1":"This is a variable","MY_VAR_2":"Also a variable","SERVICE1_VAR":"This is a service specific variable"}',
],
['labels', ''],
],
service2: [...commonComposeQueryParams, ['dockerfile', 'Dockerfile-alt']],
service1: Object.entries(
_.merge({}, commonComposeQueryParams, {
buildargs: { SERVICE1_VAR: 'This is a service specific variable' },
}),
),
service2: Object.entries(
_.merge({}, commonComposeQueryParams, {
buildargs: {
COMPOSE_ARG: 'an argument defined in the docker-compose.yml file',
},
dockerfile: 'Dockerfile-alt',
}),
),
};
const expectedResponseLines: string[] = [
...commonResponseLines[responseFilename],

View File

@ -465,7 +465,7 @@ describe('balena push', function () {
const expectedFiles: ExpectedTarStreamFiles = {
'.balena/balena.yml': { fileSize: 197, type: 'file' },
'.dockerignore': { fileSize: 22, type: 'file' },
'docker-compose.yml': { fileSize: 245, type: 'file' },
'docker-compose.yml': { fileSize: 332, type: 'file' },
'service1/Dockerfile.template': { fileSize: 144, type: 'file' },
'service1/file1.sh': { fileSize: 12, type: 'file' },
'service2/Dockerfile-alt': { fileSize: 40, type: 'file' },
@ -523,7 +523,7 @@ describe('balena push', function () {
const expectedFiles: ExpectedTarStreamFiles = {
'.balena/balena.yml': { fileSize: 197, type: 'file' },
'.dockerignore': { fileSize: 22, type: 'file' },
'docker-compose.yml': { fileSize: 245, type: 'file' },
'docker-compose.yml': { fileSize: 332, type: 'file' },
'service1/Dockerfile.template': { fileSize: 144, type: 'file' },
'service1/file1.sh': { fileSize: 12, type: 'file' },
'service1/test-ignore.txt': { fileSize: 12, type: 'file' },

View File

@ -29,7 +29,12 @@ import { URL } from 'url';
import { stripIndent } from '../lib/utils/lazy';
import { BuilderMock } from './builder-mock';
import { DockerMock } from './docker-mock';
import { cleanOutput, fillTemplateArray, runCommand } from './helpers';
import {
cleanOutput,
deepJsonParse,
deepTemplateReplace,
runCommand,
} from './helpers';
import {
ExpectedTarStreamFile,
ExpectedTarStreamFiles,
@ -152,7 +157,7 @@ export async function testDockerBuildStream(o: {
commandLine: string;
dockerMock: DockerMock;
expectedFilesByService: ExpectedTarStreamFilesByService;
expectedQueryParamsByService: { [service: string]: string[][] };
expectedQueryParamsByService: { [service: string]: any[][] };
expectedErrorLines?: string[];
expectedExitCode?: number;
expectedResponseLines: string[];
@ -161,15 +166,15 @@ export async function testDockerBuildStream(o: {
responseBody: string;
services: string[]; // e.g. ['main'] or ['service1', 'service2']
}) {
const expectedErrorLines = fillTemplateArray(o.expectedErrorLines || [], o);
const expectedResponseLines = fillTemplateArray(o.expectedResponseLines, o);
const expectedErrorLines = deepTemplateReplace(o.expectedErrorLines || [], o);
const expectedResponseLines = deepTemplateReplace(o.expectedResponseLines, o);
for (const service of o.services) {
// tagPrefix is, for example, 'myApp' if the path is 'path/to/myApp'
const tagPrefix = o.projectPath.split(path.sep).pop();
const tag = `${tagPrefix}_${service}`;
const expectedFiles = o.expectedFilesByService[service];
const expectedQueryParams = fillTemplateArray(
const expectedQueryParams = deepTemplateReplace(
o.expectedQueryParamsByService[service],
{ tag, ...o },
);
@ -181,7 +186,9 @@ export async function testDockerBuildStream(o: {
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(expectedQueryParams);
expect(deepJsonParse(queryParams)).to.have.deep.members(
deepJsonParse(expectedQueryParams),
);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath),
@ -226,15 +233,17 @@ export async function testPushBuildStream(o: {
responseCode: number;
responseBody: string;
}) {
const expectedQueryParams = fillTemplateArray(o.expectedQueryParams, o);
const expectedResponseLines = fillTemplateArray(o.expectedResponseLines, o);
const expectedQueryParams = deepTemplateReplace(o.expectedQueryParams, o);
const expectedResponseLines = deepTemplateReplace(o.expectedResponseLines, o);
o.builderMock.expectPostBuild({
...o,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(expectedQueryParams);
expect(deepJsonParse(queryParams)).to.have.deep.members(
deepJsonParse(expectedQueryParams),
);
},
checkBuildRequestBody: (buildRequestBody) =>
inspectTarStream(buildRequestBody, o.expectedFiles, o.projectPath),

View File

@ -47,6 +47,7 @@ function filterCliOutputForTests(testOutput: TestOutput): TestOutput {
// TODO stop this warning message from appearing when running
// sdk.setSharedOptions multiple times in the same process
!line.startsWith('Shared SDK options') &&
!line.startsWith('WARN: disabling Sentry.io error reporting') &&
// Node 12: '[DEP0066] DeprecationWarning: OutgoingMessage.prototype._headers is deprecated'
!line.includes('[DEP0066]'),
),
@ -264,23 +265,55 @@ export function fillTemplate(
return unescaped;
}
export function fillTemplateArray(
templateStringArray: string[],
templateVars: object,
): string[];
export function fillTemplateArray(
templateStringArray: Array<string | string[]>,
templateVars: object,
): Array<string | string[]>;
export function fillTemplateArray(
templateStringArray: Array<string | string[]>,
templateVars: object,
): Array<string | string[]> {
return templateStringArray.map((i) =>
Array.isArray(i)
? fillTemplateArray(i, templateVars)
: fillTemplate(i, templateVars),
);
/**
* Recursively navigate the `data` argument (if it is an array or object),
* finding and replacing "template strings" such as 'hello ${name}!' with
* the variable values given in `templateVars` such as { name: 'world' }.
*
* @param data Any data type (array, object, string) containing template
* strings to be replaced
* @param templateVars Map of template variable names to values
*/
export function deepTemplateReplace(
data: any,
templateVars: { [key: string]: any },
): any {
switch (typeof data) {
case 'string':
return fillTemplate(data, templateVars);
case 'object':
if (Array.isArray(data)) {
return data.map((i) => deepTemplateReplace(i, templateVars));
}
return _.mapValues(data, (value) =>
deepTemplateReplace(value, templateVars),
);
default:
// number, undefined, null, or something else
return data;
}
}
export const fillTemplateArray = deepTemplateReplace;
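For instance (made-up template variables), strings anywhere in the structure are interpolated while non-strings pass through unchanged:
deepTemplateReplace(
	{ t: '${tag}', list: ['${tag}-debug', 42] },
	{ tag: 'myapp_service1' },
);
// => { t: 'myapp_service1', list: ['myapp_service1-debug', 42] }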
/**
* Recursively navigate the `data` argument (if it is an array or object),
* looking for strings that start with `[` or `{` which are assumed to contain
* JSON arrays or objects that are then parsed with JSON.parse().
* @param data
*/
export function deepJsonParse(data: any): any {
if (typeof data === 'string') {
const maybeJson = data.trim();
if (maybeJson.startsWith('{') || maybeJson.startsWith('[')) {
return JSON.parse(maybeJson);
}
} else if (Array.isArray(data)) {
return data.map((i) => deepJsonParse(i));
} else if (typeof data === 'object') {
return _.mapValues(data, (value) => deepJsonParse(value));
}
return data;
}
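This mirrors how the tests compare query parameters, for example:
deepJsonParse([
	['t', 'myapp_service1'],
	['buildargs', '{"MY_VAR_1":"This is a variable"}'],
]);
// => [
//   ['t', 'myapp_service1'],
//   ['buildargs', { MY_VAR_1: 'This is a variable' }],
// ]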
export async function switchSentry(

View File

@ -12,3 +12,5 @@ services:
build:
context: ./service2
dockerfile: Dockerfile-alt
args:
- 'COMPOSE_ARG=an argument defined in the docker-compose.yml file'

View File

@ -20,7 +20,7 @@ import * as _ from 'lodash';
import * as path from 'path';
import * as tar from 'tar-stream';
import { tarDirectory } from '../../build/utils/compose';
import { tarDirectory } from '../../build/utils/compose_ts';
import { setupDockerignoreTestData } from '../projects';
const repoPath = path.normalize(path.join(__dirname, '..', '..'));