// balena-cli/lib/utils/compose_ts.ts

/**
 * @license
 * Copyright 2018-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import { BalenaSDK } from 'balena-sdk';
import * as Bluebird from 'bluebird';
import { stripIndent } from 'common-tags';
import Dockerode = require('dockerode');
import * as _ from 'lodash';
import { fs } from 'mz';
import * as path from 'path';
import { Composition } from 'resin-compose-parse';
import * as MultiBuild from 'resin-multibuild';
import { Readable } from 'stream';
import * as tar from 'tar-stream';
import { ExpectedError } from '../errors';
import { DeviceInfo } from './device/api';
import Logger = require('./logger');

export interface RegistrySecrets {
	[registryAddress: string]: {
		username: string;
		password: string;
	};
}
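
// Illustrative sketch (not part of the original module): the shape of a
// RegistrySecrets value, keyed by registry address. The hostnames and
// credentials below are made-up placeholders.
//
//   const exampleSecrets: RegistrySecrets = {
//       'registry.example.com:25000': { username: 'jdoe', password: 'secret' },
//       'docker.example.org': { username: 'ci-bot', password: 'another-secret' },
//   };
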
const compositionFileNames = ['docker-compose.yml', 'docker-compose.yaml'];

/**
 * High-level function that resolves a project and creates a composition out
 * of it in one go. If an image is given, it creates a default composition for
 * that image without looking for a project. Otherwise it falls back to a
 * default composition if no project is found at the given projectPath.
 */
export async function loadProject(
	logger: Logger,
	opts: import('./compose').ComposeOpts,
	image?: string,
): Promise<import('./compose').ComposeProject> {
	const compose = await import('resin-compose-parse');
	const { createProject } = await import('./compose');
	let composeName: string;
	let composeStr: string;
	logger.logDebug('Loading project...');
	if (image) {
		logger.logInfo(`Creating default composition with image: "${image}"`);
		composeStr = compose.defaultComposition(image);
	} else {
		logger.logDebug('Resolving project...');
		[composeName, composeStr] = await resolveProject(logger, opts.projectPath);
		if (composeName) {
			if (opts.dockerfilePath) {
				logger.logWarn(
					`Ignoring alternative dockerfile "${opts.dockerfilePath}" because composition file "${composeName}" exists`,
				);
			}
		} else {
			logger.logInfo(
				`Creating default composition with source: "${opts.projectPath}"`,
			);
			composeStr = compose.defaultComposition(undefined, opts.dockerfilePath);
		}
	}
	logger.logDebug('Creating project...');
	return createProject(opts.projectPath, composeStr, opts.projectName);
}
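
/*
 * Usage sketch (illustrative only, not part of the original module). The
 * `logger` and option values below are hypothetical; real callers assemble
 * ComposeOpts from command-line flags, and ComposeOpts may require fields
 * not shown here (hence the cast). The `composition` field access is also
 * an assumption made for the example.
 *
 *   const logger = Logger.getLogger();
 *   const project = await loadProject(logger, {
 *       projectPath: '/home/user/myApp',
 *       projectName: 'myApp',
 *       dockerfilePath: undefined,
 *   } as import('./compose').ComposeOpts);
 *   console.log(project.composition); // parsed docker-compose model
 */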

/**
 * Look into the given directory for valid compose files and return
 * the contents of the first one found.
 */
async function resolveProject(
	logger: Logger,
	projectRoot: string,
): Promise<[string, string]> {
	let composeFileName = '';
	let composeFileContents = '';
	for (const fname of compositionFileNames) {
		const fpath = path.join(projectRoot, fname);
		if (await fs.exists(fpath)) {
			logger.logDebug(`${fname} file found at "${projectRoot}"`);
			composeFileName = fname;
			try {
				composeFileContents = await fs.readFile(fpath, 'utf-8');
			} catch (err) {
				logger.logError(`Error reading composition file "${fpath}":\n${err}`);
				throw err;
			}
			break;
		}
	}
	if (!composeFileName) {
		logger.logInfo(`No "docker-compose.yml" file found at "${projectRoot}"`);
	}
	return [composeFileName, composeFileContents];
}

/**
 * Load the ".balena/balena.yml" build metadata file (or the ".resin/resin.yml"
 * equivalent, with .yml, .yaml or .json extensions), which configures features
 * like "build secrets" and "build variables".
 * @returns Pair of metadata object and metadata file path
 */
async function loadBuildMetadata(
	sourceDir: string,
): Promise<[MultiBuild.ParsedBalenaYml, string]> {
	let metadataPath = '';
	let rawString = '';
	outer: for (const fName of ['balena', 'resin']) {
		for (const fExt of ['yml', 'yaml', 'json']) {
			metadataPath = path.join(sourceDir, `.${fName}`, `${fName}.${fExt}`);
			try {
				rawString = await fs.readFile(metadataPath, 'utf8');
				break outer;
			} catch (err) {
				if (err.code === 'ENOENT') {
					// file not found: try the next name/extension combination
					continue;
				} else {
					throw err;
				}
			}
		}
	}
	if (!rawString) {
		return [{}, ''];
	}
	let buildMetadata: MultiBuild.ParsedBalenaYml;
	try {
		if (metadataPath.endsWith('json')) {
			buildMetadata = JSON.parse(rawString);
		} else {
			buildMetadata = require('js-yaml').safeLoad(rawString);
		}
	} catch (err) {
		throw new ExpectedError(
			`Error parsing file "${metadataPath}":\n ${err.message}`,
		);
	}
	return [buildMetadata, metadataPath];
}
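
// Illustrative sketch (not from the original sources): the kind of
// ".balena/balena.yml" content that loadBuildMetadata() parses. The exact
// schema is owned by resin-multibuild's ParsedBalenaYml type; the file and
// variable names below are made-up placeholders.
//
//   build-secrets:
//     global:
//       - source: api-key.txt
//         dest: api-key.txt
//   build-variables:
//     global:
//       - MY_BUILD_VAR=some-value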

/**
 * Check whether the "build secrets" feature is being used and, if so,
 * verify that the target docker daemon is balenaEngine. If the
 * requirement is not satisfied, reject with an ExpectedError.
 * @param docker Dockerode instance
 * @param sourceDir Project directory where to find .balena/balena.yml
 */
export async function checkBuildSecretsRequirements(
	docker: Dockerode,
	sourceDir: string,
) {
	const [metaObj, metaFilename] = await loadBuildMetadata(sourceDir);
	if (metaObj && !_.isEmpty(metaObj['build-secrets'])) {
		const dockerUtils = await import('./docker');
		const isBalenaEngine = await dockerUtils.isBalenaEngine(docker);
		if (!isBalenaEngine) {
			throw new ExpectedError(stripIndent`
				The "build secrets" feature currently requires balenaEngine, but a standard Docker
				daemon was detected. Please use command-line options to specify the hostname and
				port number (or socket path) of a balenaEngine daemon, running on a balena device
				or a virtual machine with balenaOS. If the build secrets feature is not required,
				comment out or delete the 'build-secrets' entry in the file:
				"${metaFilename}"
			`);
		}
	}
}

export async function getRegistrySecrets(
	sdk: BalenaSDK,
	inputFilename?: string,
): Promise<RegistrySecrets> {
	if (inputFilename != null) {
		return await parseRegistrySecrets(inputFilename);
	}
	const directory = await sdk.settings.get('dataDirectory');
	const potentialPaths = [
		path.join(directory, 'secrets.yml'),
		path.join(directory, 'secrets.yaml'),
		path.join(directory, 'secrets.json'),
	];
	for (const potentialPath of potentialPaths) {
		if (await fs.exists(potentialPath)) {
			return await parseRegistrySecrets(potentialPath);
		}
	}
	return {};
}

async function parseRegistrySecrets(
	secretsFilename: string,
): Promise<RegistrySecrets> {
	try {
		let isYaml = false;
		if (/.+\.ya?ml$/i.test(secretsFilename)) {
			isYaml = true;
		} else if (!/.+\.json$/i.test(secretsFilename)) {
			throw new Error('Filename must end with .json, .yml or .yaml');
		}
		const raw = (await fs.readFile(secretsFilename)).toString();
		const registrySecrets = new MultiBuild.RegistrySecretValidator().validateRegistrySecrets(
			isYaml ? require('js-yaml').safeLoad(raw) : JSON.parse(raw),
		);
		MultiBuild.addCanonicalDockerHubEntry(registrySecrets);
		return registrySecrets;
	} catch (error) {
		throw new ExpectedError(
			`Error validating registry secrets file "${secretsFilename}":\n${error.message}`,
		);
	}
}
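
// Illustrative sketch (not part of the original module): a YAML registry
// secrets file of the kind parseRegistrySecrets() accepts. Registry addresses
// and credentials are placeholders; Docker Hub's canonical entry is added
// separately by addCanonicalDockerHubEntry() above.
//
//   'registry.example.com:25000':
//     username: jdoe
//     password: secret-token
//   'docker.example.org':
//     username: ci-bot
//     password: another-secret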

/**
 * Create a BuildTask array of "resolved build tasks" by calling
 * multibuild.splitBuildStream() and performResolution(), and add build
 * stream error handlers and debug logging.
 * Both `balena build` and `balena deploy` call this function.
 */
export async function makeBuildTasks(
	composition: Composition,
	tarStream: Readable,
	deviceInfo: DeviceInfo,
	logger: Logger,
): Promise<MultiBuild.BuildTask[]> {
	const buildTasks = await MultiBuild.splitBuildStream(composition, tarStream);
	logger.logDebug('Found build tasks:');
	_.each(buildTasks, task => {
		let infoStr: string;
		if (task.external) {
			infoStr = `image pull [${task.imageName}]`;
		} else {
			infoStr = `build [${task.context}]`;
		}
		logger.logDebug(` ${task.serviceName}: ${infoStr}`);
	});
	logger.logDebug(
		`Resolving services with [${deviceInfo.deviceType}|${deviceInfo.arch}]`,
	);
	await performResolution(buildTasks, deviceInfo);
	logger.logDebug('Found project types:');
	_.each(buildTasks, task => {
		if (task.external) {
			logger.logDebug(` ${task.serviceName}: External image`);
		} else {
			logger.logDebug(` ${task.serviceName}: ${task.projectType}`);
		}
	});
	return buildTasks;
}
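
/*
 * Usage sketch (illustrative only): resolving the build tasks of a project.
 * The use of `tar-fs`, and the `project`, `logger` and device values, are
 * assumptions made for the example; the real `balena build`/`balena deploy`
 * commands construct the tar stream and DeviceInfo elsewhere in the CLI.
 *
 *   import * as tarfs from 'tar-fs';
 *
 *   const tarStream = tarfs.pack('/home/user/myApp');
 *   const tasks = await makeBuildTasks(
 *       project.composition,
 *       tarStream,
 *       { arch: 'armv7hf', deviceType: 'raspberrypi3' } as DeviceInfo,
 *       logger,
 *   );
 */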

async function performResolution(
	tasks: MultiBuild.BuildTask[],
	deviceInfo: DeviceInfo,
): Promise<MultiBuild.BuildTask[]> {
	const { cloneTarStream } = require('tar-utils');
	return await new Promise<MultiBuild.BuildTask[]>((resolve, reject) => {
		const buildTasks = MultiBuild.performResolution(
			tasks,
			deviceInfo.arch,
			deviceInfo.deviceType,
			{ error: [reject] },
		);
		// Do one task at a time (Bluebird.each instead of Bluebird.all)
		// in order to reduce peak memory usage. Resolves to buildTasks.
		Bluebird.each(buildTasks, buildTask => {
			// buildStream is falsy for "external" tasks (image pull)
			if (!buildTask.buildStream) {
				return buildTask;
			}
			// Consume each task.buildStream in order to trigger the
			// resolution events that define fields like:
			//     task.dockerfile, task.dockerfilePath,
			//     task.projectType, task.resolved
			// This mimics what is currently done in `resin-builder`.
			return cloneTarStream(buildTask.buildStream).then(
				(clonedStream: tar.Pack) => {
					buildTask.buildStream = clonedStream;
					if (!buildTask.external && !buildTask.resolved) {
						throw new Error(
							`Project type for service "${buildTask.serviceName}" could not be determined. Missing a Dockerfile?`,
						);
					}
					return buildTask;
				},
			);
		}).then(resolve, reject);
	});
}

/**
 * Enforce that, for example, if 'myProject/MyDockerfile.template' is specified
 * as an alternative Dockerfile name, then 'myProject/MyDockerfile' must not
 * exist.
 * Return the tar stream path (Posix, normalized) for the given dockerfilePath.
 * For example, on Windows, given a dockerfilePath of 'foo\..\bar\Dockerfile',
 * return 'bar/Dockerfile'. On Linux, given './bar/Dockerfile', return 'bar/Dockerfile'.
 *
 * @param projectPath The project source folder (-s command-line option)
 * @param dockerfilePath The alternative Dockerfile specified by the user
 * @return A normalized posix representation of dockerfilePath
 */
async function validateSpecifiedDockerfile(
	projectPath: string,
	dockerfilePath: string,
): Promise<string> {
	const { contains, toNativePath, toPosixPath } = MultiBuild.PathUtils;
	const nativeProjectPath = path.normalize(projectPath);
	const nativeDockerfilePath = path.normalize(toNativePath(dockerfilePath));
	// reminder: native Windows paths may start with a drive specification,
	// e.g. 'C:\absolute' or 'C:relative'.
	if (path.isAbsolute(nativeDockerfilePath)) {
		throw new ExpectedError(stripIndent`
			Error: the specified Dockerfile cannot be an absolute path. The path must be
			relative to, and not a parent folder of, the project's source folder.
			Specified dockerfile: "${nativeDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
		`);
	}
	// note that path.normalize('a/../../b') results in '../b'
	if (nativeDockerfilePath.startsWith('..')) {
		throw new ExpectedError(stripIndent`
			Error: the specified Dockerfile cannot be in a parent folder of the project's
			source folder. Note that the path should be relative to the project's source
			folder, not the current folder.
			Specified dockerfile: "${nativeDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
		`);
	}
	const fullDockerfilePath = path.join(nativeProjectPath, nativeDockerfilePath);
	if (!(await fs.exists(fullDockerfilePath))) {
		throw new ExpectedError(stripIndent`
			Error: specified Dockerfile not found:
			Specified dockerfile: "${fullDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
			Note that the specified Dockerfile path should be relative to the source folder.
		`);
	}
	if (!contains(nativeProjectPath, fullDockerfilePath)) {
		throw new ExpectedError(stripIndent`
			Error: the specified Dockerfile must be in a subfolder of the source folder:
			Specified dockerfile: "${fullDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
		`);
	}
	const { dir, ext, name } = path.parse(fullDockerfilePath);
	if (ext) {
		const nativePathMinusExt = path.join(dir, name);
		if (await fs.exists(nativePathMinusExt)) {
			throw new ExpectedError(stripIndent`
				Error: "${name}" exists in the same folder as "${nativeDockerfilePath}".
				When an alternative Dockerfile name is specified, a file with the same base name
				(minus the file extension) must not exist in the same folder. This is because
				the base name file will be auto generated and added to the tar stream that is
				sent to balenaEngine or the Docker daemon, resulting in duplicate Dockerfiles
				and undefined behavior.
			`);
		}
	}
	return toPosixPath(nativeDockerfilePath);
}
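
// Illustrative examples (not part of the original module) of the checks above,
// assuming the referenced files exist under the project folder:
//
//   validateSpecifiedDockerfile('C:\\app', 'foo\\..\\bar\\Dockerfile')
//     -> 'bar/Dockerfile' (native Windows input, posix tar-stream path out)
//   validateSpecifiedDockerfile('/home/user/app', './bar/Dockerfile')
//     -> 'bar/Dockerfile'
//   validateSpecifiedDockerfile('/home/user/app', '../Dockerfile')
//     -> rejects with ExpectedError (parent folder of the source folder)
//   validateSpecifiedDockerfile('/home/user/app', 'MyDockerfile.template')
//     -> rejects with ExpectedError if 'MyDockerfile' also exists in that folder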

export interface ProjectValidationResult {
	dockerfilePath: string;
	registrySecrets: RegistrySecrets;
}

/**
 * Perform "sanity checks" on the project directory, e.g. for the existence
 * of a 'Dockerfile[.*]', 'docker-compose.yml' or 'package.json' file.
 * Also validate registry secrets if any, and perform checks around an
 * alternative specified dockerfile (--dockerfile) if any.
 *
 * Return the parsed registry secrets if any, and the "tar stream path" for
 * an alternative specified Dockerfile if any (see validateSpecifiedDockerfile()).
 */
export async function validateProjectDirectory(
	sdk: BalenaSDK,
	opts: {
		dockerfilePath?: string;
		noParentCheck: boolean;
		projectPath: string;
		registrySecretsPath?: string;
	},
): Promise<ProjectValidationResult> {
	if (
		!(await fs.exists(opts.projectPath)) ||
		!(await fs.stat(opts.projectPath)).isDirectory()
	) {
		throw new ExpectedError(
			`Could not access source folder: "${opts.projectPath}"`,
		);
	}
	const result: ProjectValidationResult = {
		dockerfilePath: opts.dockerfilePath || '',
		registrySecrets: {},
	};
	if (opts.dockerfilePath) {
		result.dockerfilePath = await validateSpecifiedDockerfile(
			opts.projectPath,
			opts.dockerfilePath,
		);
	} else {
		const files = await fs.readdir(opts.projectPath);
		const projectMatch = (file: string) =>
			/^(Dockerfile|Dockerfile\.\S+|docker-compose.ya?ml|package.json)$/.test(
				file,
			);
		if (!_.some(files, projectMatch)) {
			throw new ExpectedError(stripIndent`
				Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file
				found in source folder "${opts.projectPath}"
			`);
		}
		if (!opts.noParentCheck) {
			const checkCompose = async (folder: string) => {
				return _.some(
					await Promise.all(
						compositionFileNames.map(filename =>
							fs.exists(path.join(folder, filename)),
						),
					),
				);
			};
			const [hasCompose, hasParentCompose] = await Promise.all([
				checkCompose(opts.projectPath),
				checkCompose(path.join(opts.projectPath, '..')),
			]);
			if (!hasCompose && hasParentCompose) {
				Logger.getLogger().logWarn(stripIndent`
					"docker-compose.y[a]ml" file found in parent directory: please check
					that the correct folder was specified. (Suppress with '--noparent-check'.)
				`);
			}
		}
	}
	result.registrySecrets = await getRegistrySecrets(
		sdk,
		opts.registrySecretsPath,
	);
	return result;
}
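
/*
 * End-to-end usage sketch (illustrative only): the `sdk`, `docker` and `logger`
 * values are assumed to be created elsewhere, as the build and deploy commands
 * do, and the paths are placeholders.
 *
 *   const { dockerfilePath, registrySecrets } = await validateProjectDirectory(sdk, {
 *       dockerfilePath: 'MyDockerfile.template',
 *       noParentCheck: false,
 *       projectPath: '/home/user/myApp',
 *       registrySecretsPath: undefined,
 *   });
 *   await checkBuildSecretsRequirements(docker, '/home/user/myApp');
 *   const project = await loadProject(logger, {
 *       projectPath: '/home/user/myApp',
 *       projectName: 'myApp',
 *       dockerfilePath,
 *   } as import('./compose').ComposeOpts);
 *   // registrySecrets would then be passed on to the image build/push steps.
 */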