/**
 * @license
 * Copyright 2018-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { BalenaSDK } from 'balena-sdk';
import * as Bluebird from 'bluebird';
import { stripIndent } from 'common-tags';
import Dockerode = require('dockerode');
import * as _ from 'lodash';
import { fs } from 'mz';
import * as path from 'path';
import { Composition } from 'resin-compose-parse';
import * as MultiBuild from 'resin-multibuild';
import { Readable } from 'stream';
import * as tar from 'tar-stream';

import { ExpectedError } from '../errors';
import { DeviceInfo } from './device/api';
import Logger = require('./logger');

export interface RegistrySecrets {
	[registryAddress: string]: {
		username: string;
		password: string;
	};
}

const compositionFileNames = ['docker-compose.yml', 'docker-compose.yaml'];

/**
 * High-level function that resolves a project and creates a composition out
 * of it in one go. If `image` is given, it creates a default composition for
 * that image without looking for a project. It also falls back to creating a
 * default composition if no project is found at the given projectPath.
 */
export async function loadProject(
	logger: Logger,
	opts: import('./compose-types').ComposeOpts,
	image?: string,
): Promise<import('./compose-types').ComposeProject> {
	const compose = await import('resin-compose-parse');
	const { createProject } = await import('./compose');
	let composeName: string;
	let composeStr: string;

	logger.logDebug('Loading project...');

	if (image) {
		logger.logInfo(`Creating default composition with image: "${image}"`);
		composeStr = compose.defaultComposition(image);
	} else {
		logger.logDebug('Resolving project...');
		[composeName, composeStr] = await resolveProject(logger, opts.projectPath);
		if (composeName) {
			if (opts.dockerfilePath) {
				logger.logWarn(
					`Ignoring alternative dockerfile "${opts.dockerfilePath}" because composition file "${composeName}" exists`,
				);
			}
		} else {
			logger.logInfo(
				`Creating default composition with source: "${opts.projectPath}"`,
			);
			composeStr = compose.defaultComposition(undefined, opts.dockerfilePath);
		}
	}
	logger.logDebug('Creating project...');
	return createProject(opts.projectPath, composeStr, opts.projectName);
}
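
/*
 * Example usage of loadProject() (a minimal sketch: the exact set of
 * `ComposeOpts` fields beyond `projectPath`, `projectName` and
 * `dockerfilePath`, and the shape of the returned `ComposeProject`,
 * are assumptions; check './compose-types' for the real types):
 *
 *     const project = await loadProject(Logger.getLogger(), {
 *         projectPath: '/home/me/myApp',
 *         projectName: 'myApp',
 *     } as import('./compose-types').ComposeOpts);
 */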

/**
 * Look into the given directory for valid compose files and return
 * the contents of the first one found.
 */
async function resolveProject(
	logger: Logger,
	projectRoot: string,
): Promise<[string, string]> {
	let composeFileName = '';
	let composeFileContents = '';
	for (const fname of compositionFileNames) {
		const fpath = path.join(projectRoot, fname);
		if (await fs.exists(fpath)) {
			logger.logDebug(`${fname} file found at "${projectRoot}"`);
			composeFileName = fname;
			try {
				composeFileContents = await fs.readFile(fpath, 'utf-8');
			} catch (err) {
				logger.logError(`Error reading composition file "${fpath}":\n${err}`);
				throw err;
			}
			break;
		}
	}
	if (!composeFileName) {
		logger.logInfo(`No "docker-compose.yml" file found at "${projectRoot}"`);
	}
	return [composeFileName, composeFileContents];
}

/**
 * Load the ".balena/balena.yml" file (or ".resin/resin.yml", or the ".yaml"
 * or ".json" variants), which contains "build metadata" for features like
 * "build secrets" and "build variables".
 * @returns Pair of metadata object and metadata file path
 */
async function loadBuildMetadata(
	sourceDir: string,
): Promise<[MultiBuild.ParsedBalenaYml, string]> {
	let metadataPath = '';
	let rawString = '';

	outer: for (const fName of ['balena', 'resin']) {
		for (const fExt of ['yml', 'yaml', 'json']) {
			metadataPath = path.join(sourceDir, `.${fName}`, `${fName}.${fExt}`);
			try {
				rawString = await fs.readFile(metadataPath, 'utf8');
				break outer;
			} catch (err) {
				if (err.code === 'ENOENT') {
					// file not found, try the next name.extension combination
					continue;
				} else {
					throw err;
				}
			}
		}
	}
	if (!rawString) {
		return [{}, ''];
	}
	let buildMetadata: MultiBuild.ParsedBalenaYml;
	try {
		if (metadataPath.endsWith('json')) {
			buildMetadata = JSON.parse(rawString);
		} else {
			buildMetadata = require('js-yaml').safeLoad(rawString);
		}
	} catch (err) {
		throw new ExpectedError(
			`Error parsing file "${metadataPath}":\n ${err.message}`,
		);
	}
	return [buildMetadata, metadataPath];
}
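
/*
 * Example '.balena/balena.yml' build metadata file (a sketch of the shape
 * parsed into MultiBuild.ParsedBalenaYml; the exact schema is defined by
 * resin-multibuild, and the keys below are illustrative):
 *
 *     build-variables:
 *         global:
 *             - MY_VAR=someValue
 *     build-secrets:
 *         services:
 *             main:
 *                 - source: secrets/api-key
 *                   dest: api-key
 */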

/**
 * Check whether the "build secrets" feature is being used and, if so,
 * verify that the target docker daemon is balenaEngine. If the
 * requirement is not satisfied, reject with an ExpectedError.
 * @param docker Dockerode instance
 * @param sourceDir Project directory where to find .balena/balena.yml
 */
export async function checkBuildSecretsRequirements(
	docker: Dockerode,
	sourceDir: string,
) {
	const [metaObj, metaFilename] = await loadBuildMetadata(sourceDir);
	if (metaObj && !_.isEmpty(metaObj['build-secrets'])) {
		const dockerUtils = await import('./docker');
		const isBalenaEngine = await dockerUtils.isBalenaEngine(docker);
		if (!isBalenaEngine) {
			throw new ExpectedError(stripIndent`
				The "build secrets" feature currently requires balenaEngine, but a standard Docker
				daemon was detected. Please use command-line options to specify the hostname and
				port number (or socket path) of a balenaEngine daemon, running on a balena device
				or a virtual machine with balenaOS. If the build secrets feature is not required,
				comment out or delete the 'build-secrets' entry in the file:
				"${metaFilename}"
			`);
		}
	}
}
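
/*
 * Example usage of checkBuildSecretsRequirements() (a sketch; assumes a
 * Dockerode instance already configured for the target daemon):
 *
 *     const docker = new Dockerode({ host: '192.168.1.20', port: 2375 });
 *     // Rejects with an ExpectedError if '.balena/balena.yml' uses
 *     // 'build-secrets' and the daemon is not balenaEngine:
 *     await checkBuildSecretsRequirements(docker, '/home/me/myApp');
 */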

export async function getRegistrySecrets(
	sdk: BalenaSDK,
	inputFilename?: string,
): Promise<RegistrySecrets> {
	if (inputFilename != null) {
		return await parseRegistrySecrets(inputFilename);
	}

	const directory = await sdk.settings.get('dataDirectory');
	const potentialPaths = [
		path.join(directory, 'secrets.yml'),
		path.join(directory, 'secrets.yaml'),
		path.join(directory, 'secrets.json'),
	];

	for (const potentialPath of potentialPaths) {
		if (await fs.exists(potentialPath)) {
			return await parseRegistrySecrets(potentialPath);
		}
	}

	return {};
}

async function parseRegistrySecrets(
	secretsFilename: string,
): Promise<RegistrySecrets> {
	try {
		let isYaml = false;
		if (/.+\.ya?ml$/i.test(secretsFilename)) {
			isYaml = true;
		} else if (!/.+\.json$/i.test(secretsFilename)) {
			throw new Error('Filename must end with .json, .yml or .yaml');
		}
		const raw = (await fs.readFile(secretsFilename)).toString();
		const registrySecrets = new MultiBuild.RegistrySecretValidator().validateRegistrySecrets(
			isYaml ? require('js-yaml').safeLoad(raw) : JSON.parse(raw),
		);
		MultiBuild.addCanonicalDockerHubEntry(registrySecrets);
		return registrySecrets;
	} catch (error) {
		throw new ExpectedError(
			`Error validating registry secrets file "${secretsFilename}":\n${error.message}`,
		);
	}
}
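
/*
 * Example registry secrets file, e.g. 'secrets.yml' (matching the
 * RegistrySecrets interface above; the hostnames and credentials are
 * illustrative only):
 *
 *     'https://index.docker.io/v1/':
 *         username: jsmith
 *         password: mysecret
 *     'my-registry.example.com:25000':
 *         username: jdoe
 *         password: anothersecret
 */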

/**
 * Create a BuildTask array of "resolved build tasks" by calling multibuild
 * .splitBuildStream() and performResolution(), and add build stream error
 * handlers and debug logging.
 * Both `balena build` and `balena deploy` call this function.
 */
export async function makeBuildTasks(
	composition: Composition,
	tarStream: Readable,
	deviceInfo: DeviceInfo,
	logger: Logger,
	projectName: string,
	releaseHash: string = 'unavailable',
): Promise<MultiBuild.BuildTask[]> {
	const buildTasks = await MultiBuild.splitBuildStream(composition, tarStream);

	logger.logDebug('Found build tasks:');
	_.each(buildTasks, task => {
		let infoStr: string;
		if (task.external) {
			infoStr = `image pull [${task.imageName}]`;
		} else {
			infoStr = `build [${task.context}]`;
		}
		logger.logDebug(`    ${task.serviceName}: ${infoStr}`);
	});

	logger.logDebug(
		`Resolving services with [${deviceInfo.deviceType}|${deviceInfo.arch}]`,
	);

	await performResolution(buildTasks, deviceInfo, projectName, releaseHash);

	logger.logDebug('Found project types:');
	_.each(buildTasks, task => {
		if (task.external) {
			logger.logDebug(`    ${task.serviceName}: External image`);
		} else {
			logger.logDebug(`    ${task.serviceName}: ${task.projectType}`);
		}
	});

	return buildTasks;
}
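
/*
 * Example usage of makeBuildTasks() (a sketch; assumes `composition` and
 * `tarStream` were previously obtained, e.g. via loadProject() and a tar
 * packing step, and that DeviceInfo carries at least deviceType and arch):
 *
 *     const buildTasks = await makeBuildTasks(
 *         project.composition,
 *         tarStream,
 *         { deviceType: 'raspberrypi3', arch: 'armv7hf' } as DeviceInfo,
 *         Logger.getLogger(),
 *         'myApp',
 *     );
 */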

async function performResolution(
	tasks: MultiBuild.BuildTask[],
	deviceInfo: DeviceInfo,
	appName: string,
	releaseHash: string,
): Promise<MultiBuild.BuildTask[]> {
	const { cloneTarStream } = require('tar-utils');

	return await new Promise<MultiBuild.BuildTask[]>((resolve, reject) => {
		const buildTasks = MultiBuild.performResolution(
			tasks,
			deviceInfo.arch,
			deviceInfo.deviceType,
			{ error: [reject] },
			{
				BALENA_RELEASE_HASH: releaseHash,
				BALENA_APP_NAME: appName,
			},
		);
		// Do one task at a time (Bluebird.each instead of Bluebird.all)
		// in order to reduce peak memory usage. Resolves to buildTasks.
		Bluebird.each(buildTasks, buildTask => {
			// buildStream is falsy for "external" tasks (image pull)
			if (!buildTask.buildStream) {
				return buildTask;
			}
			// Consume each task.buildStream in order to trigger the
			// resolution events that define fields like:
			//     task.dockerfile, task.dockerfilePath,
			//     task.projectType, task.resolved
			// This mimics what is currently done in `resin-builder`.
			return cloneTarStream(buildTask.buildStream).then(
				(clonedStream: tar.Pack) => {
					buildTask.buildStream = clonedStream;
					if (!buildTask.external && !buildTask.resolved) {
						throw new Error(
							`Project type for service "${buildTask.serviceName}" could not be determined. Missing a Dockerfile?`,
						);
					}
					return buildTask;
				},
			);
		}).then(resolve, reject);
	});
}

/**
 * Enforce that, for example, if 'myProject/MyDockerfile.template' is specified
 * as an alternative Dockerfile name, then 'myProject/MyDockerfile' must not
 * exist.
 * Return the tar stream path (Posix, normalized) for the given dockerfilePath.
 * For example, on Windows, given a dockerfilePath of 'foo\..\bar\Dockerfile',
 * return 'bar/Dockerfile'. On Linux, given './bar/Dockerfile', return 'bar/Dockerfile'.
 *
 * @param projectPath The project source folder (-s command-line option)
 * @param dockerfilePath The alternative Dockerfile specified by the user
 * @return A normalized posix representation of dockerfilePath
 */
async function validateSpecifiedDockerfile(
	projectPath: string,
	dockerfilePath: string,
): Promise<string> {
	const { contains, toNativePath, toPosixPath } = MultiBuild.PathUtils;

	const nativeProjectPath = path.normalize(projectPath);
	const nativeDockerfilePath = path.normalize(toNativePath(dockerfilePath));

	// reminder: native windows paths may start with a drive specification,
	// e.g. 'C:\absolute' or 'C:relative'.
	if (path.isAbsolute(nativeDockerfilePath)) {
		throw new ExpectedError(stripIndent`
			Error: the specified Dockerfile cannot be an absolute path. The path must be
			relative to, and not a parent folder of, the project's source folder.
			Specified dockerfile: "${nativeDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
		`);
	}

	// note that path.normalize('a/../../b') results in '../b'
	if (nativeDockerfilePath.startsWith('..')) {
		throw new ExpectedError(stripIndent`
			Error: the specified Dockerfile cannot be in a parent folder of the project's
			source folder. Note that the path should be relative to the project's source
			folder, not the current folder.
			Specified dockerfile: "${nativeDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
		`);
	}

	const fullDockerfilePath = path.join(nativeProjectPath, nativeDockerfilePath);

	if (!(await fs.exists(fullDockerfilePath))) {
		throw new ExpectedError(stripIndent`
			Error: specified Dockerfile not found:
			Specified dockerfile: "${fullDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
			Note that the specified Dockerfile path should be relative to the source folder.
		`);
	}

	if (!contains(nativeProjectPath, fullDockerfilePath)) {
		throw new ExpectedError(stripIndent`
			Error: the specified Dockerfile must be in a subfolder of the source folder:
			Specified dockerfile: "${fullDockerfilePath}"
			Project's source folder: "${nativeProjectPath}"
		`);
	}

	return toPosixPath(nativeDockerfilePath);
}
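
/*
 * Example of the normalization performed by validateSpecifiedDockerfile()
 * (illustrating the doc comment above; the paths are hypothetical):
 *
 *     // on Windows, assuming 'C:\app\bar\Dockerfile' exists:
 *     await validateSpecifiedDockerfile('C:\\app', 'foo\\..\\bar\\Dockerfile');
 *     // => 'bar/Dockerfile'
 */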

export interface ProjectValidationResult {
	dockerfilePath: string;
	registrySecrets: RegistrySecrets;
}

/**
 * Perform "sanity checks" on the project directory, e.g. for the existence
 * of a 'Dockerfile[.*]', 'docker-compose.yml' or 'package.json' file.
 * Also validate registry secrets if any, and perform checks around an
 * alternative specified dockerfile (--dockerfile) if any.
 *
 * Return the parsed registry secrets if any, and the "tar stream path" for
 * an alternative specified Dockerfile if any (see validateSpecifiedDockerfile()).
 */
export async function validateProjectDirectory(
	sdk: BalenaSDK,
	opts: {
		dockerfilePath?: string;
		noParentCheck: boolean;
		projectPath: string;
		registrySecretsPath?: string;
	},
): Promise<ProjectValidationResult> {
	if (
		!(await fs.exists(opts.projectPath)) ||
		!(await fs.stat(opts.projectPath)).isDirectory()
	) {
		throw new ExpectedError(
			`Could not access source folder: "${opts.projectPath}"`,
		);
	}

	const result: ProjectValidationResult = {
		dockerfilePath: opts.dockerfilePath || '',
		registrySecrets: {},
	};

	if (opts.dockerfilePath) {
		result.dockerfilePath = await validateSpecifiedDockerfile(
			opts.projectPath,
			opts.dockerfilePath,
		);
	} else {
		const files = await fs.readdir(opts.projectPath);
		const projectMatch = (file: string) =>
			/^(Dockerfile|Dockerfile\.\S+|docker-compose.ya?ml|package.json)$/.test(
				file,
			);
		if (!_.some(files, projectMatch)) {
			throw new ExpectedError(stripIndent`
				Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file
				found in source folder "${opts.projectPath}"
			`);
		}
		if (!opts.noParentCheck) {
			const checkCompose = async (folder: string) => {
				return _.some(
					await Promise.all(
						compositionFileNames.map(filename =>
							fs.exists(path.join(folder, filename)),
						),
					),
				);
			};
			const [hasCompose, hasParentCompose] = await Promise.all([
				checkCompose(opts.projectPath),
				checkCompose(path.join(opts.projectPath, '..')),
			]);
			if (!hasCompose && hasParentCompose) {
				Logger.getLogger().logWarn(stripIndent`
					"docker-compose.y[a]ml" file found in parent directory: please check
					that the correct folder was specified. (Suppress with '--noparent-check'.)
				`);
			}
		}
	}
	result.registrySecrets = await getRegistrySecrets(
		sdk,
		opts.registrySecretsPath,
	);

	return result;
}
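
/*
 * Example usage of validateProjectDirectory() (a sketch; assumes an
 * initialized BalenaSDK instance `sdk`):
 *
 *     const { dockerfilePath, registrySecrets } = await validateProjectDirectory(
 *         sdk,
 *         {
 *             dockerfilePath: 'MyDockerfile.template',
 *             noParentCheck: false,
 *             projectPath: '/home/me/myApp',
 *             registrySecretsPath: undefined,
 *         },
 *     );
 */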