Merge pull request #1571 from balena-io/1421-validate-project-dir

Add project directory validation for balena push / build / deploy
This commit is contained in:
Paulo Castro 2020-02-17 15:50:03 +00:00 committed by GitHub
commit 59b9429570
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 1043 additions and 534 deletions

3
.gitattributes vendored
View File

@ -7,5 +7,6 @@
# lf for the docs as it's auto-generated and will otherwise trigger an uncommitted error on windows
doc/cli.markdown text eol=lf
# crlf for the for the windows-crlf test file
# crlf for the eol conversion test files
tests/test-data/projects/docker-compose/basic/service2/file2-crlf.sh eol=crlf
tests/test-data/projects/no-docker-compose/basic/src/windows-crlf.sh eol=crlf

View File

@ -1761,6 +1761,10 @@ Alternative Dockerfile name/path, relative to the source folder
Don't use cache when building this project
#### --noparent-check
Disable project validation check of 'docker-compose.yml' file in parent folder
#### --registry-secrets, -R <secrets.yml|.json>
Path to a local YAML or JSON file containing Docker registry passwords used to pull base images.
@ -1928,6 +1932,10 @@ Alternative Dockerfile name/path, relative to the source folder
Display full log output
#### --noparent-check
Disable project validation check of 'docker-compose.yml' file in parent folder
#### --registry-secrets, -R <secrets.yml|.json>
Path to a YAML or JSON file with passwords for a private Docker registry
@ -2061,6 +2069,10 @@ Alternative Dockerfile name/path, relative to the source folder
Display full log output
#### --noparent-check
Disable project validation check of 'docker-compose.yml' file in parent folder
#### --registry-secrets, -R <secrets.yml|.json>
Path to a YAML or JSON file with passwords for a private Docker registry

View File

@ -1,3 +1,20 @@
###*
# @license
# Copyright 2016-2020 Balena Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
# Imported here because it's needed for the setup
# of this action
Promise = require('bluebird')
@ -15,13 +32,8 @@ Opts must be an object with the following keys:
buildOpts: arguments to forward to docker build command
###
buildProject = (docker, logger, composeOpts, opts) ->
compose.loadProject(
logger
composeOpts.projectPath
composeOpts.projectName
undefined # image: name of pre-built image
composeOpts.dockerfilePath # ok if undefined
)
{ loadProject } = require('../utils/compose_ts')
Promise.resolve(loadProject(logger, composeOpts))
.then (project) ->
appType = opts.app?.application_type?[0]
if appType? and project.descriptors.length > 1 and not appType.supports_multicontainer
@ -106,8 +118,8 @@ module.exports =
require('events').defaultMaxListeners = 1000
sdk = (require('balena-sdk')).fromSharedOptions()
{ validateComposeOptions } = require('../utils/compose_ts')
{ exitWithExpectedError } = require('../utils/patterns')
{ ExpectedError } = require('../errors')
{ validateProjectDirectory } = require('../utils/compose_ts')
helpers = require('../utils/helpers')
Logger = require('../utils/logger')
@ -122,12 +134,21 @@ module.exports =
options.convertEol = options['convert-eol'] || false
delete options['convert-eol']
Promise.resolve(validateComposeOptions(sdk, options))
.then ->
{ application, arch, deviceType } = options
{ application, arch, deviceType } = options
Promise.try ->
if (not (arch? and deviceType?) and not application?) or (application? and (arch? or deviceType?))
exitWithExpectedError('You must specify either an application or an arch/deviceType pair to build for')
throw new ExpectedError('You must specify either an application or an arch/deviceType pair to build for')
.then ->
validateProjectDirectory(sdk, {
dockerfilePath: options.dockerfile,
noParentCheck: options['noparent-check'] || false,
projectPath: options.source || '.',
registrySecretsPath: options['registry-secrets'],
})
.then ({ dockerfilePath, registrySecrets }) ->
options.dockerfile = dockerfilePath
options['registry-secrets'] = registrySecrets
if arch? and deviceType?
[ undefined, arch, deviceType ]

View File

@ -1,3 +1,20 @@
###*
# @license
# Copyright 2016-2020 Balena Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
# Imported here because it's needed for the setup
# of this action
Promise = require('bluebird')
@ -21,14 +38,9 @@ deployProject = (docker, logger, composeOpts, opts) ->
_ = require('lodash')
doodles = require('resin-doodles')
sdk = require('balena-sdk').fromSharedOptions()
{ loadProject } = require('../utils/compose_ts')
compose.loadProject(
logger
composeOpts.projectPath
composeOpts.projectName
opts.image
composeOpts.dockerfilePath # ok if undefined
)
Promise.resolve(loadProject(logger, composeOpts, opts.image))
.then (project) ->
if project.descriptors.length > 1 and !opts.app.application_type?[0]?.supports_multicontainer
throw new Error('Target application does not support multiple containers. Aborting!')
@ -184,7 +196,8 @@ module.exports =
# compositions with many services trigger misleading warnings
require('events').defaultMaxListeners = 1000
sdk = (require('balena-sdk')).fromSharedOptions()
{ validateComposeOptions } = require('../utils/compose_ts')
{ ExpectedError } = require('../errors')
{ validateProjectDirectory } = require('../utils/compose_ts')
helpers = require('../utils/helpers')
Logger = require('../utils/logger')
@ -204,13 +217,22 @@ module.exports =
if options.convertEol and not options.build
return done(new ExpectedError('The --eol-conversion flag is only valid with --build.'))
Promise.resolve(validateComposeOptions(sdk, options))
.then ->
Promise.try ->
if not appName?
throw new Error('Please specify the name of the application to deploy')
throw new ExpectedError('Please specify the name of the application to deploy')
if image? and options.build
throw new Error('Build option is not applicable when specifying an image')
throw new ExpectedError('Build option is not applicable when specifying an image')
.then ->
validateProjectDirectory(sdk, {
dockerfilePath: options.dockerfile,
noParentCheck: options['noparent-check'] || false,
projectPath: options.source || '.',
registrySecretsPath: options['registry-secrets'],
})
.then ({ dockerfilePath, registrySecrets }) ->
options.dockerfile = dockerfilePath
options['registry-secrets'] = registrySecrets
Promise.join(
helpers.getApplication(appName)

View File

@ -18,6 +18,7 @@ import { BalenaSDK } from 'balena-sdk';
import { CommandDefinition } from 'capitano';
import { stripIndent } from 'common-tags';
import { ExpectedError } from '../errors';
import { registrySecretsHelp } from '../utils/messages';
import {
validateApplicationName,
@ -44,9 +45,7 @@ function getBuildTarget(appOrDevice: string): BuildTarget | null {
}
async function getAppOwner(sdk: BalenaSDK, appName: string) {
const { exitWithExpectedError, selectFromList } = await import(
'../utils/patterns'
);
const { selectFromList } = await import('../utils/patterns');
const _ = await import('lodash');
const applications = await sdk.models.application.getAll({
@ -62,7 +61,7 @@ async function getAppOwner(sdk: BalenaSDK, appName: string) {
});
if (applications == null || applications.length === 0) {
exitWithExpectedError(
throw new ExpectedError(
stripIndent`
No applications found with name: ${appName}.
@ -107,6 +106,7 @@ export const push: CommandDefinition<
emulated?: boolean;
dockerfile?: string; // DeviceDeployOptions.dockerfilePath (alternative Dockerfile)
nocache?: boolean;
'noparent-check'?: boolean;
'registry-secrets'?: string;
nolive?: boolean;
detached?: boolean;
@ -188,6 +188,12 @@ export const push: CommandDefinition<
description: "Don't use cache when building this project",
boolean: true,
},
{
signature: 'noparent-check',
description:
"Disable project validation check of 'docker-compose.yml' file in parent folder",
boolean: true,
},
{
signature: 'registry-secrets',
alias: 'R',
@ -253,24 +259,20 @@ export const push: CommandDefinition<
boolean: true,
},
],
async action(params, options, done) {
async action(params, options) {
const sdk = (await import('balena-sdk')).fromSharedOptions();
const Bluebird = await import('bluebird');
const isArray = await import('lodash/isArray');
const remote = await import('../utils/remote-build');
const deviceDeploy = await import('../utils/device/deploy');
const { exitIfNotLoggedIn, exitWithExpectedError } = await import(
'../utils/patterns'
);
const { validateSpecifiedDockerfile, getRegistrySecrets } = await import(
'../utils/compose_ts'
);
const { checkLoggedIn } = await import('../utils/patterns');
const { validateProjectDirectory } = await import('../utils/compose_ts');
const { BuildError } = await import('../utils/device/errors');
const appOrDevice: string | null =
params.applicationOrDevice_raw || params.applicationOrDevice;
if (appOrDevice == null) {
exitWithExpectedError('You must specify an application or a device');
throw new ExpectedError('You must specify an application or a device');
}
const source = options.source || '.';
@ -278,14 +280,14 @@ export const push: CommandDefinition<
console.error(`[debug] Using ${source} as build source`);
}
const dockerfilePath = validateSpecifiedDockerfile(
source,
options.dockerfile,
);
const registrySecrets = await getRegistrySecrets(
const { dockerfilePath, registrySecrets } = await validateProjectDirectory(
sdk,
options['registry-secrets'],
{
dockerfilePath: options.dockerfile,
noParentCheck: options['noparent-check'] || false,
projectPath: source,
registrySecretsPath: options['registry-secrets'],
},
);
const buildTarget = getBuildTarget(appOrDevice);
@ -293,28 +295,28 @@ export const push: CommandDefinition<
case BuildTarget.Cloud:
// Ensure that the live argument has not been passed to a cloud build
if (options.nolive != null) {
exitWithExpectedError(
throw new ExpectedError(
'The --nolive flag is only valid when pushing to a local mode device',
);
}
if (options.service) {
exitWithExpectedError(
throw new ExpectedError(
'The --service flag is only valid when pushing to a local mode device.',
);
}
if (options.system) {
exitWithExpectedError(
throw new ExpectedError(
'The --system flag is only valid when pushing to a local mode device.',
);
}
if (options.env) {
exitWithExpectedError(
throw new ExpectedError(
'The --env flag is only valid when pushing to a local mode device.',
);
}
const app = appOrDevice;
await exitIfNotLoggedIn();
await checkLoggedIn();
await Bluebird.join(
sdk.auth.getToken(),
sdk.settings.get('balenaUrl'),
@ -339,7 +341,7 @@ export const push: CommandDefinition<
};
return await remote.startRemoteBuild(args);
},
).nodeify(done);
);
break;
case BuildTarget.Device:
const device = appOrDevice;
@ -357,6 +359,7 @@ export const push: CommandDefinition<
dockerfilePath,
registrySecrets,
nocache: options.nocache || false,
noParentCheck: options['noparent-check'] || false,
nolive: options.nolive || false,
detached: options.detached || false,
services: servicesToDisplay,
@ -367,23 +370,20 @@ export const push: CommandDefinition<
: options.env || [],
convertEol: options['convert-eol'] || false,
}),
)
.catch(BuildError, e => {
exitWithExpectedError(e.toString());
})
.nodeify(done);
).catch(BuildError, e => {
throw new ExpectedError(e.toString());
});
break;
default:
exitWithExpectedError(
throw new ExpectedError(
stripIndent`
Build target not recognised. Please provide either an application name or device address.
Build target not recognized. Please provide either an application name or device address.
The only supported device addresses currently are IP addresses.
If you believe your build target should have been detected, and this is an error, please
create an issue.`,
);
break;
}
},
};

View File

@ -46,6 +46,11 @@ exports.appendOptions = (opts) ->
description: 'Display full log output'
boolean: true
},
{
signature: 'noparent-check'
description: 'Disable project validation check of \'docker-compose.yml\' file in parent folder'
boolean: true
},
{
signature: 'registry-secrets'
alias: 'R'
@ -69,23 +74,12 @@ exports.generateOpts = (options) ->
projectPath: projectPath
inlineLogs: !!options.logs
dockerfilePath: options.dockerfile
compositionFileNames = [
'docker-compose.yml'
'docker-compose.yaml'
]
# look into the given directory for valid compose files and return
# the contents of the first one found.
exports.resolveProject = resolveProject = (rootDir) ->
fs = require('mz/fs')
Promise.any compositionFileNames.map (filename) ->
fs.readFile(path.join(rootDir, filename), 'utf-8')
noParentCheck: options['noparent-check']
# Parse the given composition and return a structure with info. Input is:
# - composePath: the *absolute* path to the directory containing the compose file
# - composeStr: the contents of the compose file, as a string
createProject = (composePath, composeStr, projectName = null) ->
exports.createProject = (composePath, composeStr, projectName = null) ->
yml = require('js-yaml')
compose = require('resin-compose-parse')
@ -107,39 +101,6 @@ createProject = (composePath, composeStr, projectName = null) ->
descriptors
}
# high-level function resolving a project and creating a composition out
# of it in one go. if image is given, it'll create a default project for
# that without looking for a project. falls back to creating a default
# project if none is found at the given projectPath.
exports.loadProject = (logger, projectPath, projectName, image, dockerfilePath) ->
{ validateSpecifiedDockerfile } = require('./compose_ts')
compose = require('resin-compose-parse')
logger.logDebug('Loading project...')
Promise.try ->
dockerfilePath = validateSpecifiedDockerfile(projectPath, dockerfilePath)
if image?
logger.logInfo("Creating default composition with image: #{image}")
return compose.defaultComposition(image)
logger.logDebug('Resolving project...')
resolveProject(projectPath)
.tap ->
if dockerfilePath
logger.logWarn("Ignoring alternative dockerfile \"#{dockerfilePath}\"\ because a docker-compose file exists")
else
logger.logInfo('Compose file detected')
.catch (e) ->
logger.logDebug("Failed to resolve project: #{e}")
logger.logInfo("Creating default composition with source: #{projectPath}")
return compose.defaultComposition(undefined, dockerfilePath)
.then (composeStr) ->
logger.logDebug('Creating project...')
createProject(projectPath, composeStr, projectName)
exports.tarDirectory = tarDirectory = (dir, { preFinalizeCallback, convertEol } = {}) ->
preFinalizeCallback ?= null
convertEol ?= false

View File

@ -32,7 +32,13 @@ interface Descriptor {
serviceName: string;
}
export function resolveProject(projectRoot: string): Bluebird<string>;
/**
 * Options shared by the 'build', 'deploy' and 'push' commands for
 * locating and loading a docker-compose project.
 */
export interface ComposeOpts {
/** Alternative Dockerfile path, relative to the project source folder */
dockerfilePath?: string;
/** Whether to print service build logs inline (--logs) */
inlineLogs?: boolean;
/** Skip the check/warning for a docker-compose file in the parent folder */
noParentCheck: boolean;
/** Project name, forwarded to createProject() when building the composition */
projectName: string;
/** Path to the project source folder (-s/--source) */
projectPath: string;
}
export interface ComposeProject {
path: string;
@ -41,13 +47,11 @@ export interface ComposeProject {
descriptors: Descriptor[];
}
export function loadProject(
logger: Logger,
projectPath: string,
projectName: string,
image?: string,
dockerfilePath?: string,
): Bluebird<ComposeProject>;
export function createProject(
composePath: string,
composeStr: string,
projectName: string | null = null,
): ComposeProject;
interface TarDirectoryOptions {
preFinalizeCallback?: (pack: Pack) => void;

View File

@ -1,6 +1,6 @@
/**
* @license
* Copyright 2018 Balena Ltd.
* Copyright 2018-2020 Balena Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -14,19 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BalenaSDK } from 'balena-sdk';
import * as Bluebird from 'bluebird';
import { stripIndent } from 'common-tags';
import Dockerode = require('dockerode');
import * as _ from 'lodash';
import { fs } from 'mz';
import * as path from 'path';
import { Composition } from 'resin-compose-parse';
import * as MultiBuild from 'resin-multibuild';
import { Readable } from 'stream';
import * as tar from 'tar-stream';
import { BalenaSDK } from 'balena-sdk';
import { ExpectedError } from '../errors';
import { DeviceInfo } from './device/api';
import Logger = require('./logger');
import { exitWithExpectedError } from './patterns';
export interface RegistrySecrets {
[registryAddress: string]: {
@ -35,17 +37,88 @@ export interface RegistrySecrets {
};
}
const compositionFileNames = ['docker-compose.yml', 'docker-compose.yaml'];
/**
 * High-level function resolving a project and creating a composition out
 * of it in one go. If `image` is given, a default composition is created
 * for that image without looking for a project. Falls back to creating a
 * default composition if no docker-compose file is found at the given
 * projectPath.
 *
 * @param logger Logger instance for debug/info/warn output
 * @param opts Compose options: projectPath, projectName and an optional
 *             alternative dockerfilePath
 * @param image Optional pre-built image name: when given, project
 *              resolution is skipped entirely
 * @returns The loaded ComposeProject
 */
export async function loadProject(
logger: Logger,
opts: import('./compose').ComposeOpts,
image?: string,
): Promise<import('./compose').ComposeProject> {
const compose = await import('resin-compose-parse');
const { createProject } = await import('./compose');
let composeName: string;
let composeStr: string;
logger.logDebug('Loading project...');
if (image) {
logger.logInfo(`Creating default composition with image: "${image}"`);
composeStr = compose.defaultComposition(image);
} else {
logger.logDebug('Resolving project...');
// composeName is '' when no docker-compose file was found
[composeName, composeStr] = await resolveProject(logger, opts.projectPath);
if (composeName) {
if (opts.dockerfilePath) {
// A composition file takes precedence over --dockerfile
logger.logWarn(
`Ignoring alternative dockerfile "${opts.dockerfilePath}" because composition file "${composeName}" exists`,
);
}
} else {
logger.logInfo(
`Creating default composition with source: "${opts.projectPath}"`,
);
composeStr = compose.defaultComposition(undefined, opts.dockerfilePath);
}
}
logger.logDebug('Creating project...');
return createProject(opts.projectPath, composeStr, opts.projectName);
}
/**
 * Look into the given directory for the first of the known composition
 * file names ('docker-compose.yml' / 'docker-compose.yaml') and return
 * its name and contents.
 *
 * @param logger Logger instance for debug/info/error output
 * @param projectRoot Folder to search for a composition file
 * @returns [composeFileName, composeFileContents] — both are empty
 *          strings when no composition file is found
 */
async function resolveProject(
logger: Logger,
projectRoot: string,
): Promise<[string, string]> {
let composeFileName = '';
let composeFileContents = '';
for (const fname of compositionFileNames) {
const fpath = path.join(projectRoot, fname);
if (await fs.exists(fpath)) {
logger.logDebug(`${fname} file found at "${projectRoot}"`);
composeFileName = fname;
try {
composeFileContents = await fs.readFile(fpath, 'utf-8');
} catch (err) {
// A file that exists but cannot be read is a hard error
logger.logError(`Error reading composition file "${fpath}":\n${err}`);
throw err;
}
// Only the first composition file found is used
break;
}
}
if (!composeFileName) {
logger.logInfo(`No "docker-compose.yml" file found at "${projectRoot}"`);
}
return [composeFileName, composeFileContents];
}
/**
* Load the ".balena/balena.yml" file (or resin.yml, or yaml or json),
* which contains "build metadata" for features like "build secrets" and
* "build variables".
* @returns Pair of metadata object and metadata file path
*/
export async function loadBuildMetatada(
async function loadBuildMetatada(
sourceDir: string,
): Promise<[MultiBuild.ParsedBalenaYml, string]> {
const { fs } = await import('mz');
const path = await import('path');
let metadataPath = '';
let rawString = '';
@ -76,7 +149,7 @@ export async function loadBuildMetatada(
buildMetadata = require('js-yaml').safeLoad(rawString);
}
} catch (err) {
return exitWithExpectedError(
throw new ExpectedError(
`Error parsing file "${metadataPath}":\n ${err.message}`,
);
}
@ -86,7 +159,7 @@ export async function loadBuildMetatada(
/**
* Check whether the "build secrets" feature is being used and, if so,
* verify that the target docker daemon is balenaEngine. If the
* requirement is not satisfied, call exitWithExpectedError().
* requirement is not satisfied, reject with an ExpectedError.
* @param docker Dockerode instance
* @param sourceDir Project directory where to find .balena/balena.yml
*/
@ -99,7 +172,7 @@ export async function checkBuildSecretsRequirements(
const dockerUtils = await import('./docker');
const isBalenaEngine = await dockerUtils.isBalenaEngine(docker);
if (!isBalenaEngine) {
exitWithExpectedError(stripIndent`
throw new ExpectedError(stripIndent`
The "build secrets" feature currently requires balenaEngine, but a standard Docker
daemon was detected. Please use command-line options to specify the hostname and
port number (or socket path) of a balenaEngine daemon, running on a balena device
@ -115,23 +188,20 @@ export async function getRegistrySecrets(
sdk: BalenaSDK,
inputFilename?: string,
): Promise<RegistrySecrets> {
const { fs } = await import('mz');
const Path = await import('path');
if (inputFilename != null) {
return await parseRegistrySecrets(inputFilename);
}
const directory = await sdk.settings.get('dataDirectory');
const potentialPaths = [
Path.join(directory, 'secrets.yml'),
Path.join(directory, 'secrets.yaml'),
Path.join(directory, 'secrets.json'),
path.join(directory, 'secrets.yml'),
path.join(directory, 'secrets.yaml'),
path.join(directory, 'secrets.json'),
];
for (const path of potentialPaths) {
if (await fs.exists(path)) {
return await parseRegistrySecrets(path);
for (const potentialPath of potentialPaths) {
if (await fs.exists(potentialPath)) {
return await parseRegistrySecrets(potentialPath);
}
}
@ -141,7 +211,6 @@ export async function getRegistrySecrets(
async function parseRegistrySecrets(
secretsFilename: string,
): Promise<RegistrySecrets> {
const { fs } = await import('mz');
try {
let isYaml = false;
if (/.+\.ya?ml$/i.test(secretsFilename)) {
@ -156,27 +225,12 @@ async function parseRegistrySecrets(
MultiBuild.addCanonicalDockerHubEntry(registrySecrets);
return registrySecrets;
} catch (error) {
return exitWithExpectedError(
throw new ExpectedError(
`Error validating registry secrets file "${secretsFilename}":\n${error.message}`,
);
}
}
/**
* Validate the compose-specific command-line options defined in compose.coffee.
* This function is meant to be called very early on to validate users' input,
* before any project loading / building / deploying.
*/
export async function validateComposeOptions(
sdk: BalenaSDK,
options: { [opt: string]: any },
) {
options['registry-secrets'] = await getRegistrySecrets(
sdk,
options['registry-secrets'],
);
}
/**
* Create a BuildTask array of "resolved build tasks" by calling multibuild
* .splitBuildStream() and performResolution(), and add build stream error
@ -264,63 +318,161 @@ async function performResolution(
* Enforce that, for example, if 'myProject/MyDockerfile.template' is specified
* as an alternativate Dockerfile name, then 'myProject/MyDockerfile' must not
* exist.
* Return the tar stream path (Posix, normalized) for the given dockerfilePath.
* For example, on Windows, given a dockerfilePath of 'foo\..\bar\Dockerfile',
* return 'bar/Dockerfile'. On Linux, given './bar/Dockerfile', return 'bar/Dockerfile'.
*
* @param projectPath The project source folder (-s command-line option)
* @param dockerfilePath The alternative Dockerfile specified by the user
* @return A normalized posix representation of dockerfilePath
*/
export function validateSpecifiedDockerfile(
async function validateSpecifiedDockerfile(
projectPath: string,
dockerfilePath: string = '',
): string {
if (!dockerfilePath) {
return dockerfilePath;
}
const { isAbsolute, join, normalize, parse, posix } = require('path');
const { existsSync } = require('fs');
dockerfilePath: string,
): Promise<string> {
const { contains, toNativePath, toPosixPath } = MultiBuild.PathUtils;
const nativeProjectPath = path.normalize(projectPath);
const nativeDockerfilePath = path.normalize(toNativePath(dockerfilePath));
// reminder: native windows paths may start with a drive specification,
// e.g. 'C:\absolute' or 'C:relative'.
if (isAbsolute(dockerfilePath) || posix.isAbsolute(dockerfilePath)) {
exitWithExpectedError(stripIndent`
Error: absolute Dockerfile path detected:
"${dockerfilePath}"
The Dockerfile path should be relative to the source folder.
`);
}
const nativeProjectPath = normalize(projectPath);
const nativeDockerfilePath = join(projectPath, toNativePath(dockerfilePath));
if (!contains(nativeProjectPath, nativeDockerfilePath)) {
// Note that testing the existence of nativeDockerfilePath in the
// filesystem (after joining its path to the source folder) is not
// sufficient, because the user could have added '../' to the path.
exitWithExpectedError(stripIndent`
Error: the specified Dockerfile must be in a subfolder of the source folder:
if (path.isAbsolute(nativeDockerfilePath)) {
throw new ExpectedError(stripIndent`
Error: the specified Dockerfile cannot be an absolute path. The path must be
relative to, and not a parent folder of, the project's source folder.
Specified dockerfile: "${nativeDockerfilePath}"
Source folder: "${nativeProjectPath}"
Project's source folder: "${nativeProjectPath}"
`);
}
if (!existsSync(nativeDockerfilePath)) {
exitWithExpectedError(stripIndent`
Error: Dockerfile not found: "${nativeDockerfilePath}"
// note that path.normalize('a/../../b') results in '../b'
if (nativeDockerfilePath.startsWith('..')) {
throw new ExpectedError(stripIndent`
Error: the specified Dockerfile cannot be in a parent folder of the project's
source folder. Note that the path should be relative to the project's source
folder, not the current folder.
Specified dockerfile: "${nativeDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
`);
}
const { dir, ext, name } = parse(nativeDockerfilePath);
const fullDockerfilePath = path.join(nativeProjectPath, nativeDockerfilePath);
if (!(await fs.exists(fullDockerfilePath))) {
throw new ExpectedError(stripIndent`
Error: specified Dockerfile not found:
Specified dockerfile: "${fullDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
Note that the specified Dockerfile path should be relative to the source folder.
`);
}
if (!contains(nativeProjectPath, fullDockerfilePath)) {
throw new ExpectedError(stripIndent`
Error: the specified Dockerfile must be in a subfolder of the source folder:
Specified dockerfile: "${fullDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
`);
}
const { dir, ext, name } = path.parse(fullDockerfilePath);
if (ext) {
const nativePathMinusExt = join(dir, name);
if (existsSync(nativePathMinusExt)) {
exitWithExpectedError(stripIndent`
Error: "${name}" exists on the same folder as "${dockerfilePath}".
When an alternative Dockerfile name is specified, a file with the same
base name (minus the file extension) must not exist in the same folder.
This is because the base name file will be auto generated and added to
the tar stream that is sent to the docker daemon, resulting in duplicate
Dockerfiles and undefined behavior.
const nativePathMinusExt = path.join(dir, name);
if (await fs.exists(nativePathMinusExt)) {
throw new ExpectedError(stripIndent`
Error: "${name}" exists on the same folder as "${nativeDockerfilePath}".
When an alternative Dockerfile name is specified, a file with the same base name
(minus the file extension) must not exist in the same folder. This is because
the base name file will be auto generated and added to the tar stream that is
sent to balenaEngine or the Docker daemon, resulting in duplicate Dockerfiles
and undefined behavior.
`);
}
}
return posix.normalize(toPosixPath(dockerfilePath));
return toPosixPath(nativeDockerfilePath);
}
export interface ProjectValidationResult {
/** Posix-normalized tar-stream path for an alternative Dockerfile ('' if none) */
dockerfilePath: string;
/** Parsed Docker registry secrets (empty object if none were provided) */
registrySecrets: RegistrySecrets;
}
/**
 * Perform "sanity checks" on the project directory, e.g. for the existence
 * of a 'Dockerfile[.*]' or 'docker-compose.yml' file or 'package.json' file.
 * Also validate registry secrets if any, and perform checks around an
 * alternative specified dockerfile (--dockerfile) if any.
 *
 * @param sdk Balena SDK instance (used to locate default registry secrets)
 * @param opts.dockerfilePath Alternative Dockerfile (--dockerfile), if any
 * @param opts.noParentCheck Suppress the parent-folder docker-compose warning
 * @param opts.projectPath Project source folder (-s/--source)
 * @param opts.registrySecretsPath Path to a registry secrets file (-R), if any
 * @returns The parsed registry secrets if any, and the "tar stream path" for
 * an alternative specified Dockerfile if any (see validateSpecifiedDockerfile())
 * @throws ExpectedError if the source folder is inaccessible, contains no
 * recognizable project file, or the alternative Dockerfile is invalid
 */
export async function validateProjectDirectory(
sdk: BalenaSDK,
opts: {
dockerfilePath?: string;
noParentCheck: boolean;
projectPath: string;
registrySecretsPath?: string;
},
): Promise<ProjectValidationResult> {
// The source folder must exist and be a directory
if (
!(await fs.exists(opts.projectPath)) ||
!(await fs.stat(opts.projectPath)).isDirectory()
) {
throw new ExpectedError(
`Could not access source folder: "${opts.projectPath}"`,
);
}
const result: ProjectValidationResult = {
dockerfilePath: opts.dockerfilePath || '',
registrySecrets: {},
};
if (opts.dockerfilePath) {
result.dockerfilePath = await validateSpecifiedDockerfile(
opts.projectPath,
opts.dockerfilePath,
);
} else {
// No explicit Dockerfile: check that the folder looks like a project
const files = await fs.readdir(opts.projectPath);
const projectMatch = (file: string) =>
/^(Dockerfile|Dockerfile\.\S+|docker-compose.ya?ml|package.json)$/.test(
file,
);
if (!_.some(files, projectMatch)) {
throw new ExpectedError(stripIndent`
Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file
found in source folder "${opts.projectPath}"
`);
}
if (!opts.noParentCheck) {
// Warn (don't fail) if a docker-compose file exists in the parent
// folder but not in the project folder itself — a common sign that
// the wrong folder was passed to -s/--source.
const checkCompose = async (folder: string) => {
return _.some(
await Promise.all(
compositionFileNames.map(filename =>
fs.exists(path.join(folder, filename)),
),
),
);
};
const [hasCompose, hasParentCompose] = await Promise.all([
checkCompose(opts.projectPath),
checkCompose(path.join(opts.projectPath, '..')),
]);
if (!hasCompose && hasParentCompose) {
Logger.getLogger().logWarn(stripIndent`
"docker-compose.y[a]ml" file found in parent directory: please check
that the correct folder was specified. (Suppress with '--noparent-check'.)
`);
}
}
}
result.registrySecrets = await getRegistrySecrets(
sdk,
opts.registrySecretsPath,
);
return result;
}

View File

@ -29,7 +29,11 @@ import * as semver from 'resin-semver';
import { Readable } from 'stream';
import { BALENA_ENGINE_TMP_PATH } from '../../config';
import { checkBuildSecretsRequirements, makeBuildTasks } from '../compose_ts';
import {
checkBuildSecretsRequirements,
loadProject,
makeBuildTasks,
} from '../compose_ts';
import { workaroundWindowsDnsIssue } from '../helpers';
import Logger = require('../logger');
import { DeviceAPI, DeviceInfo } from './api';
@ -49,6 +53,7 @@ export interface DeviceDeployOptions {
dockerfilePath?: string;
registrySecrets: RegistrySecrets;
nocache: boolean;
noParentCheck: boolean;
nolive: boolean;
detached: boolean;
services?: string[];
@ -61,11 +66,6 @@ interface ParsedEnvironment {
[serviceName: string]: { [key: string]: string };
}
async function checkSource(source: string): Promise<boolean> {
const { fs } = await import('mz');
return (await fs.exists(source)) && (await fs.stat(source)).isDirectory();
}
async function environmentFromInput(
envs: string[],
serviceNames: string[],
@ -117,15 +117,10 @@ async function environmentFromInput(
}
export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
const { loadProject, tarDirectory } = await import('../compose');
const { tarDirectory } = await import('../compose');
const { exitWithExpectedError } = await import('../patterns');
const { displayDeviceLogs } = await import('./logs');
if (!(await checkSource(opts.source))) {
exitWithExpectedError(`Could not access source directory: ${opts.source}`);
}
const api = new DeviceAPI(globalLogger, opts.deviceHost);
// First check that we can access the device with a ping
@ -171,13 +166,12 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
globalLogger.logInfo(`Starting build on device ${opts.deviceHost}`);
const project = await loadProject(
globalLogger,
opts.source, // project path
'local', // project name
undefined, // name of a pre-built image
opts.dockerfilePath, // alternative Dockerfile; OK to be undefined
);
const project = await loadProject(globalLogger, {
dockerfilePath: opts.dockerfilePath,
noParentCheck: opts.noParentCheck,
projectName: 'local',
projectPath: opts.source,
});
// Attempt to attach to the device's docker daemon
const docker = connectToDocker(

View File

@ -19,43 +19,43 @@
require('../config-tests'); // required for side effects
import { expect } from 'chai';
import * as _ from 'lodash';
import { fs } from 'mz';
import * as path from 'path';
import { URL } from 'url';
import { BalenaAPIMock } from '../balena-api-mock';
import { DockerMock, dockerResponsePath } from '../docker-mock';
import {
cleanOutput,
ExpectedTarStreamFiles,
ExpectedTarStreamFilesByService,
expectStreamNoCRLF,
inspectTarStream,
runCommand,
TarStreamFiles,
} from '../helpers';
testDockerBuildStream,
} from '../docker-build';
import { DockerMock, dockerResponsePath } from '../docker-mock';
import { cleanOutput, runCommand } from '../helpers';
const repoPath = path.normalize(path.join(__dirname, '..', '..'));
const projectsPath = path.join(repoPath, 'tests', 'test-data', 'projects');
const expectedResponses = {
const commonResponseLines: { [key: string]: string[] } = {
'build-POST.json': [
'[Info] Building for amd64/nuc',
'[Info] Docker Desktop detected (daemon architecture: "x86_64")',
'[Info] Docker itself will determine and enable architecture emulation if required,',
'[Info] without balena-cli intervention and regardless of the --emulated option.',
'[Build] main Image size: 1.14 MB',
'[Success] Build succeeded!',
],
};
const commonQueryParams = [
['t', '${tag}'],
['buildargs', '{}'],
['labels', ''],
];
describe('balena build', function() {
let api: BalenaAPIMock;
let docker: DockerMock;
const commonQueryParams = [
['t', 'basic_main'],
['buildargs', '{}'],
['labels', ''],
];
const isWindows = process.platform === 'win32';
this.beforeEach(() => {
api = new BalenaAPIMock();
@ -65,7 +65,6 @@ describe('balena build', function() {
docker.expectGetPing();
docker.expectGetInfo();
docker.expectGetVersion();
docker.expectGetImages();
});
this.afterEach(() => {
@ -76,7 +75,7 @@ describe('balena build', function() {
it('should create the expected tar stream (single container)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
const expectedFiles: ExpectedTarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
@ -87,55 +86,44 @@ describe('balena build', function() {
path.join(dockerResponsePath, responseFilename),
'utf8',
);
docker.expectPostBuild({
tag: 'basic_main',
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
const { out, err } = await runCommand(
`build ${projectPath} --deviceType nuc --arch amd64`,
);
const extraLines = [
`[Info] Creating default composition with source: ${projectPath}`,
const expectedResponseLines = [
...commonResponseLines[responseFilename],
`[Info] No "docker-compose.yml" file found at "${projectPath}"`,
`[Info] Creating default composition with source: "${projectPath}"`,
'[Build] main Image size: 1.14 MB',
];
if (process.platform === 'win32') {
extraLines.push(
if (isWindows) {
expectedResponseLines.push(
`[Warn] CRLF (Windows) line endings detected in file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
'[Warn] Windows-format line endings were detected in some files. Consider using the `--convert-eol` option.',
);
}
expect(err).to.have.members([]);
expect(
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members([
...expectedResponses[responseFilename],
...extraLines,
]);
await testDockerBuildStream({
commandLine: `build ${projectPath} --deviceType nuc --arch amd64`,
dockerMock: docker,
expectedFilesByService: { main: expectedFiles },
expectedQueryParamsByService: { main: commonQueryParams },
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
services: ['main'],
});
});
it('should create the expected tar stream (single container, --convert-eol)', async () => {
const windows = process.platform === 'win32';
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
const expectedFiles: ExpectedTarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': {
fileSize: windows ? 68 : 70,
fileSize: isWindows ? 68 : 70,
testStream: isWindows ? expectStreamNoCRLF : undefined,
type: 'file',
testStream: windows ? expectStreamNoCRLF : undefined,
},
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
@ -145,29 +133,14 @@ describe('balena build', function() {
path.join(dockerResponsePath, responseFilename),
'utf8',
);
docker.expectPostBuild({
tag: 'basic_main',
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
const { out, err } = await runCommand(
`build ${projectPath} --deviceType nuc --arch amd64 --convert-eol`,
);
const extraLines = [
`[Info] Creating default composition with source: ${projectPath}`,
const expectedResponseLines = [
...commonResponseLines[responseFilename],
`[Info] No "docker-compose.yml" file found at "${projectPath}"`,
`[Info] Creating default composition with source: "${projectPath}"`,
'[Build] main Image size: 1.14 MB',
];
if (windows) {
extraLines.push(
if (isWindows) {
expectedResponseLines.push(
`[Info] Converting line endings CRLF -> LF for file: ${path.join(
projectPath,
'src',
@ -176,12 +149,101 @@ describe('balena build', function() {
);
}
expect(err).to.have.members([]);
expect(
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members([
...expectedResponses[responseFilename],
...extraLines,
]);
await testDockerBuildStream({
commandLine: `build ${projectPath} --deviceType nuc --arch amd64 --convert-eol`,
dockerMock: docker,
expectedFilesByService: { main: expectedFiles },
expectedQueryParamsByService: { main: commonQueryParams },
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
services: ['main'],
});
});
it('should create the expected tar stream (docker-compose)', async () => {
const projectPath = path.join(projectsPath, 'docker-compose', 'basic');
const service1Dockerfile = (
await fs.readFile(
path.join(projectPath, 'service1', 'Dockerfile.template'),
'utf8',
)
).replace('%%BALENA_MACHINE_NAME%%', 'nuc');
const expectedFilesByService: ExpectedTarStreamFilesByService = {
service1: {
Dockerfile: {
contents: service1Dockerfile,
fileSize: service1Dockerfile.length,
type: 'file',
},
'Dockerfile.template': { fileSize: 144, type: 'file' },
'file1.sh': { fileSize: 12, type: 'file' },
},
service2: {
'Dockerfile-alt': { fileSize: 40, type: 'file' },
'file2-crlf.sh': {
fileSize: isWindows ? 12 : 14,
testStream: isWindows ? expectStreamNoCRLF : undefined,
type: 'file',
},
},
};
const responseFilename = 'build-POST.json';
const responseBody = await fs.readFile(
path.join(dockerResponsePath, responseFilename),
'utf8',
);
const expectedQueryParamsByService = {
service1: commonQueryParams,
service2: [...commonQueryParams, ['dockerfile', 'Dockerfile-alt']],
};
const expectedResponseLines: string[] = [
...commonResponseLines[responseFilename],
`[Build] service1 Image size: 1.14 MB`,
`[Build] service2 Image size: 1.14 MB`,
];
if (isWindows) {
expectedResponseLines.push(
`[Info] Converting line endings CRLF -> LF for file: ${path.join(
projectPath,
'service2',
'file2-crlf.sh',
)}`,
);
}
await testDockerBuildStream({
commandLine: `build ${projectPath} --deviceType nuc --arch amd64 --convert-eol`,
dockerMock: docker,
expectedFilesByService,
expectedQueryParamsByService,
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
services: ['service1', 'service2'],
});
});
});
describe('balena build: project validation', function() {
	it('should raise ExpectedError if a Dockerfile cannot be found', async () => {
		// 'service2' contains only 'Dockerfile-alt' (no 'Dockerfile',
		// 'docker-compose.yml' or 'package.json'), so project type detection
		// is expected to fail when it is used as the source folder.
		const projectPath = path.join(
			projectsPath,
			'docker-compose',
			'basic',
			'service2',
		);
		const expectedErrorLines = [
			'Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file',
			`found in source folder "${projectPath}"`,
		];

		const { out, err } = await runCommand(`build ${projectPath} -a testApp`);
		// The validation error goes to stderr; normalize runs of whitespace
		// before comparing, as the CLI wraps/pads its output.
		expect(
			cleanOutput(err).map(line => line.replace(/\s{2,}/g, ' ')),
		).to.include.members(expectedErrorLines);
		// Nothing should be printed to stdout when validation fails.
		expect(out).to.be.empty;
	});
});

View File

@ -21,20 +21,16 @@ require('../config-tests'); // required for side effects
import { expect } from 'chai';
import { fs } from 'mz';
import * as path from 'path';
import { URL } from 'url';
import { BalenaAPIMock } from '../balena-api-mock';
import { ExpectedTarStreamFiles, testDockerBuildStream } from '../docker-build';
import { DockerMock, dockerResponsePath } from '../docker-mock';
import {
cleanOutput,
inspectTarStream,
runCommand,
TarStreamFiles,
} from '../helpers';
import { cleanOutput, runCommand } from '../helpers';
const repoPath = path.normalize(path.join(__dirname, '..', '..'));
const projectsPath = path.join(repoPath, 'tests', 'test-data', 'projects');
const expectedResponses = {
const commonResponseLines = {
'build-POST.json': [
'[Info] Building for armv7hf/raspberrypi3',
'[Info] Docker Desktop detected (daemon architecture: "x86_64")',
@ -49,15 +45,16 @@ const expectedResponses = {
],
};
const commonQueryParams = [
['t', '${tag}'],
['buildargs', '{}'],
['labels', ''],
];
describe('balena deploy', function() {
let api: BalenaAPIMock;
let docker: DockerMock;
const commonQueryParams = [
['t', 'basic_main'],
['buildargs', '{}'],
['labels', ''],
];
const isWindows = process.platform === 'win32';
this.beforeEach(() => {
api = new BalenaAPIMock();
@ -80,8 +77,7 @@ describe('balena deploy', function() {
docker.expectGetPing();
docker.expectGetInfo();
docker.expectGetVersion();
docker.expectGetImages({ persist: true });
docker.expectGetVersion({ persist: true });
docker.expectPostImagesTag();
docker.expectPostImagesPush();
docker.expectDeleteImages();
@ -95,7 +91,7 @@ describe('balena deploy', function() {
it('should create the expected --build tar stream (single container)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
const expectedFiles: ExpectedTarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
@ -106,43 +102,55 @@ describe('balena deploy', function() {
path.join(dockerResponsePath, responseFilename),
'utf8',
);
docker.expectPostBuild({
tag: 'basic_main',
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
const { out, err } = await runCommand(
`deploy testApp --build --source ${projectPath}`,
);
const extraLines = [
`[Info] Creating default composition with source: ${projectPath}`,
const expectedResponseLines = [
...commonResponseLines[responseFilename],
`[Info] No "docker-compose.yml" file found at "${projectPath}"`,
`[Info] Creating default composition with source: "${projectPath}"`,
];
if (process.platform === 'win32') {
extraLines.push(
if (isWindows) {
expectedResponseLines.push(
`[Warn] CRLF (Windows) line endings detected in file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
'[Warn] Windows-format line endings were detected in some files. Consider using the `--convert-eol` option.',
);
}
expect(err).to.have.members([]);
expect(
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members([
...expectedResponses[responseFilename],
...extraLines,
]);
await testDockerBuildStream({
commandLine: `deploy testApp --build --source ${projectPath}`,
dockerMock: docker,
expectedFilesByService: { main: expectedFiles },
expectedQueryParamsByService: { main: commonQueryParams },
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
services: ['main'],
});
});
});
describe('balena deploy: project validation', function() {
	it('should raise ExpectedError if a Dockerfile cannot be found', async () => {
		// 'service2' contains only 'Dockerfile-alt' (no 'Dockerfile',
		// 'docker-compose.yml' or 'package.json'), so project type detection
		// is expected to fail when it is used as the source folder.
		const projectPath = path.join(
			projectsPath,
			'docker-compose',
			'basic',
			'service2',
		);
		const expectedErrorLines = [
			'Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file',
			`found in source folder "${projectPath}"`,
		];

		const { out, err } = await runCommand(
			`deploy testApp --source ${projectPath}`,
		);
		// The validation error goes to stderr; normalize runs of whitespace
		// before comparing, as the CLI wraps/pads its output.
		expect(
			cleanOutput(err).map(line => line.replace(/\s{2,}/g, ' ')),
		).to.include.members(expectedErrorLines);
		// Nothing should be printed to stdout when validation fails.
		expect(out).to.be.empty;
	});
});

View File

@ -21,22 +21,20 @@ require('../config-tests'); // required for side effects
import { expect } from 'chai';
import { fs } from 'mz';
import * as path from 'path';
import { URL } from 'url';
import { BalenaAPIMock } from '../balena-api-mock';
import { BuilderMock, builderResponsePath } from '../builder-mock';
import {
cleanOutput,
ExpectedTarStreamFiles,
expectStreamNoCRLF,
inspectTarStream,
runCommand,
TarStreamFiles,
} from '../helpers';
testPushBuildStream,
} from '../docker-build';
import { cleanOutput, runCommand } from '../helpers';
const repoPath = path.normalize(path.join(__dirname, '..', '..'));
const projectsPath = path.join(repoPath, 'tests', 'test-data', 'projects');
const expectedResponses = {
const commonResponseLines = {
'build-POST-v3.json': [
'[Info] Starting build for testApp, user gh_user',
'[Info] Dashboard link: https://dashboard.balena-cloud.com/apps/1301645/devices',
@ -66,28 +64,22 @@ const expectedResponses = {
'[Info] ├─────────┼────────────┼────────────┤',
'[Info] │ main │ 1.32 MB │ 11 seconds │',
'[Info] └─────────┴────────────┴────────────┘',
'[Info] Build finished in 20 seconds',
],
};
function tweakOutput(out: string[]): string[] {
return cleanOutput(out).map(line =>
line.replace(/\s{2,}/g, ' ').replace(/in \d+? seconds/, 'in 20 seconds'),
);
}
const commonQueryParams = [
['owner', 'bob'],
['app', 'testApp'],
['dockerfilePath', ''],
['emulated', 'false'],
['nocache', 'false'],
['headless', 'false'],
];
describe('balena push', function() {
let api: BalenaAPIMock;
let builder: BuilderMock;
const commonQueryParams = [
['owner', 'bob'],
['app', 'testApp'],
['dockerfilePath', ''],
['emulated', 'false'],
['nocache', 'false'],
['headless', 'false'],
];
const isWindows = process.platform === 'win32';
this.beforeEach(() => {
api = new BalenaAPIMock();
@ -105,7 +97,7 @@ describe('balena push', function() {
it('should create the expected tar stream (single container)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
const expectedFiles: ExpectedTarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
@ -116,44 +108,33 @@ describe('balena push', function() {
path.join(builderResponsePath, responseFilename),
'utf8',
);
builder.expectPostBuild({
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string | Buffer) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
const { out, err } = await runCommand(
`push testApp --source ${projectPath}`,
);
const extraLines = [];
if (process.platform === 'win32') {
extraLines.push(
const expectedResponseLines = [...commonResponseLines[responseFilename]];
if (isWindows) {
expectedResponseLines.push(
`[Warn] CRLF (Windows) line endings detected in file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
'[Warn] Windows-format line endings were detected in some files. Consider using the `--convert-eol` option.',
);
}
expect(err).to.have.members([]);
expect(tweakOutput(out)).to.include.members([
...expectedResponses[responseFilename],
...extraLines,
]);
await testPushBuildStream({
builderMock: builder,
commandLine: `push testApp --source ${projectPath}`,
expectedFiles,
expectedQueryParams: commonQueryParams,
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
});
});
it('should create the expected tar stream (alternative Dockerfile)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
const expectedFiles: ExpectedTarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
@ -164,44 +145,30 @@ describe('balena push', function() {
path.join(builderResponsePath, responseFilename),
'utf8',
);
const expectedQueryParams = commonQueryParams.map(i =>
i[0] === 'dockerfilePath' ? ['dockerfilePath', 'Dockerfile-alt'] : i,
);
builder.expectPostBuild({
responseCode: 200,
await testPushBuildStream({
builderMock: builder,
commandLine: `push testApp --source ${projectPath} --dockerfile Dockerfile-alt`,
expectedFiles,
expectedQueryParams,
expectedResponseLines: commonResponseLines[responseFilename],
projectPath,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(
commonQueryParams.map(i =>
i[0] === 'dockerfilePath'
? ['dockerfilePath', 'Dockerfile-alt']
: i,
),
);
},
checkBuildRequestBody: (buildRequestBody: string | Buffer) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
responseCode: 200,
});
const { out, err } = await runCommand(
`push testApp --source ${projectPath} --dockerfile Dockerfile-alt`,
);
expect(err).to.have.members([]);
expect(tweakOutput(out)).to.include.members(
expectedResponses[responseFilename],
);
});
it('should create the expected tar stream (single container, --convert-eol)', async () => {
const windows = process.platform === 'win32';
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
const expectedFiles: ExpectedTarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': {
fileSize: windows ? 68 : 70,
fileSize: isWindows ? 68 : 70,
type: 'file',
testStream: windows ? expectStreamNoCRLF : undefined,
testStream: isWindows ? expectStreamNoCRLF : undefined,
},
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
@ -211,26 +178,9 @@ describe('balena push', function() {
path.join(builderResponsePath, responseFilename),
'utf8',
);
builder.expectPostBuild({
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string | Buffer) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
const { out, err } = await runCommand(
`push testApp --source ${projectPath} --convert-eol`,
);
const extraLines = [];
if (windows) {
extraLines.push(
const expectedResponseLines = [...commonResponseLines[responseFilename]];
if (isWindows) {
expectedResponseLines.push(
`[Info] Converting line endings CRLF -> LF for file: ${path.join(
projectPath,
'src',
@ -239,10 +189,151 @@ describe('balena push', function() {
);
}
expect(err).to.have.members([]);
expect(tweakOutput(out)).to.include.members([
...expectedResponses[responseFilename],
...extraLines,
]);
await testPushBuildStream({
builderMock: builder,
commandLine: `push testApp --source ${projectPath} --convert-eol`,
expectedFiles,
expectedQueryParams: commonQueryParams,
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
});
});
it('should create the expected tar stream (docker-compose)', async () => {
const projectPath = path.join(projectsPath, 'docker-compose', 'basic');
const expectedFiles: ExpectedTarStreamFiles = {
'docker-compose.yml': { fileSize: 245, type: 'file' },
'service1/Dockerfile.template': { fileSize: 144, type: 'file' },
'service1/file1.sh': { fileSize: 12, type: 'file' },
'service2/Dockerfile-alt': { fileSize: 40, type: 'file' },
'service2/file2-crlf.sh': {
fileSize: isWindows ? 12 : 14,
testStream: isWindows ? expectStreamNoCRLF : undefined,
type: 'file',
},
};
const responseFilename = 'build-POST-v3.json';
const responseBody = await fs.readFile(
path.join(builderResponsePath, responseFilename),
'utf8',
);
const expectedResponseLines: string[] = [
...commonResponseLines[responseFilename],
];
if (isWindows) {
expectedResponseLines.push(
`[Info] Converting line endings CRLF -> LF for file: ${path.join(
projectPath,
'service2',
'file2-crlf.sh',
)}`,
);
}
await testPushBuildStream({
builderMock: builder,
commandLine: `push testApp --source ${projectPath} --convert-eol`,
expectedFiles,
expectedQueryParams: commonQueryParams,
expectedResponseLines,
projectPath,
responseBody,
responseCode: 200,
});
});
});
describe('balena push: project validation', function() {
it('should raise ExpectedError if the project folder is not a directory', async () => {
const projectPath = path.join(
projectsPath,
'docker-compose',
'basic',
'docker-compose.yml',
);
const expectedErrorLines = [
`Could not access source folder: "${projectPath}"`,
];
const { out, err } = await runCommand(
`push testApp --source ${projectPath}`,
);
expect(
cleanOutput(err).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members(expectedErrorLines);
expect(out).to.be.empty;
});
it('should raise ExpectedError if a Dockerfile cannot be found', async () => {
const projectPath = path.join(
projectsPath,
'docker-compose',
'basic',
'service2',
);
const expectedErrorLines = [
'Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file',
`found in source folder "${projectPath}"`,
];
const { out, err } = await runCommand(
`push testApp --source ${projectPath}`,
);
expect(
cleanOutput(err).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members(expectedErrorLines);
expect(out).to.be.empty;
});
it('should log a warning if a docker-compose.yml exists in a parent folder', async () => {
const projectPath = path.join(
projectsPath,
'docker-compose',
'basic',
'service1',
);
const expectedErrorLines = [
'The --nolive flag is only valid when pushing to a local mode device',
];
const expectedOutputLines = [
'[Warn] "docker-compose.y[a]ml" file found in parent directory: please check',
"[Warn] that the correct folder was specified. (Suppress with '--noparent-check'.)",
];
const { out, err } = await runCommand(
`push testApp --source ${projectPath} --nolive`,
);
expect(
cleanOutput(err).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members(expectedErrorLines);
expect(
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members(expectedOutputLines);
});
it('should suppress a parent folder check with --noparent-check', async () => {
	// 'service1' sits below a folder containing 'docker-compose.yml', which
	// normally triggers the parent-folder warning; --noparent-check must
	// suppress it.
	const projectPath = path.join(
		projectsPath,
		'docker-compose',
		'basic',
		'service1',
	);
	// --nolive is expected to fail for a cloud app, but only after project
	// directory validation has already run (or been suppressed), so it is a
	// convenient way to stop the command early.
	const expectedErrorLines = [
		'The --nolive flag is only valid when pushing to a local mode device',
	];

	const { out, err } = await runCommand(
		`push testApp --source ${projectPath} --nolive --noparent-check`,
	);
	expect(
		cleanOutput(err).map(line => line.replace(/\s{2,}/g, ' ')),
	).to.include.members(expectedErrorLines);
	// With --noparent-check, the parent-folder warning must NOT be printed,
	// so stdout is expected to be empty. (An unused 'expectedOutputLines'
	// variable, left over from the previous test case, was removed here.)
	expect(out).to.be.empty;
});
});

232
tests/docker-build.ts Normal file
View File

@ -0,0 +1,232 @@
/**
* @license
* Copyright 2019-2020 Balena Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { expect } from 'chai';
import { stripIndent } from 'common-tags';
import * as _ from 'lodash';
import { fs } from 'mz';
import * as path from 'path';
import { PathUtils } from 'resin-multibuild';
import { Readable } from 'stream';
import * as tar from 'tar-stream';
import { streamToBuffer } from 'tar-utils';
import { URL } from 'url';
import { BuilderMock } from './builder-mock';
import { DockerMock } from './docker-mock';
import { cleanOutput, fillTemplateArray, runCommand } from './helpers';
/** Expectations for a single entry (file) in an intercepted tar stream. */
export interface ExpectedTarStreamFile {
	// Literal expected contents; when set, compared instead of the on-disk
	// project file of the same name (see defaultTestStream).
	contents?: string;
	// Expected tar entry size in bytes (tar header 'size' field).
	fileSize: number;
	// Optional custom assertion for this entry's stream (e.g.
	// expectStreamNoCRLF); when omitted, the entry's contents are compared
	// against the project filesystem.
	testStream?: (
		header: tar.Headers,
		stream: Readable,
		expected?: ExpectedTarStreamFile,
	) => Promise<void>;
	// Expected tar entry type, e.g. 'file'.
	type: tar.Headers['type'];
}

/** Map of tar entry path (e.g. 'src/start.sh') to its expected details. */
export interface ExpectedTarStreamFiles {
	[filePath: string]: ExpectedTarStreamFile;
}

/** Map of service name (e.g. 'main') to the files expected for that service. */
export interface ExpectedTarStreamFilesByService {
	[service: string]: ExpectedTarStreamFiles;
}
/**
* Run a few chai.expect() test assertions on a tar stream/buffer produced by
* the balena push, build and deploy commands, intercepted at HTTP level on
* their way from the CLI to the Docker daemon or balenaCloud builders.
*
* @param tarRequestBody Intercepted buffer of tar stream to be sent to builders/Docker
* @param expectedFiles Details of files expected to be found in the buffer
* @param projectPath Path of test project that was tarred, to compare file contents
*/
export async function inspectTarStream(
	tarRequestBody: string | Buffer,
	expectedFiles: ExpectedTarStreamFiles,
	projectPath: string,
): Promise<void> {
	// string to stream: https://stackoverflow.com/a/22085851
	const sourceTarStream = new Readable();
	sourceTarStream._read = () => undefined;
	sourceTarStream.push(tarRequestBody);
	sourceTarStream.push(null);

	// Walk every entry of the tar stream, recording what was found and
	// asserting each entry's contents as we go. The promise rejects on the
	// first stream error or failed per-entry assertion.
	const found: ExpectedTarStreamFiles = await new Promise((resolve, reject) => {
		const foundFiles: ExpectedTarStreamFiles = {};
		const extract = tar.extract();
		extract.on('error', reject);
		extract.on(
			'entry',
			async (header: tar.Headers, stream: Readable, next: tar.Callback) => {
				try {
					// TODO: test the .balena folder instead of ignoring it
					if (header.name.startsWith('.balena/')) {
						// Drain the entry's stream so the extractor can advance.
						stream.resume();
					} else {
						// Duplicate entries in the tar stream are an error.
						expect(foundFiles).to.not.have.property(header.name);
						foundFiles[header.name] = {
							fileSize: header.size || 0,
							type: header.type,
						};
						const expected = expectedFiles[header.name];
						// Use the entry-specific assertion if one was provided,
						// otherwise compare against the project filesystem.
						if (expected && expected.testStream) {
							await expected.testStream(header, stream, expected);
						} else {
							await defaultTestStream(header, stream, expected, projectPath);
						}
					}
				} catch (err) {
					reject(err);
				}
				// Always signal the extractor, even after a rejection, so the
				// stream is not left hanging.
				next();
			},
		);
		extract.once('finish', () => {
			resolve(foundFiles);
		});
		sourceTarStream.on('error', reject);
		sourceTarStream.pipe(extract);
	});
	// Finally, check that exactly the expected set of entries was found
	// (ignoring the test-only 'testStream' and 'contents' fields).
	expect(found).to.deep.equal(
		_.mapValues(expectedFiles, v => _.omit(v, 'testStream', 'contents')),
	);
}
/** Check that a tar stream entry matches the project contents in the filesystem */
async function defaultTestStream(
	header: tar.Headers,
	stream: Readable,
	expected: ExpectedTarStreamFile | undefined,
	projectPath: string,
): Promise<void> {
	// When the caller supplied literal expected contents, compare against
	// those; otherwise read the corresponding file from the project folder.
	const reference: Buffer | Promise<Buffer> = expected?.contents
		? Buffer.from(expected.contents)
		: fs.readFile(
				path.join(projectPath, PathUtils.toNativePath(header.name)),
		  );
	const [streamed, wanted] = await Promise.all([
		streamToBuffer(stream),
		reference,
	]);
	const msg = stripIndent`
		contents mismatch for tar stream entry "${header.name}"
		stream length=${streamed.length}, filesystem length=${wanted.length}`;
	expect(streamed.equals(wanted), msg).to.be.true;
}
/** Test a tar stream entry for the absence of Windows CRLF line breaks */
export async function expectStreamNoCRLF(
_header: tar.Headers,
stream: Readable,
): Promise<void> {
const chai = await import('chai');
const buf = await streamToBuffer(stream);
await chai.expect(buf.includes('\r\n')).to.be.false;
}
/**
* Common test logic for the 'build' and 'deploy' commands
*/
export async function testDockerBuildStream(o: {
	commandLine: string;
	dockerMock: DockerMock;
	expectedFilesByService: ExpectedTarStreamFilesByService;
	expectedQueryParamsByService: { [service: string]: string[][] };
	expectedResponseLines: string[];
	projectPath: string;
	responseCode: number;
	responseBody: string;
	services: string[]; // e.g. ['main'] or ['service1', 'service2']
}) {
	// Resolve '${...}' placeholders in the expected output lines against `o`.
	const expectedResponseLines = fillTemplateArray(o.expectedResponseLines, o);

	// tagPrefix is, for example, 'myApp' if the path is 'path/to/myApp'.
	// It is the same for every service, so compute it once outside the loop
	// (it was previously recomputed per iteration).
	const tagPrefix = o.projectPath.split(path.sep).pop();

	// Register one mocked POST /build per service, each with its own image
	// tag, query-parameter and tar-stream expectations.
	for (const service of o.services) {
		const tag = `${tagPrefix}_${service}`;
		const expectedFiles = o.expectedFilesByService[service];
		const expectedQueryParams = fillTemplateArray(
			o.expectedQueryParamsByService[service],
			_.assign({ tag }, o),
		);
		// Single-container projects ('main') are tarred from the project
		// root; composed services live in a subfolder named after the service.
		const projectPath =
			service === 'main' ? o.projectPath : path.join(o.projectPath, service);

		o.dockerMock.expectPostBuild(
			_.assign({}, o, {
				checkURI: async (uri: string) => {
					const url = new URL(uri, 'http://test.net/');
					const queryParams = Array.from(url.searchParams.entries());
					expect(queryParams).to.have.deep.members(expectedQueryParams);
				},
				checkBuildRequestBody: (buildRequestBody: string) =>
					inspectTarStream(buildRequestBody, expectedFiles, projectPath),
				tag,
			}),
		);
		o.dockerMock.expectGetImages();
	}

	const { out, err } = await runCommand(o.commandLine);

	expect(err).to.be.empty;
	// Normalize runs of whitespace before comparing, as the CLI pads output.
	expect(
		cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
	).to.include.members(expectedResponseLines);
}
/**
* Common test logic for the 'push' command
*/
export async function testPushBuildStream(o: {
	commandLine: string;
	builderMock: BuilderMock;
	expectedFiles: ExpectedTarStreamFiles;
	expectedQueryParams: string[][];
	expectedResponseLines: string[];
	projectPath: string;
	responseCode: number;
	responseBody: string;
}) {
	// Resolve '${...}' placeholders in the expectations against `o` itself.
	const expectedQueryParams = fillTemplateArray(o.expectedQueryParams, o);
	const expectedResponseLines = fillTemplateArray(o.expectedResponseLines, o);

	// Assert the query string of the intercepted POST /build request.
	const checkURI = async (uri: string) => {
		const parsed = new URL(uri, 'http://test.net/');
		const actualQueryParams = Array.from(parsed.searchParams.entries());
		expect(actualQueryParams).to.have.deep.members(expectedQueryParams);
	};
	// Assert the tar stream sent as the request body.
	const checkBuildRequestBody = (buildRequestBody: string) =>
		inspectTarStream(buildRequestBody, o.expectedFiles, o.projectPath);

	o.builderMock.expectPostBuild(
		_.assign({}, o, { checkURI, checkBuildRequestBody }),
	);

	const { out, err } = await runCommand(o.commandLine);

	expect(err).to.be.empty;
	// Normalize runs of whitespace before comparing, as the CLI pads output.
	expect(
		cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
	).to.include.members(expectedResponseLines);
}

View File

@ -18,16 +18,10 @@
// tslint:disable-next-line:no-var-requires
require('./config-tests'); // required for side effects
import { stripIndent } from 'common-tags';
import intercept = require('intercept-stdout');
import * as _ from 'lodash';
import { fs } from 'mz';
import * as nock from 'nock';
import * as path from 'path';
import { PathUtils } from 'resin-multibuild';
import { Readable } from 'stream';
import * as tar from 'tar-stream';
import { streamToBuffer } from 'tar-utils';
import * as balenaCLI from '../build/app';
@ -114,101 +108,34 @@ export function monochrome(text: string): string {
return text.replace(/\u001b\[\??\d+?[a-zA-Z]\r?/g, '');
}
export interface TarStreamFiles {
[filePath: string]: {
fileSize: number;
type: tar.Headers['type'];
testStream?: (header: tar.Headers, stream: Readable) => Promise<void>;
};
/**
 * Dynamic template string resolution.
 * Usage example:
 * const templateString = 'hello ${name}!';
 * const templateVars = { name: 'world' };
 * console.log( fillTemplate(templateString, templateVars) );
 * // hello world!
 *
 * @param templateString Template with '${...}' placeholders (a plain string,
 *   not a template literal - the placeholders are resolved at runtime)
 * @param templateVars Object whose keys/values become variables in scope
 *   while the placeholders are evaluated
 * @returns The template with every '${...}' expression substituted
 */
export function fillTemplate(
	templateString: string,
	templateVars: object,
): string {
	// Escape backslashes and backticks so that they survive evaluation as a
	// template literal: the literal itself turns '\\' back into '\' and '\`'
	// back into '`', so no further unescaping is needed afterwards. (A later
	// unescape pass would corrupt templates or substituted values that
	// legitimately contain backslash-backtick or doubled-backslash sequences.)
	const escaped = templateString.replace(/\\/g, '\\\\').replace(/`/g, '\\`');
	// Evaluate the escaped template as a template literal, with each key of
	// templateVars bound as a named argument.
	return new Function(
		...Object.keys(templateVars),
		`return \`${escaped}\`;`,
	).call(null, ...Object.values(templateVars));
}
/**
* Run a few chai.expect() test assertions on a tar stream/buffer produced by
* the balena push, build and deploy commands, intercepted at HTTP level on
* their way from the CLI to the Docker daemon or balenaCloud builders.
*
* @param tarRequestBody Intercepted buffer of tar stream to be sent to builders/Docker
* @param expectedFiles Details of files expected to be found in the buffer
* @param projectPath Path of test project that was tarred, to compare file contents
* @param expect chai.expect function
*/
export async function inspectTarStream(
tarRequestBody: string | Buffer,
expectedFiles: TarStreamFiles,
projectPath: string,
expect: Chai.ExpectStatic,
): Promise<void> {
// string to stream: https://stackoverflow.com/a/22085851
const sourceTarStream = new Readable();
sourceTarStream._read = () => undefined;
sourceTarStream.push(tarRequestBody);
sourceTarStream.push(null);
const found: TarStreamFiles = await new Promise((resolve, reject) => {
const foundFiles: TarStreamFiles = {};
const extract = tar.extract();
extract.on('error', reject);
extract.on(
'entry',
async (header: tar.Headers, stream: Readable, next: tar.Callback) => {
try {
// TODO: test the .balena folder instead of ignoring it
if (header.name.startsWith('.balena/')) {
stream.resume();
} else {
expect(foundFiles).to.not.have.property(header.name);
foundFiles[header.name] = {
fileSize: header.size || 0,
type: header.type,
};
const expected = expectedFiles[header.name];
if (expected && expected.testStream) {
await expected.testStream(header, stream);
} else {
await defaultTestStream(header, stream, projectPath, expect);
}
}
} catch (err) {
reject(err);
}
next();
},
);
extract.once('finish', () => {
resolve(foundFiles);
});
sourceTarStream.on('error', reject);
sourceTarStream.pipe(extract);
});
expect(found).to.deep.equal(
_.mapValues(expectedFiles, v => _.omit(v, 'testStream')),
/**
 * Recursively apply fillTemplate() to an array of template strings
 * (or nested arrays of template strings).
 */
export function fillTemplateArray(
	templateStringArray: Array<string | string[]>,
	templateVars: object,
) {
	return templateStringArray.map(item => {
		if (Array.isArray(item)) {
			// nested array: recurse
			return fillTemplateArray(item, templateVars);
		}
		return fillTemplate(item, templateVars);
	});
}
/** Check that a tar stream entry matches the project contents in the filesystem */
async function defaultTestStream(
	header: tar.Headers,
	stream: Readable,
	projectPath: string,
	expect: Chai.ExpectStatic,
): Promise<void> {
	// tar entry names use forward slashes; convert to the native separator
	const diskPath = path.join(
		projectPath,
		PathUtils.toNativePath(header.name),
	);
	// Read the tar entry and the on-disk file in parallel
	const [streamed, onDisk] = await Promise.all([
		streamToBuffer(stream),
		fs.readFile(diskPath),
	]);
	const msg = stripIndent`
		contents mismatch for tar stream entry "${header.name}"
		stream length=${streamed.length}, filesystem length=${onDisk.length}`;
	expect(streamed.equals(onDisk), msg).to.be.true;
}
/** Test a tar stream entry for the absence of Windows CRLF line breaks */
export async function expectStreamNoCRLF(
	_header: tar.Headers,
	stream: Readable,
): Promise<void> {
	const contents = await streamToBuffer(stream);
	const chai = await import('chai');
	await chai.expect(contents.includes('\r\n')).to.be.false;
}

View File

@ -0,0 +1,14 @@
version: '2'
volumes:
resin-data:
services:
service1:
volumes:
- 'resin-data:/data'
build: ./service1
service2:
volumes:
- 'resin-data:/data'
build:
context: ./service2
dockerfile: Dockerfile-alt

View File

@ -0,0 +1,3 @@
FROM balenalib/%%BALENA_MACHINE_NAME%%-alpine
COPY ./file1.sh /
CMD i=1; while :; do echo "service1 $i $(uname -a)"; sleep 10; i=$((i+1)); done

View File

@ -0,0 +1,2 @@
line1
line2

View File

@ -0,0 +1 @@
alternative Dockerfile (basic/service2)

View File

@ -0,0 +1,2 @@
line1
line2