Mirror of https://github.com/balena-io/balena-cli.git

Merge pull request #1583 from balena-io/1273-convert-crlf-on-push: Convert CRLF on push

commit f58a49d6c3
@@ -1802,6 +1802,10 @@ separated by a colon, e.g:

Note that if the service name cannot be found in the composition, the entire
left hand side of the = character will be treated as the variable name.

#### --convert-eol, -l

Convert line endings from CRLF (Windows format) to LF (Unix format). Source files are not modified.

# Settings

## settings
@@ -1927,6 +1931,10 @@ Display full log output

Path to a YAML or JSON file with passwords for a private Docker registry

#### --convert-eol, -l

Convert line endings from CRLF (Windows format) to LF (Unix format). Source files are not modified.

#### --docker, -P <docker>

Path to a local docker socket (e.g. /var/run/docker.sock)

@@ -2052,6 +2060,10 @@ Display full log output

Path to a YAML or JSON file with passwords for a private Docker registry

#### --convert-eol, -l

Convert line endings from CRLF (Windows format) to LF (Unix format). Source files are not modified.

#### --docker, -P <docker>

Path to a local docker socket (e.g. /var/run/docker.sock)
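For reference, a minimal usage sketch of the new flag with the commands documented above (the application name and project path are illustrative, not taken from the docs):

    $ balena build . --deviceType nuc --arch amd64 --convert-eol
    $ balena deploy myApp --build --convert-eol
    $ balena push myApp --source . --convert-eol

Note that for `balena deploy` the flag is only accepted together with `--build`, as enforced by the option check added in deploy.coffee below.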
@@ -41,8 +41,10 @@ buildProject = (docker, logger, composeOpts, opts) ->
opts.buildEmulated
opts.buildOpts
composeOpts.inlineLogs
opts.convertEol
)
.then ->
logger.outputDeferredMessages()
logger.logSuccess('Build succeeded!')
.tapCatch (e) ->
logger.logError('Build failed')
@@ -117,6 +119,9 @@ module.exports =
options.source ?= params.source
delete params.source

options.convertEol = options['convert-eol'] || false
delete options['convert-eol']

Promise.resolve(validateComposeOptions(sdk, options))
.then ->
{ application, arch, deviceType } = options
@@ -150,6 +155,7 @@ module.exports =
deviceType
buildEmulated: !!options.emulated
buildOpts
convertEol: options.convertEol
})
)
.asCallback(done)
@@ -4,6 +4,7 @@ Promise = require('bluebird')
dockerUtils = require('../utils/docker')
compose = require('../utils/compose')
{ registrySecretsHelp } = require('../utils/messages')
{ ExpectedError } = require('../errors')

###
Opts must be an object with the following keys:
@@ -60,6 +61,7 @@ deployProject = (docker, logger, composeOpts, opts) ->
opts.buildEmulated
opts.buildOpts
composeOpts.inlineLogs
opts.convertEol
)
.then (builtImages) ->
_.keyBy(builtImages, 'serviceName')
@@ -114,6 +116,7 @@ deployProject = (docker, logger, composeOpts, opts) ->
)
)
.then (release) ->
logger.outputDeferredMessages()
logger.logSuccess('Deploy succeeded!')
logger.logSuccess("Release: #{release.commit}")
console.log()
@@ -175,7 +178,7 @@ module.exports =
signature: 'nologupload'
description: "Don't upload build logs to the dashboard with image (if building)"
boolean: true
}
},
]
action: (params, options, done) ->
# compositions with many services trigger misleading warnings
@@ -196,6 +199,11 @@ module.exports =
appName = appName_raw || appName || options.application
delete options.application

options.convertEol = options['convert-eol'] || false
delete options['convert-eol']
if options.convertEol and not options.build
return done(new ExpectedError('The --convert-eol flag is only valid with --build.'))

Promise.resolve(validateComposeOptions(sdk, options))
.then ->
if not appName?
@@ -213,9 +221,9 @@ module.exports =
return app
)
.then (app) ->
[ app, image, !!options.build, !options.nologupload ]
[ app, image, !!options.build, !options.nologupload]

.then ([ app, image, shouldPerformBuild, shouldUploadLogs ]) ->
.then ([ app, image, shouldPerformBuild, shouldUploadLogs, convertEol ]) ->
Promise.join(
dockerUtils.getDocker(options)
dockerUtils.generateBuildOpts(options)
@@ -229,6 +237,7 @@ module.exports =
shouldUploadLogs
buildEmulated: !!options.emulated
buildOpts
convertEol: options.convertEol
})
)
.asCallback(done)
@@ -1,5 +1,5 @@
/*
Copyright 2016-2019 Balena Ltd.
Copyright 2016-2020 Balena Ltd.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -113,6 +113,7 @@ export const push: CommandDefinition<
service?: string | string[];
system?: boolean;
env?: string | string[];
'convert-eol'?: boolean;
}
> = {
signature: 'push <applicationOrDevice>',
@@ -243,6 +244,13 @@ export const push: CommandDefinition<
left hand side of the = character will be treated as the variable name.
`,
},
{
signature: 'convert-eol',
alias: 'l',
description: stripIndent`
Convert line endings from CRLF (Windows format) to LF (Unix format). Source files are not modified.`,
boolean: true,
},
],
async action(params, options, done) {
const sdk = (await import('balena-sdk')).fromSharedOptions();
@@ -317,6 +325,7 @@ export const push: CommandDefinition<
nocache: options.nocache || false,
registrySecrets,
headless: options.detached || false,
convertEol: options['convert-eol'] || false,
};
const args = {
app,
@@ -327,7 +336,6 @@ export const push: CommandDefinition<
sdk,
opts,
};

return await remote.startRemoteBuild(args);
},
).nodeify(done);
@@ -356,6 +364,7 @@ export const push: CommandDefinition<
typeof options.env === 'string'
? [options.env]
: options.env || [],
convertEol: options['convert-eol'] || false,
}),
)
.catch(BuildError, e => {
@@ -1,6 +1,6 @@
###*
# @license
# Copyright 2017-2019 Balena Ltd.
# Copyright 2017-2020 Balena Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -52,6 +52,12 @@ exports.appendOptions = (opts) ->
parameter: 'secrets.yml|.json'
description: 'Path to a YAML or JSON file with passwords for a private Docker registry'
},
{
signature: 'convert-eol'
description: 'Convert line endings from CRLF (Windows format) to LF (Unix format). Source files are not modified.'
boolean: true
alias: 'l'
}
]

exports.generateOpts = (options) ->
@@ -131,7 +137,11 @@ exports.loadProject = (logger, projectPath, projectName, image, dockerfilePath)
logger.logDebug('Creating project...')
createProject(projectPath, composeStr, projectName)

exports.tarDirectory = tarDirectory = (dir, preFinalizeCallback = null) ->

exports.tarDirectory = tarDirectory = (dir, { preFinalizeCallback, convertEol } = {}) ->
preFinalizeCallback ?= null
convertEol ?= false

tar = require('tar-stream')
klaw = require('klaw')
path = require('path')
@@ -139,6 +149,7 @@ exports.tarDirectory = tarDirectory = (dir, preFinalizeCallback = null) ->
streamToPromise = require('stream-to-promise')
{ FileIgnorer } = require('./ignore')
{ toPosixPath } = require('resin-multibuild').PathUtils
{ readFileWithEolConversion } = require('./eol-conversion')

getFiles = ->
streamToPromise(klaw(dir))
@@ -155,7 +166,7 @@ exports.tarDirectory = tarDirectory = (dir, preFinalizeCallback = null) ->
.filter(ignore.filter)
.map (file) ->
relPath = path.relative(path.resolve(dir), file)
Promise.join relPath, fs.stat(file), fs.readFile(file),
Promise.join relPath, fs.stat(file), readFileWithEolConversion(file, convertEol),
(filename, stats, data) ->
pack.entry({ name: toPosixPath(filename), size: stats.size, mode: stats.mode }, data)
.then ->
@@ -179,7 +190,8 @@ exports.buildProject = (
projectPath, projectName, composition,
arch, deviceType,
emulated, buildOpts,
inlineLogs
inlineLogs,
convertEol
) ->
_ = require('lodash')
humanize = require('humanize')
@@ -214,7 +226,7 @@ exports.buildProject = (
return qemu.copyQemu(path.join(projectPath, d.image.context), arch)
.then (needsQemu) ->
# Tar up the directory, ready for the build stream
tarDirectory(projectPath)
tarDirectory(projectPath, { convertEol })
.then (tarStream) ->
Promise.resolve(makeBuildTasks(composition, tarStream, { arch, deviceType }, logger))
.map (task) ->

9 lib/utils/compose.d.ts vendored
@@ -1,6 +1,6 @@
/**
 * @license
 * Copyright 2018 Balena Ltd.
 * Copyright 2018-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -49,7 +49,12 @@ export function loadProject(
dockerfilePath?: string,
): Bluebird<ComposeProject>;

interface TarDirectoryOptions {
preFinalizeCallback?: (pack: Pack) => void;
convertEol?: boolean;
}

export function tarDirectory(
source: string,
preFinalizeCallback?: (pack: Pack) => void,
options?: TarDirectoryOptions,
): Promise<Stream.Readable>;
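For illustration, a minimal caller sketch against the updated declaration above (the project path is illustrative; inside an async function, importing from lib/utils/compose as remote-build.ts does):

    import { tarDirectory } from './compose';

    // Old call style: tarDirectory(source, preFinalizeCallback)
    // New call style: a single options object
    const tarStream = await tarDirectory('/path/to/project', {
        convertEol: true, // convert CRLF -> LF while adding files to the tar stream
    });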
@@ -1,6 +1,6 @@
/**
 * @license
 * Copyright 2018 Balena Ltd.
 * Copyright 2018-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -54,6 +54,7 @@ export interface DeviceDeployOptions {
services?: string[];
system: boolean;
env: string[];
convertEol: boolean;
}

interface ParsedEnvironment {
@@ -186,7 +187,9 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {

await checkBuildSecretsRequirements(docker, opts.source);
globalLogger.logDebug('Tarring all non-ignored files...');
const tarStream = await tarDirectory(opts.source);
const tarStream = await tarDirectory(opts.source, {
convertEol: opts.convertEol,
});

// Try to detect the device information
const deviceInfo = await api.getDeviceInformation();
@@ -261,6 +264,7 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
);
}
globalLogger.logLivepush('Watching for file changes...');
globalLogger.outputDeferredMessages();
await Promise.all(promises);
} else {
if (opts.detached) {
@@ -272,6 +276,7 @@ export async function deployToDevice(opts: DeviceDeployOptions): Promise<void> {
// Now all we need to do is stream back the logs
const logStream = await api.getLogStream();
globalLogger.logInfo('Streaming device logs...');
globalLogger.outputDeferredMessages();
await displayDeviceLogs(
logStream,
globalLogger,

139 lib/utils/eol-conversion.ts Normal file
@@ -0,0 +1,139 @@
/**
 * @license
 * Copyright 2019-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import mmmagic = require('mmmagic');
import fs = require('mz/fs');
import Logger = require('./logger');

const globalLogger = Logger.getLogger();

// Define file size threshold (bytes) over which analysis/conversion is not performed.
const LARGE_FILE_THRESHOLD = 10 * 1000 * 1000;

// The list of encodings to convert is intentionally conservative for now
const CONVERTIBLE_ENCODINGS = ['ascii', 'utf-8'];

/**
 * Attempt to detect the encoding of a data buffer
 * @param data
 */
async function detectEncoding(data: Buffer): Promise<string> {
// Instantiate mmmagic for mime encoding analysis
const magic = new mmmagic.Magic(mmmagic.MAGIC_MIME_ENCODING);

// Promisify magic.detect
// For some reason, got 'Illegal Invocation' when using:
// const detectEncoding = promisify(magic.detect);
return new Promise((resolve, reject) => {
magic.detect(data, (err, encoding) => {
if (err) {
return reject(err);
}
// mmmagic reports ascii as 'us-ascii', but node Buffer uses 'ascii'
encoding = encoding === 'us-ascii' ? 'ascii' : encoding;
return resolve(encoding);
});
});
}

/**
 * Convert EOL (CRLF → LF) in place, i.e. modifying the input buffer.
 * Safe for UTF-8, ASCII and 8-bit encodings (like 'latin-1', 'iso-8859-1', ...),
 * but not safe for UTF-16 or UTF-32.
 * Return a new buffer object sharing the same contents memory space as the
 * input buffer (using Buffer.slice()), in order to safely reflect the new
 * buffer size.
 * @param buf
 */
export function convertEolInPlace(buf: Buffer): Buffer {
const CR = 13;
const LF = 10;
let foundCR = false;
let j;
// Algorithm gist:
// - i and j are running indexes over the same buffer, but think of it as
//   i pointing to the input buffer, and j pointing to the output buffer.
// - i and j are incremented by 1 in every loop iteration, but if a LF is found
//   after a CR, then j is decremented by 1, and LF is written. Invariant: j <= i.
for (let i = (j = 0); i < buf.length; i++, j++) {
const b = (buf[j] = buf[i]);
if (b === CR) {
foundCR = true;
} else {
if (foundCR && b === LF) {
j--; // decrement index of "output buffer"
buf[j] = LF; // overwrite previous CR with LF
}
foundCR = false;
}
}
return buf.slice(0, j);
}

/**
 * Drop-in replacement for promisified fs.readFile(<string>)
 * Attempts to convert EOLs from CRLF to LF for supported encodings,
 * or otherwise logs warnings.
 * @param filepath
 * @param convertEol When true, performs conversions, otherwise just warns.
 */
export async function readFileWithEolConversion(
filepath: string,
convertEol: boolean,
): Promise<Buffer> {
const fileBuffer = await fs.readFile(filepath);

// Skip processing of very large files
const fileStats = await fs.stat(filepath);
if (fileStats.size > LARGE_FILE_THRESHOLD) {
globalLogger.logWarn(`CRLF detection skipped for large file: ${filepath}`);
return fileBuffer;
}

// Analyse encoding
const encoding = await detectEncoding(fileBuffer);

// Skip further processing of non-convertible encodings
if (!CONVERTIBLE_ENCODINGS.includes(encoding)) {
return fileBuffer;
}

// Skip further processing of files that don't contain CRLF
if (!fileBuffer.includes('\r\n', 0, encoding)) {
return fileBuffer;
}

if (convertEol) {
// Convert CRLF->LF
globalLogger.logInfo(
`Converting line endings CRLF -> LF for file: ${filepath}`,
);

return convertEolInPlace(fileBuffer);
} else {
// Immediate warning
globalLogger.logWarn(
`CRLF (Windows) line endings detected in file: ${filepath}`,
);
// And summary warning later
globalLogger.deferredLog(
'Windows-format line endings were detected in some files. Consider using the `--convert-eol` option.',
Logger.Level.WARN,
);

return fileBuffer;
}
}
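As a quick illustration of convertEolInPlace() above (a minimal sketch; the import path mirrors the one used by tests/utils/eol-conversion.spec.ts further down and assumes the compiled build/ output):

    import { convertEolInPlace } from '../../build/utils/eol-conversion';

    // CRLF pairs are rewritten to LF; lone CR and lone LF bytes are left alone.
    const input = Buffer.from('line one\r\nline two\rstill line two\n');
    const output = convertEolInPlace(input); // a Buffer.slice() over the same memory
    console.log(JSON.stringify(output.toString()));
    // -> "line one\nline two\rstill line two\n"

Note that the input buffer itself is modified in place; callers that need the original bytes should copy the buffer first.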
@@ -1,5 +1,5 @@
/*
Copyright 2016-2019 Balena
Copyright 2016-2020 Balena

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -354,7 +354,7 @@ function windowsCmdExeEscapeArg(arg: string): string {
return `"${arg.replace(/["]/g, '""')}"`;
}

/*
/**
 * Workaround a window system bug which causes multiple rapid DNS lookups
 * to fail for mDNS.
 *
@@ -1,6 +1,6 @@
/**
 * @license
 * Copyright 2019 Balena Ltd.
 * Copyright 2019-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -19,6 +19,17 @@ import _ = require('lodash');
import { EOL as eol } from 'os';
import { StreamLogger } from 'resin-stream-logger';

enum Level {
BUILD = 'build',
INFO = 'info',
DEBUG = 'debug',
SUCCESS = 'success',
WARN = 'warn',
ERROR = 'error',
LOGS = 'logs',
LIVEPUSH = 'livepush',
}

/**
 * General purpose logger class with support for log streams and colours.
 * Call `Logger.getLogger()` to retrieve a global shared instance of this
@@ -27,6 +38,8 @@ import { StreamLogger } from 'resin-stream-logger';
 * console.
 */
class Logger {
public static readonly Level = Level;

public streams: {
build: NodeJS.ReadWriteStream;
info: NodeJS.ReadWriteStream;
@@ -40,6 +53,8 @@ class Logger {

public formatMessage: (name: string, message: string) => string;

protected deferredLogMessages: Array<[string, Level]>;

protected constructor() {
const logger = new StreamLogger();
logger.addPrefix('build', chalk.blue('[Build]'));
@@ -71,6 +86,8 @@ class Logger {
});

this.formatMessage = logger.formatWithPrefix.bind(logger);

this.deferredLogMessages = [];
}

protected static logger: Logger;
@@ -114,6 +131,23 @@ class Logger {
public logLivepush(msg: string) {
return this.streams.livepush.write(msg + eol);
}

/**
 * Log a message for output later, ignore duplicates.
 */
public deferredLog(msg: string, level: Level) {
if (!this.deferredLogMessages.find(entry => entry[0] === msg)) {
this.deferredLogMessages.push([msg, level]);
}
}

/** Output any messages that have been queued for deferred output */
public outputDeferredMessages() {
this.deferredLogMessages.forEach(m => {
this.streams[m[1]].write(m[0] + eol);
});
this.deferredLogMessages = [];
}
}

export = Logger;
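A minimal usage sketch of the deferred-logging additions above, mirroring how eol-conversion.ts and the build/deploy/push actions use them (the message text and the relative import path are illustrative):

    import Logger = require('./logger'); // path as seen from lib/utils

    const logger = Logger.getLogger();

    // Queue a warning; duplicate messages are ignored.
    logger.deferredLog(
        'Windows-format line endings were detected in some files.',
        Logger.Level.WARN,
    );

    // Later, e.g. after a successful build, flush everything that was queued.
    logger.outputDeferredMessages();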
@@ -1,5 +1,5 @@
/*
Copyright 2016-2018 Balena Ltd.
Copyright 2016-2020 Balena Ltd.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -23,10 +23,13 @@ import * as Stream from 'stream';
import streamToPromise = require('stream-to-promise');
import { Pack } from 'tar-stream';
import { TypedError } from 'typed-error';
import Logger = require('./logger');

import { exitWithExpectedError } from '../utils/patterns';
import { tarDirectory } from './compose';

const globalLogger = Logger.getLogger();

const DEBUG_MODE = !!process.env.DEBUG;

const CURSOR_METADATA_REGEX = /([a-z]+)([0-9]+)?/;
@@ -38,6 +41,7 @@ export interface BuildOpts {
nocache: boolean;
registrySecrets: RegistrySecrets;
headless: boolean;
convertEol: boolean;
}

export interface RemoteBuild {
@@ -136,6 +140,7 @@ export async function startRemoteBuild(build: RemoteBuild): Promise<void> {
if (build.hadError) {
throw new RemoteBuildFailedError();
}
globalLogger.outputDeferredMessages();
});
}

@@ -289,12 +294,14 @@ async function getTarStream(build: RemoteBuild): Promise<Stream.Readable> {

try {
tarSpinner.start();
return await tarDirectory(
path.resolve(build.source),
const preFinalizeCb =
Object.keys(build.opts.registrySecrets).length > 0
? preFinalizeCallback
: undefined,
);
: undefined;
return await tarDirectory(path.resolve(build.source), {
preFinalizeCallback: preFinalizeCb,
convertEol: build.opts.convertEol,
});
} finally {
tarSpinner.stop();
}
17 npm-shrinkwrap.json generated

@@ -775,6 +775,15 @@
"@types/node": "*"
}
},
"@types/mmmagic": {
"version": "0.4.16-alpha",
"resolved": "https://registry.npmjs.org/@types/mmmagic/-/mmmagic-0.4.16-alpha.tgz",
"integrity": "sha1-zM66vnBpBmPWRaMdTLzxzZ3+UIE=",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/mocha": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.7.tgz",
@@ -8926,6 +8935,14 @@
}
}
},
"mmmagic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mmmagic/-/mmmagic-0.5.3.tgz",
"integrity": "sha512-xLqCu7GJYTzJczg0jafXFuh+iPzQL/ru0YYf4GiTTz8Cehru/wiXtUS8Pp8Xi77zNaiVndJ0OO1yAFci6iHyFg==",
"requires": {
"nan": "^2.13.2"
}
},
"mocha": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/mocha/-/mocha-6.2.2.tgz",

@@ -109,6 +109,7 @@
"@types/lodash": "4.14.112",
"@types/mixpanel": "2.14.0",
"@types/mkdirp": "0.5.2",
"@types/mmmagic": "0.4.16-alpha",
"@types/mocha": "^5.2.7",
"@types/mz": "0.0.32",
"@types/net-keepalive": "^0.4.0",
@@ -203,6 +204,7 @@
"minimatch": "^3.0.4",
"mixpanel": "^0.10.3",
"mkdirp": "^0.5.1",
"mmmagic": "^0.5.3",
"moment": "^2.24.0",
"moment-duration-format": "^2.3.2",
"mz": "^2.7.0",
@@ -20,7 +20,7 @@ import * as path from 'path';

import { NockMock, ScopeOpts } from './nock-mock';

const apiResponsePath = path.normalize(
export const apiResponsePath = path.normalize(
path.join(__dirname, 'test-data', 'api-response'),
);
@@ -17,10 +17,15 @@

import Bluebird = require('bluebird');
import * as _ from 'lodash';
import * as path from 'path';
import * as zlib from 'zlib';

import { NockMock } from './nock-mock';

export const builderResponsePath = path.normalize(
path.join(__dirname, 'test-data', 'builder-response'),
);

export class BuilderMock extends NockMock {
constructor() {
super('https://builder.balena-cloud.com');
@@ -31,15 +36,17 @@ export class BuilderMock extends NockMock {
persist?: boolean;
responseBody: any;
responseCode: number;
checkURI: (uri: string) => Promise<void>;
checkBuildRequestBody: (requestBody: string | Buffer) => Promise<void>;
}) {
this.optPost(/^\/v3\/build($|[(?])/, opts).reply(async function(
_uri,
uri,
requestBody,
callback,
) {
let error: Error | null = null;
try {
await opts.checkURI(uri);
if (typeof requestBody === 'string') {
const gzipped = Buffer.from(requestBody, 'hex');
const gunzipped = await Bluebird.fromCallback<Buffer>(cb => {
@@ -15,18 +15,19 @@
 * limitations under the License.
 */

import { configureBluebird } from '../../build/app-common';

configureBluebird();
// tslint:disable-next-line:no-var-requires
require('../config-tests'); // required for side effects

import { expect } from 'chai';
import { stripIndent } from 'common-tags';
import { fs } from 'mz';
import * as path from 'path';
import { URL } from 'url';

import { BalenaAPIMock } from '../balena-api-mock';
import { DockerMock } from '../docker-mock';
import { DockerMock, dockerResponsePath } from '../docker-mock';
import {
cleanOutput,
expectStreamNoCRLF,
inspectTarStream,
runCommand,
TarStreamFiles,
@@ -35,10 +36,27 @@ import {
const repoPath = path.normalize(path.join(__dirname, '..', '..'));
const projectsPath = path.join(repoPath, 'tests', 'test-data', 'projects');

const expectedResponses = {
'build-POST.json': [
'[Info] Building for amd64/nuc',
'[Info] Docker Desktop detected (daemon architecture: "x86_64")',
'[Info] Docker itself will determine and enable architecture emulation if required,',
'[Info] without balena-cli intervention and regardless of the --emulated option.',
'[Build] main Image size: 1.14 MB',
'[Success] Build succeeded!',
],
};

describe('balena build', function() {
let api: BalenaAPIMock;
let docker: DockerMock;

const commonQueryParams = [
['t', 'basic_main'],
['buildargs', '{}'],
['labels', ''],
];

this.beforeEach(() => {
api = new BalenaAPIMock();
docker = new DockerMock();
@@ -56,36 +74,29 @@ describe('balena build', function() {
docker.done();
});

it('should create the expected tar stream', async () => {
it('should create the expected tar stream (single container)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
Dockerfile: { fileSize: 85, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
};
const responseBody = stripIndent`
{"stream":"Step 1/4 : FROM busybox"}
{"stream":"\\n"}
{"stream":" ---\\u003e 64f5d945efcc\\n"}
{"stream":"Step 2/4 : COPY ./src/start.sh /start.sh"}
{"stream":"\\n"}
{"stream":" ---\\u003e Using cache\\n"}
{"stream":" ---\\u003e 97098fc9d757\\n"}
{"stream":"Step 3/4 : RUN chmod a+x /start.sh"}
{"stream":"\\n"}
{"stream":" ---\\u003e Using cache\\n"}
{"stream":" ---\\u003e 33728e2e3f7e\\n"}
{"stream":"Step 4/4 : CMD [\\"/start.sh\\"]"}
{"stream":"\\n"}
{"stream":" ---\\u003e Using cache\\n"}
{"stream":" ---\\u003e 2590e3b11eaf\\n"}
{"aux":{"ID":"sha256:2590e3b11eaf739491235016b53fec5d209c81837160abdd267c8fe5005ff1bd"}}
{"stream":"Successfully built 2590e3b11eaf\\n"}
{"stream":"Successfully tagged basic_main:latest\\n"}`;
const responseFilename = 'build-POST.json';
const responseBody = await fs.readFile(
path.join(dockerResponsePath, responseFilename),
'utf8',
);

docker.expectPostBuild({
tag: 'basic_main',
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
@@ -99,12 +110,61 @@ describe('balena build', function() {
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members([
`[Info] Creating default composition with source: ${projectPath}`,
'[Info] Building for amd64/nuc',
'[Info] Docker Desktop detected (daemon architecture: "x86_64")',
'[Info] Docker itself will determine and enable architecture emulation if required,',
'[Info] without balena-cli intervention and regardless of the --emulated option.',
'[Build] main Image size: 1.14 MB',
'[Success] Build succeeded!',
...expectedResponses[responseFilename],
`[Warn] CRLF (Windows) line endings detected in file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
]);
});

it('should create the expected tar stream (single container, --convert-eol)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': {
fileSize: 68,
type: 'file',
testStream: expectStreamNoCRLF,
},
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
};
const responseFilename = 'build-POST.json';
const responseBody = await fs.readFile(
path.join(dockerResponsePath, responseFilename),
'utf8',
);

docker.expectPostBuild({
tag: 'basic_main',
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});

const { out, err } = await runCommand(
`build ${projectPath} --deviceType nuc --arch amd64 --convert-eol`,
);

expect(err).to.have.members([]);
expect(
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members([
`[Info] Creating default composition with source: ${projectPath}`,
`[Info] Converting line endings CRLF -> LF for file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
...expectedResponses[responseFilename],
]);
});
});
@@ -15,16 +15,16 @@
 * limitations under the License.
 */

import { configureBluebird } from '../../build/app-common';

configureBluebird();
// tslint:disable-next-line:no-var-requires
require('../config-tests'); // required for side effects

import { expect } from 'chai';
import { stripIndent } from 'common-tags';
import { fs } from 'mz';
import * as path from 'path';
import { URL } from 'url';

import { BalenaAPIMock } from '../balena-api-mock';
import { DockerMock } from '../docker-mock';
import { DockerMock, dockerResponsePath } from '../docker-mock';
import {
cleanOutput,
inspectTarStream,
@@ -34,11 +34,31 @@ import {

const repoPath = path.normalize(path.join(__dirname, '..', '..'));
const projectsPath = path.join(repoPath, 'tests', 'test-data', 'projects');
const expectedResponses = {
'build-POST.json': [
'[Info] Building for armv7hf/raspberrypi3',
'[Info] Docker Desktop detected (daemon architecture: "x86_64")',
'[Info] Docker itself will determine and enable architecture emulation if required,',
'[Info] without balena-cli intervention and regardless of the --emulated option.',
'[Build] main Image size: 1.14 MB',
'[Info] Creating release...',
'[Info] Pushing images to registry...',
'[Info] Saving release...',
'[Success] Deploy succeeded!',
'[Success] Release: 09f7c3e1fdec609be818002299edfc2a',
],
};

describe('balena deploy', function() {
let api: BalenaAPIMock;
let docker: DockerMock;

const commonQueryParams = [
['t', 'basic_main'],
['buildargs', '{}'],
['labels', ''],
];

this.beforeEach(() => {
api = new BalenaAPIMock();
docker = new DockerMock();
@@ -73,36 +93,29 @@ describe('balena deploy', function() {
docker.done();
});

it('should create the expected --build tar stream', async () => {
it('should create the expected --build tar stream (single container)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
Dockerfile: { fileSize: 85, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
};
const responseBody = stripIndent`
{"stream":"Step 1/4 : FROM busybox"}
{"stream":"\\n"}
{"stream":" ---\\u003e 64f5d945efcc\\n"}
{"stream":"Step 2/4 : COPY ./src/start.sh /start.sh"}
{"stream":"\\n"}
{"stream":" ---\\u003e Using cache\\n"}
{"stream":" ---\\u003e 97098fc9d757\\n"}
{"stream":"Step 3/4 : RUN chmod a+x /start.sh"}
{"stream":"\\n"}
{"stream":" ---\\u003e Using cache\\n"}
{"stream":" ---\\u003e 33728e2e3f7e\\n"}
{"stream":"Step 4/4 : CMD [\\"/start.sh\\"]"}
{"stream":"\\n"}
{"stream":" ---\\u003e Using cache\\n"}
{"stream":" ---\\u003e 2590e3b11eaf\\n"}
{"aux":{"ID":"sha256:2590e3b11eaf739491235016b53fec5d209c81837160abdd267c8fe5005ff1bd"}}
{"stream":"Successfully built 2590e3b11eaf\\n"}
{"stream":"Successfully tagged basic_main:latest\\n"}`;
const responseFilename = 'build-POST.json';
const responseBody = await fs.readFile(
path.join(dockerResponsePath, responseFilename),
'utf8',
);

docker.expectPostBuild({
tag: 'basic_main',
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
@@ -116,16 +129,12 @@ describe('balena deploy', function() {
cleanOutput(out).map(line => line.replace(/\s{2,}/g, ' ')),
).to.include.members([
`[Info] Creating default composition with source: ${projectPath}`,
'[Info] Building for armv7hf/raspberrypi3',
'[Info] Docker Desktop detected (daemon architecture: "x86_64")',
'[Info] Docker itself will determine and enable architecture emulation if required,',
'[Info] without balena-cli intervention and regardless of the --emulated option.',
'[Build] main Image size: 1.14 MB',
'[Info] Creating release...',
'[Info] Pushing images to registry...',
'[Info] Saving release...',
'[Success] Deploy succeeded!',
'[Success] Release: 09f7c3e1fdec609be818002299edfc2a',
...expectedResponses[responseFilename],
`[Warn] CRLF (Windows) line endings detected in file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
]);
});
});
@@ -18,7 +18,7 @@
import { expect } from 'chai';
import * as path from 'path';

import { BalenaAPIMock } from '../../balena-api-mock';
import { apiResponsePath, BalenaAPIMock } from '../../balena-api-mock';
import { cleanOutput, runCommand } from '../../helpers';

const HELP_RESPONSE = `
@@ -31,10 +31,6 @@ Examples:
	$ balena device 7cf02a6
`;

const apiResponsePath = path.normalize(
path.join(__dirname, '..', '..', 'test-data', 'api-response'),
);

describe('balena device', function() {
let api: BalenaAPIMock;
@@ -18,7 +18,7 @@
import { expect } from 'chai';
import * as path from 'path';

import { BalenaAPIMock } from '../../balena-api-mock';
import { apiResponsePath, BalenaAPIMock } from '../../balena-api-mock';
import { cleanOutput, runCommand } from '../../helpers';

const HELP_RESPONSE = `
@@ -40,10 +40,6 @@ Options:
--application, -a, --app <application> application name
`;

const apiResponsePath = path.normalize(
path.join(__dirname, '..', '..', 'test-data', 'api-response'),
);

describe('balena devices', function() {
let api: BalenaAPIMock;
@@ -15,19 +15,19 @@
 * limitations under the License.
 */

import { configureBluebird } from '../../build/app-common';

configureBluebird();
// tslint:disable-next-line:no-var-requires
require('../config-tests'); // required for side effects

import { expect } from 'chai';
import { fs } from 'mz';
import * as path from 'path';
import { URL } from 'url';

import { BalenaAPIMock } from '../balena-api-mock';
import { BuilderMock } from '../builder-mock';
// import { DockerMock } from '../docker-mock';
import { BuilderMock, builderResponsePath } from '../builder-mock';
import {
cleanOutput,
expectStreamNoCRLF,
inspectTarStream,
runCommand,
TarStreamFiles,
@@ -35,14 +35,60 @@ import {

const repoPath = path.normalize(path.join(__dirname, '..', '..'));
const projectsPath = path.join(repoPath, 'tests', 'test-data', 'projects');
const builderResponsePath = path.normalize(
path.join(__dirname, '..', 'test-data', 'builder-response'),
);

const expectedResponses = {
'build-POST-v3.json': [
'[Info] Starting build for testApp, user gh_user',
'[Info] Dashboard link: https://dashboard.balena-cloud.com/apps/1301645/devices',
'[Info] Building on arm01',
'[Info] Pulling previous images for caching purposes...',
'[Success] Successfully pulled cache images',
'[main] Step 1/4 : FROM busybox',
'[main] ---> 76aea0766768',
'[main] Step 2/4 : COPY ./src/start.sh /start.sh',
'[main] ---> b563ad6a0801',
'[main] Step 3/4 : RUN chmod a+x /start.sh',
'[main] ---> Running in 10d4ddc40bfc',
'[main] Removing intermediate container 10d4ddc40bfc',
'[main] ---> 82e98871a32c',
'[main] Step 4/4 : CMD ["/start.sh"]',
'[main] ---> Running in 0682894e13eb',
'[main] Removing intermediate container 0682894e13eb',
'[main] ---> 889ccb6afc7c',
'[main] Successfully built 889ccb6afc7c',
'[Info] Uploading images',
'[Success] Successfully uploaded images',
'[Info] Built on arm01',
'[Success] Release successfully created!',
'[Info] Release: 05a24b5b034c9f95f25d4d74f0593bea (id: 1220245)',
'[Info] ┌─────────┬────────────┬────────────┐',
'[Info] │ Service │ Image Size │ Build Time │',
'[Info] ├─────────┼────────────┼────────────┤',
'[Info] │ main │ 1.32 MB │ 11 seconds │',
'[Info] └─────────┴────────────┴────────────┘',
'[Info] Build finished in 20 seconds',
],
};

function tweakOutput(out: string[]): string[] {
return cleanOutput(out).map(line =>
line.replace(/\s{2,}/g, ' ').replace(/in \d+? seconds/, 'in 20 seconds'),
);
}

describe('balena push', function() {
let api: BalenaAPIMock;
let builder: BuilderMock;

const commonQueryParams = [
['owner', 'bob'],
['app', 'testApp'],
['dockerfilePath', ''],
['emulated', 'false'],
['nocache', 'false'],
['headless', 'false'],
];

this.beforeEach(() => {
api = new BalenaAPIMock();
builder = new BuilderMock();
@@ -57,20 +103,28 @@ describe('balena push', function() {
builder.done();
});

it('should create the expected tar stream', async () => {
it('should create the expected tar stream (single container)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
Dockerfile: { fileSize: 85, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
};
const responseFilename = 'build-POST-v3.json';
const responseBody = await fs.readFile(
path.join(builderResponsePath, 'build-POST-v3.json'),
path.join(builderResponsePath, responseFilename),
'utf8',
);

builder.expectPostBuild({
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string | Buffer) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});
@@ -80,42 +134,100 @@ describe('balena push', function() {
);

expect(err).to.have.members([]);
expect(
cleanOutput(out).map(line =>
line
.replace(/\s{2,}/g, ' ')
.replace(/in \d+? seconds/, 'in 20 seconds'),
),
).to.include.members([
'[Info] Starting build for testApp, user gh_user',
'[Info] Dashboard link: https://dashboard.balena-cloud.com/apps/1301645/devices',
'[Info] Building on arm01',
'[Info] Pulling previous images for caching purposes...',
'[Success] Successfully pulled cache images',
'[main] Step 1/4 : FROM busybox',
'[main] ---> 76aea0766768',
'[main] Step 2/4 : COPY ./src/start.sh /start.sh',
'[main] ---> b563ad6a0801',
'[main] Step 3/4 : RUN chmod a+x /start.sh',
'[main] ---> Running in 10d4ddc40bfc',
'[main] Removing intermediate container 10d4ddc40bfc',
'[main] ---> 82e98871a32c',
'[main] Step 4/4 : CMD ["/start.sh"]',
'[main] ---> Running in 0682894e13eb',
'[main] Removing intermediate container 0682894e13eb',
'[main] ---> 889ccb6afc7c',
'[main] Successfully built 889ccb6afc7c',
'[Info] Uploading images',
'[Success] Successfully uploaded images',
'[Info] Built on arm01',
'[Success] Release successfully created!',
'[Info] Release: 05a24b5b034c9f95f25d4d74f0593bea (id: 1220245)',
'[Info] ┌─────────┬────────────┬────────────┐',
'[Info] │ Service │ Image Size │ Build Time │',
'[Info] ├─────────┼────────────┼────────────┤',
'[Info] │ main │ 1.32 MB │ 11 seconds │',
'[Info] └─────────┴────────────┴────────────┘',
'[Info] Build finished in 20 seconds',
expect(tweakOutput(out)).to.include.members([
...expectedResponses[responseFilename],
`[Warn] CRLF (Windows) line endings detected in file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
]);
});

it('should create the expected tar stream (alternative Dockerfile)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': { fileSize: 70, type: 'file' },
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
};
const responseFilename = 'build-POST-v3.json';
const responseBody = await fs.readFile(
path.join(builderResponsePath, responseFilename),
'utf8',
);

builder.expectPostBuild({
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(
commonQueryParams.map(i =>
i[0] === 'dockerfilePath'
? ['dockerfilePath', 'Dockerfile-alt']
: i,
),
);
},
checkBuildRequestBody: (buildRequestBody: string | Buffer) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});

const { out, err } = await runCommand(
`push testApp --source ${projectPath} --dockerfile Dockerfile-alt`,
);

expect(err).to.have.members([]);
expect(tweakOutput(out)).to.include.members(
expectedResponses[responseFilename],
);
});

it('should create the expected tar stream (single container, --convert-eol)', async () => {
const projectPath = path.join(projectsPath, 'no-docker-compose', 'basic');
const expectedFiles: TarStreamFiles = {
'src/start.sh': { fileSize: 89, type: 'file' },
'src/windows-crlf.sh': {
fileSize: 68,
type: 'file',
testStream: expectStreamNoCRLF,
},
Dockerfile: { fileSize: 88, type: 'file' },
'Dockerfile-alt': { fileSize: 30, type: 'file' },
};
const responseFilename = 'build-POST-v3.json';
const responseBody = await fs.readFile(
path.join(builderResponsePath, responseFilename),
'utf8',
);

builder.expectPostBuild({
responseCode: 200,
responseBody,
checkURI: async (uri: string) => {
const url = new URL(uri, 'http://test.net/');
const queryParams = Array.from(url.searchParams.entries());
expect(queryParams).to.have.deep.members(commonQueryParams);
},
checkBuildRequestBody: (buildRequestBody: string | Buffer) =>
inspectTarStream(buildRequestBody, expectedFiles, projectPath, expect),
});

const { out, err } = await runCommand(
`push testApp --source ${projectPath} --convert-eol`,
);

expect(err).to.have.members([]);
expect(tweakOutput(out)).to.include.members([
...expectedResponses[responseFilename],
`[Info] Converting line endings CRLF -> LF for file: ${path.join(
projectPath,
'src',
'windows-crlf.sh',
)}`,
]);
});
});
33 tests/config-tests.ts Normal file

@@ -0,0 +1,33 @@
/**
 * @license
 * Copyright 2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { configureBluebird, setMaxListeners } from '../build/app-common';

configureBluebird();
setMaxListeners(35); // it appears that 'nock' adds a bunch of listeners - bug?
// SL: Looks like it's not nock causing this, as have seen the problem triggered from help.spec,
// which is not using nock. Perhaps mocha/chai? (unlikely), or something in the CLI?

import { config as chaiCfg } from 'chai';

function configChai() {
chaiCfg.showDiff = true;
// enable diff comparison of large objects / arrays
chaiCfg.truncateThreshold = 0;
}

configChai();
@@ -20,7 +20,7 @@ import * as path from 'path';

import { NockMock, ScopeOpts } from './nock-mock';

const dockerResponsePath = path.normalize(
export const dockerResponsePath = path.normalize(
path.join(__dirname, 'test-data', 'docker-response'),
);

@@ -70,14 +70,16 @@ export class DockerMock extends NockMock {
responseBody: any;
responseCode: number;
tag: string;
checkURI: (uri: string) => Promise<void>;
checkBuildRequestBody: (requestBody: string) => Promise<void>;
}) {
this.optPost(
new RegExp(`^/build\\?t=${_.escapeRegExp(opts.tag)}&`),
opts,
).reply(async function(_uri, requestBody, cb) {
).reply(async function(uri, requestBody, cb) {
let error: Error | null = null;
try {
await opts.checkURI(uri);
if (typeof requestBody === 'string') {
await opts.checkBuildRequestBody(requestBody);
} else {
@@ -1,6 +1,6 @@
/**
 * @license
 * Copyright 2019 Balena Ltd.
 * Copyright 2019-2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,6 +15,10 @@
 * limitations under the License.
 */

// tslint:disable-next-line:no-var-requires
require('./config-tests'); // required for side effects

import { stripIndent } from 'common-tags';
import intercept = require('intercept-stdout');
import * as _ from 'lodash';
import { fs } from 'mz';
@@ -26,12 +30,6 @@ import * as tar from 'tar-stream';
import { streamToBuffer } from 'tar-utils';

import * as balenaCLI from '../build/app';
import { configureBluebird, setMaxListeners } from '../build/app-common';

configureBluebird();
setMaxListeners(35); // it appears that 'nock' adds a bunch of listeners - bug?
// SL: Looks like it's not nock causing this, as have seen the problem triggered from help.spec,
// which is not using nock. Perhaps mocha/chai? (unlikely), or something in the CLI?

export const runCommand = async (cmd: string) => {
const preArgs = [process.argv[0], path.join(process.cwd(), 'bin', 'balena')];
@@ -120,6 +118,7 @@ export interface TarStreamFiles {
[filePath: string]: {
fileSize: number;
type: tar.Headers['type'];
testStream?: (header: tar.Headers, stream: Readable) => Promise<void>;
};
}

@@ -162,13 +161,12 @@ export async function inspectTarStream(
fileSize: header.size || 0,
type: header.type,
};
const [buf, buf2] = await Promise.all([
streamToBuffer(stream),
fs.readFile(
path.join(projectPath, PathUtils.toNativePath(header.name)),
),
]);
expect(buf.equals(buf2)).to.be.true;
const expected = expectedFiles[header.name];
if (expected && expected.testStream) {
await expected.testStream(header, stream);
} else {
await defaultTestStream(header, stream, projectPath, expect);
}
}
} catch (err) {
reject(err);
@@ -183,5 +181,34 @@ export async function inspectTarStream(
sourceTarStream.pipe(extract);
});

expect(found).to.deep.equal(expectedFiles);
expect(found).to.deep.equal(
_.mapValues(expectedFiles, v => _.omit(v, 'testStream')),
);
}

/** Check that a tar stream entry matches the project contents in the filesystem */
async function defaultTestStream(
header: tar.Headers,
stream: Readable,
projectPath: string,
expect: Chai.ExpectStatic,
): Promise<void> {
const [buf, buf2] = await Promise.all([
streamToBuffer(stream),
fs.readFile(path.join(projectPath, PathUtils.toNativePath(header.name))),
]);
const msg = stripIndent`
contents mismatch for tar stream entry "${header.name}"
stream length=${buf.length}, filesystem length=${buf2.length}`;
expect(buf.equals(buf2), msg).to.be.true;
}

/** Test a tar stream entry for the absence of Windows CRLF line breaks */
export async function expectStreamNoCRLF(
_header: tar.Headers,
stream: Readable,
): Promise<void> {
const chai = await import('chai');
const buf = await streamToBuffer(stream);
await chai.expect(buf.includes('\r\n')).to.be.false;
}
18 tests/test-data/docker-response/build-POST.json Normal file

@@ -0,0 +1,18 @@
{"stream":"Step 1/4 : FROM busybox"}
{"stream":"\n"}
{"stream":" ---\u003e 64f5d945efcc\n"}
{"stream":"Step 2/4 : COPY ./src/start.sh /start.sh"}
{"stream":"\n"}
{"stream":" ---\u003e Using cache\n"}
{"stream":" ---\u003e 97098fc9d757\n"}
{"stream":"Step 3/4 : RUN chmod a+x /start.sh"}
{"stream":"\n"}
{"stream":" ---\u003e Using cache\n"}
{"stream":" ---\u003e 33728e2e3f7e\n"}
{"stream":"Step 4/4 : CMD [\"/start.sh\"]"}
{"stream":"\n"}
{"stream":" ---\u003e Using cache\n"}
{"stream":" ---\u003e 2590e3b11eaf\n"}
{"aux":{"ID":"sha256:2590e3b11eaf739491235016b53fec5d209c81837160abdd267c8fe5005ff1bd"}}
{"stream":"Successfully built 2590e3b11eaf\n"}
{"stream":"Successfully tagged basic_main:latest\n"}
@@ -1,4 +1,4 @@
FROM busybox
COPY ./src/start.sh /start.sh
RUN chmod a+x /start.sh
CMD ["/start.sh"]
COPY ./src /usr/src/
RUN chmod a+x /usr/src/*.sh
CMD ["/usr/src/start.sh"]
@@ -0,0 +1 @@
alternative Dockerfile (basic)
@@ -0,0 +1,2 @@
#!/bin/sh
echo 'this file was saved with Windows CRLF line endings'
55 tests/utils/eol-conversion.spec.ts Normal file

@@ -0,0 +1,55 @@
/**
 * @license
 * Copyright 2020 Balena Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { expect } from 'chai';

import { convertEolInPlace } from '../../build/utils/eol-conversion';

describe('convertEolInPlace() function', function() {
it('should return expected values', () => {
// pairs of [given input, expected output]
const testVector = [
['', ''],
['\r', '\r'],
['\n', '\n'],
['\r\r', '\r\r'],
['\n\r', '\n\r'],
['\r\n', '\n'],
['\r\n\n', '\n\n'],
['\r\n\r', '\n\r'],
['\r\n\r\n', '\n\n'],
['\r\n\n\r', '\n\n\r'],
['abc\r\ndef\r\n', 'abc\ndef\n'],
['abc\r\ndef\n\r', 'abc\ndef\n\r'],
['abc\r\ndef\n', 'abc\ndef\n'],
['abc\r\ndef\r', 'abc\ndef\r'],
['abc\r\ndef', 'abc\ndef'],
['\r\ndef\r\n', '\ndef\n'],
['\rdef\r', '\rdef\r'],
];
const js = JSON.stringify;

for (const [input, expected] of testVector) {
const result = convertEolInPlace(Buffer.from(input));
const resultStr = result.toString();
const msg = `input=${js(input)} result=${js(resultStr)} expected=${js(
expected,
)}`;
expect(resultStr).to.equal(expected, msg);
}
});
});