Mirror of https://github.com/balena-os/balena-supervisor.git (synced 2024-12-23 23:42:29 +00:00)

Merge pull request #1663 from balena-os/remove-deprecated-dependencies

Remove mz, mkdirp, body-parser dependencies

This commit is contained in commit dba9bf1576.

package-lock.json (generated)
@@ -592,21 +592,21 @@
"integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==",
"dev": true
},
"@types/mkdirp": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/@types/mkdirp/-/mkdirp-0.5.2.tgz",
"integrity": "sha512-U5icWpv7YnZYGsN4/cmh3WD2onMY0aJIiTE6+51TwJCttdHvtCYmkBNOobHlXwrJRL0nkH9jH4kD+1FAdMN4Tg==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/mocha": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.7.tgz",
"integrity": "sha512-NYrtPht0wGzhwe9+/idPaBB+TqkY9AhTvOLMkThm0IoEfLaiVQZwBwyJ5puCkO3AUCWrmcoePjp2mbFocKy4SQ==",
"dev": true
},
"@types/mock-fs": {
"version": "4.13.0",
"resolved": "https://registry.npmjs.org/@types/mock-fs/-/mock-fs-4.13.0.tgz",
"integrity": "sha512-FUqxhURwqFtFBCuUj3uQMp7rPSQs//b3O9XecAVxhqS9y4/W8SIJEZFq2mmpnFVZBXwR/2OyPLE97CpyYiB8Mw==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/morgan": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.0.tgz",

@@ -1569,12 +1569,21 @@
}
},
"blinking": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/blinking/-/blinking-0.0.3.tgz",
"integrity": "sha1-c6LX+J2z2lSzYFxJiqXYYGv8Hnc=",
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/blinking/-/blinking-0.0.4.tgz",
"integrity": "sha512-kIC2FbDXmd9ydtCYQjrSEpw/jpvqNKfn+uDSO03sLyEju9Q/ZZyJqvdB/sSSGsyzx3bc5J4MmcqvOY/iL7OgnA==",
"dev": true,
"requires": {
"@types/bluebird": "^3.5.33",
"bluebird": "^3.0.0"
},
"dependencies": {
"@types/bluebird": {
"version": "3.5.33",
"resolved": "https://registry.npmjs.org/@types/bluebird/-/bluebird-3.5.33.tgz",
"integrity": "sha512-ndEo1xvnYeHxm7I/5sF6tBvnsA4Tdi3zj1keRKRs12SP+2ye2A27NDJ1B6PqkfMbGAcT+mqQVqbZRIrhfOp5PQ==",
"dev": true
}
}
},
"bluebird": {

@@ -7062,6 +7071,12 @@
}
}
},
"mock-fs": {
"version": "4.14.0",
"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-4.14.0.tgz",
"integrity": "sha512-qYvlv/exQ4+svI3UOvPUpLDF0OMX5euvUH0Ny4N5QyRyhNdgAgUrVH3iUINSzEPLvx0kbo/Bp28GJKIqvE7URw==",
"dev": true
},
"morgan": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz",
@@ -55,10 +55,9 @@
"@types/lockfile": "^1.0.1",
"@types/lodash": "^4.14.159",
"@types/memoizee": "^0.4.4",
"@types/mkdirp": "^0.5.2",
"@types/mocha": "^5.2.7",
"@types/mock-fs": "^4.13.0",
"@types/morgan": "^1.9.0",
"@types/mz": "0.0.32",
"@types/node": "^12.12.54",
"@types/request": "^2.48.5",
"@types/rewire": "^2.5.28",

@@ -74,9 +73,8 @@
"@types/webpack": "^4.41.21",
"@types/yargs": "^15.0.12",
"balena-register-device": "^6.1.6",
"blinking": "~0.0.3",
"blinking": "^0.0.4",
"bluebird": "^3.7.2",
"body-parser": "^1.19.0",
"chai-as-promised": "^7.1.1",
"chai-events": "0.0.1",
"chai-like": "^1.1.1",

@@ -105,10 +103,9 @@
"lodash": "^4.17.20",
"memoizee": "^0.4.14",
"mixpanel": "^0.10.3",
"mkdirp": "^0.5.5",
"mocha": "^5.2.0",
"mock-fs": "^4.14.0",
"morgan": "^1.10.0",
"mz": "^2.7.0",
"network-checker": "^0.1.1",
"nodemon": "^2.0.4",
"pinejs-client-request": "^7.2.1",
@@ -1,5 +1,4 @@
import * as Bluebird from 'bluebird';
import * as bodyParser from 'body-parser';
import { stripIndent } from 'common-tags';
import * as express from 'express';
import { isLeft } from 'fp-ts/lib/Either';

@@ -578,8 +577,8 @@ export const initialized = (async () => {
})();

export const router = express.Router();
router.use(bodyParser.urlencoded({ limit: '10mb', extended: true }));
router.use(bodyParser.json({ limit: '10mb' }));
router.use(express.urlencoded({ limit: '10mb', extended: true }));
router.use(express.json({ limit: '10mb' }));

router.post('/v1/update', (req, res, next) => {
eventTracker.track('Update notification');
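Illustrative sketch (not part of the commit): Express 4.16 and later ship express.json() and express.urlencoded() as built-in middleware, which is why the body-parser dependency can be dropped without changing behaviour. The router name and limits below mirror the hunk above; everything else is assumed for the example.

import * as express from 'express';

const router = express.Router();
// Equivalent to the removed bodyParser.urlencoded / bodyParser.json calls
router.use(express.urlencoded({ limit: '10mb', extended: true }));
router.use(express.json({ limit: '10mb' }));

router.post('/v1/update', (req, res) => {
	// req.body is the parsed JSON or urlencoded payload, exactly as before
	res.sendStatus(204);
});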
@@ -1,4 +1,3 @@
import * as bodyParser from 'body-parser';
import * as express from 'express';
import * as _ from 'lodash';

@@ -61,8 +60,8 @@ const localModeManager = new LocalModeManager();

export const router = (() => {
const $router = express.Router();
$router.use(bodyParser.urlencoded({ extended: true, limit: '10mb' }));
$router.use(bodyParser.json({ limit: '10mb' }));
$router.use(express.urlencoded({ extended: true, limit: '10mb' }));
$router.use(express.json({ limit: '10mb' }));

createV1Api($router);
createV2Api($router);
@@ -1,15 +1,15 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import { fs } from 'mz';

import * as constants from '../lib/constants';
import { docker } from '../lib/docker-utils';
import { ENOENT, NotFoundError } from '../lib/errors';
import { NotFoundError } from '../lib/errors';
import logTypes = require('../lib/log-types');
import log from '../lib/supervisor-console';
import { exists } from '../lib/fs-utils';

import * as logger from '../logger';
import { Network } from './network';

import log from '../lib/supervisor-console';
import { ResourceRecreationAttemptError } from './errors';

export function getAll(): Bluebird<Network[]> {

@@ -69,23 +69,22 @@ export async function remove(network: Network) {
await network.remove();
}

export function supervisorNetworkReady(): Bluebird<boolean> {
return Bluebird.resolve(
fs.stat(`/sys/class/net/${constants.supervisorNetworkInterface}`),
)
.then(() => {
return docker.getNetwork(constants.supervisorNetworkInterface).inspect();
})
.then((network) => {
export async function supervisorNetworkReady(): Promise<boolean> {
const networkExists = exists(
`/sys/class/net/${constants.supervisorNetworkInterface}`,
);
if (!networkExists) {
return false;
}
const network = await docker
.getNetwork(constants.supervisorNetworkInterface)
.inspect();
return (
network.Options['com.docker.network.bridge.name'] ===
constants.supervisorNetworkInterface &&
network.IPAM.Config[0].Subnet === constants.supervisorNetworkSubnet &&
network.IPAM.Config[0].Gateway === constants.supervisorNetworkGateway
);
})
.catchReturn(NotFoundError, false)
.catchReturn(ENOENT, false);
}

export function ensureSupervisorNetwork(): Bluebird<void> {

@@ -109,11 +108,13 @@ export function ensureSupervisorNetwork(): Bluebird<void> {
) {
return removeIt();
} else {
return Bluebird.resolve(
fs.stat(`/sys/class/net/${constants.supervisorNetworkInterface}`),
)
.catch(ENOENT, removeIt)
.return();
return exists(
`/sys/class/net/${constants.supervisorNetworkInterface}`,
).then((networkExists) => {
if (!networkExists) {
return removeIt();
}
});
}
})
.catch(NotFoundError, () => {
@@ -4,7 +4,7 @@ import { EventEmitter } from 'events';
import { isLeft } from 'fp-ts/lib/Either';
import * as JSONStream from 'JSONStream';
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import StrictEventEmitter from 'strict-event-emitter-types';

import * as config from '../config';

@@ -18,9 +18,9 @@ import {
} from './types/service';
import * as ComposeUtils from './utils';

import * as constants from '../lib/constants';
import * as updateLock from '../lib/update-lock';
import { sanitiseComposeConfig } from './sanitise';
import { getPathOnHost } from '../lib/fs-utils';

import log from '../lib/supervisor-console';
import { EnvVarObject } from '../lib/types';

@@ -900,16 +900,14 @@ export class Service {
}

public handoverCompleteFullPathsOnHost(): string[] {
return [
path.join(this.handoverCompletePathOnHost(), 'handover-complete'),
path.join(this.handoverCompletePathOnHost(), 'resin-kill-me'),
];
}

private handoverCompletePathOnHost(): string {
return path.join(
constants.rootMountPoint,
updateLock.lockPath(this.appId || 0, this.serviceName || ''),
const lockPath = updateLock.lockPath(
this.appId || 0,
this.serviceName || '',
);
return getPathOnHost(
...['handover-complete', 'resin-kill-me'].map((tail) =>
path.join(lockPath, tail),
),
);
}
@@ -1,8 +1,7 @@
import * as _ from 'lodash';
import { child_process } from 'mz';

import * as constants from '../../lib/constants';
import { writeFileAtomic } from '../../lib/fs-utils';
import { writeFileAtomic, exec } from '../../lib/fs-utils';

export interface ConfigOptions {
[key: string]: string | string[];

@@ -15,7 +14,7 @@ export async function remountAndWriteAtomic(
data: string | Buffer,
): Promise<void> {
// Here's the dangerous part:
await child_process.exec(
await exec(
`mount -t vfat -o remount,rw ${constants.bootBlockDevice} ${bootMountPoint}`,
);
await writeFileAtomic(file, data);
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { child_process, fs } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';

import {

@@ -8,6 +8,7 @@ import {
bootMountPoint,
remountAndWriteAtomic,
} from './backend';
import { exec, exists } from '../../lib/fs-utils';
import * as constants from '../../lib/constants';
import * as logger from '../../logger';
import log from '../../lib/supervisor-console';

@@ -56,7 +57,7 @@ export class ConfigFs extends ConfigBackend {

const amlSrcPath = path.join(this.SystemAmlFiles, `${aml}.aml`);
// log to system log if the AML doesn't exist...
if (!(await fs.exists(amlSrcPath))) {
if (!(await exists(amlSrcPath))) {
log.error(`Missing AML for \'${aml}\'. Unable to load.`);
if (logger) {
logger.logSystemMessage(

@@ -80,9 +81,7 @@ export class ConfigFs extends ConfigBackend {
log.info(`Loading AML ${aml}`);
// we use `cat` here as this didn't work when using `cp` and all
// examples of this loading mechanism use `cat`.
await child_process.exec(
`cat ${amlSrcPath} > ${path.join(amlDstPath, 'aml')}`,
);
await exec(`cat ${amlSrcPath} > ${path.join(amlDstPath, 'aml')}`);

const [oemId, oemTableId, oemRevision] = await Promise.all([
fs.readFile(path.join(amlDstPath, 'oem_id'), 'utf8'),

@@ -101,7 +100,7 @@ export class ConfigFs extends ConfigBackend {

private async readConfigJSON(): Promise<ConfigfsConfig> {
// if we don't yet have a config file, just return an empty result...
if (!(await fs.exists(this.ConfigFilePath))) {
if (!(await exists(this.ConfigFilePath))) {
log.info('Empty ConfigFS config file');
return {};
}

@@ -134,7 +133,7 @@ export class ConfigFs extends ConfigBackend {
await super.initialise();

// load the acpi_configfs module...
await child_process.exec('modprobe acpi_configfs');
await exec('modprobe acpi_configfs');

// read the existing config file...
const config = await this.readConfigJSON();
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';

import {
ConfigOptions,

@@ -9,6 +9,7 @@ import {
} from './backend';
import * as constants from '../../lib/constants';
import log from '../../lib/supervisor-console';
import { exists } from '../../lib/fs-utils';

/**
* A backend to handle Raspberry Pi host configuration

@@ -61,7 +62,7 @@ export class ConfigTxt extends ConfigBackend {
public async getBootConfig(): Promise<ConfigOptions> {
let configContents = '';

if (await fs.exists(ConfigTxt.bootConfigPath)) {
if (await exists(ConfigTxt.bootConfigPath)) {
configContents = await fs.readFile(ConfigTxt.bootConfigPath, 'utf-8');
} else {
await fs.writeFile(ConfigTxt.bootConfigPath, '');
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as semver from 'semver';

import {
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';

import {
ConfigOptions,

@@ -10,6 +10,7 @@ import {
import * as constants from '../../lib/constants';
import log from '../../lib/supervisor-console';
import { ExtraUEnvError } from '../../lib/errors';
import { exists } from '../../lib/fs-utils';

/**
* Entry describes the configurable items in an extra_uEnv file

@@ -63,7 +64,7 @@ export class ExtraUEnv extends ConfigBackend {
(deviceType.endsWith('-nano') ||
deviceType.endsWith('-nano-emmc') ||
deviceType.endsWith('-tx2')) &&
(await fs.exists(ExtraUEnv.bootConfigPath))
(await exists(ExtraUEnv.bootConfigPath))
);
}
@@ -1,9 +1,10 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';

import * as constants from '../../lib/constants';
import { exists } from '../../lib/fs-utils';
import log from '../../lib/supervisor-console';
import {
bootMountPoint,

@@ -126,7 +127,7 @@ export class SplashImage extends ConfigBackend {

// The default boot image file has already
// been created
if (await fs.exists(SplashImage.DEFAULT)) {
if (await exists(SplashImage.DEFAULT)) {
return this;
}
@@ -1,6 +1,6 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';

import * as constants from '../lib/constants';

@@ -86,11 +86,6 @@ export default class ConfigJsonConfigBackend {
});
}

public async path(): Promise<string> {
await this.init();
return await this.pathOnHost();
}

private write(): Promise<void> {
let atomicWritePossible = true;
return this.pathOnHost()
@@ -1,7 +1,7 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import * as memoizee from 'memoizee';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import { URL } from 'url';

import supervisorVersion = require('../lib/supervisor-version');
@@ -1,5 +1,4 @@
import * as Bluebird from 'bluebird';
import * as bodyParser from 'body-parser';
import { stripIndent } from 'common-tags';
import { EventEmitter } from 'events';
import * as express from 'express';

@@ -95,8 +94,8 @@ function validateState(state: any): asserts state is TargetState {
// device api stuff in ./device-api
function createDeviceStateRouter() {
router = express.Router();
router.use(bodyParser.urlencoded({ limit: '10mb', extended: true }));
router.use(bodyParser.json({ limit: '10mb' }));
router.use(express.urlencoded({ limit: '10mb', extended: true }));
router.use(express.json({ limit: '10mb' }));

const rebootOrShutdown = async (
req: express.Request,

@@ -463,15 +462,11 @@ export async function loadInitialState() {
// breaks loose due to the liberal any casting
function emitAsync<T extends keyof DeviceStateEvents>(
ev: T,
...args: DeviceStateEvents[T] extends (...args: any) => void
? Parameters<DeviceStateEvents[T]>
...args: DeviceStateEvents[T] extends (...args: infer TArgs) => void
? TArgs
: Array<DeviceStateEvents[T]>
) {
if (_.isArray(args)) {
return setImmediate(() => events.emit(ev as any, ...(args as any)));
} else {
return setImmediate(() => events.emit(ev as any, args));
}
return setImmediate(() => events.emit(ev as any, ...args));
}

const readLockTarget = () =>
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';

import { Image, imageFromService } from '../compose/images';
import * as deviceState from '../device-state';
@@ -1,20 +1,14 @@
import * as Bluebird from 'bluebird';
import { stripIndent } from 'common-tags';
import * as _ from 'lodash';
import * as mkdirCb from 'mkdirp';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';

import * as config from './config';
import * as constants from './lib/constants';
import * as dbus from './lib/dbus';
import { ENOENT } from './lib/errors';
import { writeFileAtomic } from './lib/fs-utils';

const mkdirp = Bluebird.promisify(mkdirCb) as (
path: string,
opts?: any,
) => Bluebird<mkdirCb.Made>;
import { writeFileAtomic, mkdirp, unlinkAll } from './lib/fs-utils';

const redsocksHeader = stripIndent`
base {

@@ -131,13 +125,7 @@ function generateRedsocksConfEntries(conf: ProxyConfig): string {

async function setProxy(maybeConf: ProxyConfig | null): Promise<void> {
if (_.isEmpty(maybeConf)) {
try {
await Promise.all([fs.unlink(redsocksConfPath), fs.unlink(noProxyPath)]);
} catch (e) {
if (!ENOENT(e)) {
throw e;
}
}
await unlinkAll(redsocksConfPath, noProxyPath);
} else {
// We know that maybeConf is not null due to the _.isEmpty check above,
// but the compiler doesn't
@@ -1,42 +1,84 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import { fs } from 'mz';
import * as Path from 'path';
import * as constants from './constants';
import { ENOENT } from './errors';
import { promises as fs } from 'fs';
import * as path from 'path';
import { exec as execSync } from 'child_process';
import { promisify } from 'util';

export function writeAndSyncFile(
path: string,
import * as constants from './constants';

export const exec = promisify(execSync);

export async function writeAndSyncFile(
pathName: string,
data: string | Buffer,
): Bluebird<void> {
return Bluebird.resolve(fs.open(path, 'w')).then((fd) => {
_.isString(data)
? fs.write(fd, data, 0, 'utf8')
: fs
.write(fd, data, 0, data.length)
.then(() => fs.fsync(fd))
.then(() => fs.close(fd));
});
): Promise<void> {
const file = await fs.open(pathName, 'w');
if (typeof data === 'string') {
await file.write(data, 0, 'utf8');
} else {
await file.write(data, 0, data.length);
}
await file.sync();
await file.close();
}

export function writeFileAtomic(
path: string,
export async function writeFileAtomic(
pathName: string,
data: string | Buffer,
): Bluebird<void> {
return Bluebird.resolve(writeAndSyncFile(`${path}.new`, data)).then(() =>
fs.rename(`${path}.new`, path),
): Promise<void> {
await writeAndSyncFile(`${pathName}.new`, data);
await fs.rename(`${pathName}.new`, pathName);
}

export async function safeRename(src: string, dest: string): Promise<void> {
await fs.rename(src, dest);
const file = await fs.open(path.dirname(dest), 'r');
await file.sync();
await file.close();
}

export async function exists(p: string): Promise<boolean> {
try {
await fs.access(p);
return true;
} catch {
return false;
}
}

/**
* Check if a path exists as a direct child of the device's root mountpoint,
* which is equal to constants.rootMountPoint (`/mnt/root`).
*/
export function pathExistsOnHost(pathName: string): Promise<boolean> {
return exists(path.join(constants.rootMountPoint, pathName));
}

/**
* Recursively create directories until input directory.
* Equivalent to mkdirp package, which uses this under the hood.
*/
export async function mkdirp(pathName: string): Promise<void> {
await fs.mkdir(pathName, { recursive: true });
}

/**
* Safe unlink with built-in catch for invalid paths, to remove need
* for catch implementation everywhere else unlink is needed.
*/
export async function unlinkAll(...paths: string[]): Promise<void> {
await Promise.all(
paths.map((pathName) =>
fs.unlink(pathName).catch(() => {
/* Ignore nonexistent paths */
}),
),
);
}

export function safeRename(src: string, dest: string): Bluebird<void> {
return Bluebird.resolve(fs.rename(src, dest))
.then(() => fs.open(Path.dirname(dest), 'r'))
.tap(fs.fsync)
.then(fs.close);
}

export function pathExistsOnHost(p: string): Bluebird<boolean> {
return Bluebird.resolve(fs.stat(Path.join(constants.rootMountPoint, p)))
.return(true)
.catchReturn(ENOENT, false);
/**
* Get one or more paths as they exist in relation to host OS's root.
*/
export function getPathOnHost(...paths: string[]): string[] {
return paths.map((p: string) => path.join(constants.rootMountPoint, p));
}
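Illustrative sketch (not part of the commit), showing how call sites migrate from the removed mz and mkdirp helpers to the new fs-utils module above. The import path and the concrete paths and commands are assumptions for the example; the helper signatures come from the hunk above.

import { exec, exists, mkdirp, unlinkAll, getPathOnHost } from './fs-utils';

async function migrationExample(): Promise<void> {
	// util.promisify(child_process.exec) resolves to an object, not an mz-style tuple
	const { stdout } = await exec('uname -r');
	console.log(stdout.toString().trim());

	// fs.promises has no exists(); the new helper wraps fs.access
	if (!(await exists('/tmp/balena-supervisor/services'))) {
		await mkdirp('/tmp/balena-supervisor/services'); // fs.mkdir with { recursive: true }
	}

	// Unlink several files, silently ignoring ones that do not exist
	await unlinkAll('/tmp/updates.lock', '/tmp/resin-updates.lock');

	// Resolve paths against the host OS root mount point (constants.rootMountPoint)
	const [backupPath] = getPathOnHost('mnt/data/backup');
	console.log(backupPath);
}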
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { child_process } from 'mz';
import { spawn } from 'child_process';
import { Readable } from 'stream';
import { TypedError } from 'typed-error';

@@ -168,7 +168,7 @@ const iptablesRestoreAdaptor: RuleAdaptor = async (
stdinStream.push(null);

// run the restore...
const proc = child_process.spawn(cmd, args, { shell: true });
const proc = spawn(cmd, args, { shell: true });

// pipe the rules...
stdinStream.pipe(proc.stdin);
@@ -1,11 +1,12 @@
import * as _ from 'lodash';
import { promises as fs, exists } from 'mz/fs';
import { promises as fs } from 'fs';
import * as path from 'path';
import { TypedError } from 'typed-error';

import log from './supervisor-console';
import { shouldReportInterface } from '../network';
import { exists } from './fs-utils';

import { TypedError } from 'typed-error';
export class MacAddressError extends TypedError {}

export async function getAll(sysClassNet: string): Promise<string | undefined> {
@@ -1,11 +1,9 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import * as mkdirp from 'mkdirp';
import { child_process, fs } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';
import * as rimraf from 'rimraf';

const mkdirpAsync = Bluebird.promisify(mkdirp);
const rimrafAsync = Bluebird.promisify(rimraf);

import * as apiBinder from '../api-binder';

@@ -23,7 +21,7 @@ import {
InternalInconsistencyError,
} from '../lib/errors';
import { docker } from '../lib/docker-utils';
import { pathExistsOnHost } from '../lib/fs-utils';
import { exec, pathExistsOnHost, mkdirp } from '../lib/fs-utils';
import { log } from '../lib/supervisor-console';
import type {
AppsJsonFormat,

@@ -298,8 +296,8 @@ export async function loadBackupFromMigration(
const backupPath = path.join(constants.rootMountPoint, 'mnt/data/backup');
// We clear this path in case it exists from an incomplete run of this function
await rimrafAsync(backupPath);
await mkdirpAsync(backupPath);
await child_process.exec(`tar -xzf backup.tgz -C ${backupPath}`, {
await mkdirp(backupPath);
await exec(`tar -xzf backup.tgz -C ${backupPath}`, {
cwd: path.join(constants.rootMountPoint, 'mnt/data'),
});
@@ -1,7 +1,8 @@
import * as _ from 'lodash';
import { child_process, fs } from 'mz';
import { promises as fs } from 'fs';

import { InternalInconsistencyError } from './errors';
import { exec } from './fs-utils';
import log from './supervisor-console';

// Retrieve the data for the OS once only per path

@@ -69,7 +70,7 @@ const L4T_REGEX = /^.*-l4t-r(\d+\.\d+(\.?\d+)?).*$/;
export async function getL4tVersion(): Promise<string | undefined> {
// We call `uname -r` on the host, and look for l4t
try {
const [stdout] = await child_process.exec('uname -r');
const { stdout } = await exec('uname -r');
const match = L4T_REGEX.exec(stdout.toString().trim());
if (match == null) {
return;
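A note on the recurring change from "const [stdout] = await child_process.exec(...)" to "const { stdout } = await exec(...)": mz's promisified child_process.exec resolves to an array-like [stdout, stderr], while util.promisify(child_process.exec), which backs the new exec helper in fs-utils, resolves to an object { stdout, stderr }. Minimal sketch, assuming only Node's built-in modules:

import { exec as execCb } from 'child_process';
import { promisify } from 'util';

const exec = promisify(execCb);

async function unameExample(): Promise<string> {
	// Destructure the { stdout, stderr } object returned by the promisified exec
	const { stdout } = await exec('uname -r');
	return stdout.toString().trim();
}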
@@ -1,6 +1,8 @@
import * as systeminformation from 'systeminformation';
import * as _ from 'lodash';
import { fs, child_process } from 'mz';
import { promises as fs } from 'fs';

import { exec } from './fs-utils';

export async function getCpuUsage(): Promise<number> {
const cpuData = await systeminformation.currentLoad();

@@ -78,7 +80,7 @@ export async function getCpuId(): Promise<string | undefined> {
const undervoltageRegex = /under.*voltage/;
export async function undervoltageDetected(): Promise<boolean> {
try {
const [dmesgStdout] = await child_process.exec('dmesg');
const { stdout: dmesgStdout } = await exec('dmesg');
return undervoltageRegex.test(dmesgStdout.toString());
} catch {
return false;
@@ -1,12 +1,12 @@
import * as Bluebird from 'bluebird';
import * as lockFileLib from 'lockfile';
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';
import * as Lock from 'rwlock';

import constants = require('./constants');
import { ENOENT, UpdatesLockedError } from './errors';
import { getPathOnHost } from './fs-utils';

type asyncLockFile = typeof lockFileLib & {
unlockAsync(path: string): Bluebird<void>;

@@ -19,17 +19,19 @@ export type LockCallback = (
fn: () => PromiseLike<void>,
) => Bluebird<void>;

function baseLockPath(appId: number): string {
return path.join('/tmp/balena-supervisor/services', appId.toString());
}

export function lockPath(appId: number, serviceName: string): string {
return path.join(baseLockPath(appId), serviceName);
export function lockPath(appId: number, serviceName?: string): string {
return path.join(
'/tmp/balena-supervisor/services',
appId.toString(),
serviceName ?? '',
);
}

function lockFilesOnHost(appId: number, serviceName: string): string[] {
return ['updates.lock', 'resin-updates.lock'].map((filename) =>
path.join(constants.rootMountPoint, lockPath(appId, serviceName), filename),
return getPathOnHost(
...['updates.lock', 'resin-updates.lock'].map((filename) =>
path.join(lockPath(appId), serviceName, filename),
),
);
}

@@ -79,10 +81,7 @@ export function lock(
}
return writeLock(appId)
.tap((release: () => void) => {
const lockDir = path.join(
constants.rootMountPoint,
baseLockPath(appId),
);
const [lockDir] = getPathOnHost(lockPath(appId));

return Bluebird.resolve(fs.readdir(lockDir))
.catchReturn(ENOENT, [])
@@ -1,6 +1,6 @@
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs, watch } from 'fs';
import * as networkCheck from 'network-checker';
import * as os from 'os';
import * as url from 'url';

@@ -74,7 +74,7 @@ export const startConnectivityCheck = _.once(
log.debug('VPN status path exists.');
})
.then(() => {
fs.watch(constants.vpnStatusPath, vpnStatusInotifyCallback);
watch(constants.vpnStatusPath, vpnStatusInotifyCallback);
});

if (enable) {
@@ -1,7 +1,10 @@
import * as Promise from 'bluebird';
import * as _ from 'lodash';
import * as express from 'express';
import { fs, child_process as childProcess } from 'mz';
import { promises as fs } from 'fs';
import * as path from 'path';
import * as url from 'url';

import * as request from './lib/request';
import * as constants from './lib/constants';
import {

@@ -9,26 +12,20 @@ import {
validStringOrUndefined,
validObjectOrUndefined,
} from './lib/validation';
import * as path from 'path';
import * as mkdirp from 'mkdirp';
import * as bodyParser from 'body-parser';
import * as url from 'url';
import { log } from './lib/supervisor-console';
import * as dockerUtils from './lib/docker-utils';
import { InternalInconsistencyError } from './lib/errors';
import * as apiHelper from './lib/api-helper';
import { exec, mkdirp } from './lib/fs-utils';

import { normalise } from './compose/images';
import { log } from './lib/supervisor-console';
import * as db from './db';
import * as config from './config';
import * as dockerUtils from './lib/docker-utils';
import * as logger from './logger';
import { InternalInconsistencyError } from './lib/errors';

import * as apiBinder from './api-binder';
import * as apiHelper from './lib/api-helper';
import * as dbFormat from './device-state/db-format';
import * as deviceConfig from './device-config';

const mkdirpAsync = Promise.promisify(mkdirp);

const isDefined = _.negate(_.isUndefined);

const parseDeviceFields = function (device) {

@@ -52,8 +49,8 @@ const getTarArchive = (source, destination) =>
fs
.lstat(destination)
.catch(() =>
mkdirpAsync(path.dirname(destination)).then(() =>
childProcess.exec(`tar -cvf '${destination}' *`, { cwd: source }),
mkdirp(path.dirname(destination)).then(() =>
exec(`tar -cvf '${destination}' *`, { cwd: source }),
),
);

@@ -92,8 +89,8 @@ const formatCurrentAsState = (device) => ({

const createProxyvisorRouter = function (proxyvisor) {
const router = express.Router();
router.use(bodyParser.urlencoded({ limit: '10mb', extended: true }));
router.use(bodyParser.json({ limit: '10mb' }));
router.use(express.urlencoded({ limit: '10mb', extended: true }));
router.use(express.json({ limit: '10mb' }));
router.get('/v1/devices', async (_req, res) => {
try {
const fields = await db.models('dependentDevice').select();
@@ -4,11 +4,12 @@ import * as _ from 'lodash';
import { Builder } from 'resin-docker-build';

import { promises as fs } from 'fs';
import { child_process } from 'mz';
import * as Path from 'path';
import { Duplex, Readable } from 'stream';
import * as tar from 'tar-stream';

import { exec } from '../src/lib/fs-utils';

export function getDocker(deviceAddress: string): Docker {
return new Docker({
host: deviceAddress,

@@ -146,7 +147,7 @@ async function tarDirectory(
// Absolutely no escaping in this function, just be careful
async function runSshCommand(address: string, command: string) {
// TODO: Make the port configurable
const [stdout] = await child_process.exec(
const { stdout } = await exec(
'ssh -p 22222 -o LogLevel=ERROR ' +
'-o StrictHostKeyChecking=no ' +
'-o UserKnownHostsFile=/dev/null ' +
@@ -1,10 +1,8 @@
import * as packageJson from '../package.json';

import * as livepush from 'livepush';

import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as yargs from 'yargs';

import * as packageJson from '../package.json';
import * as device from './device';
import * as init from './init';
import { startLivepush } from './livepush';
@@ -1,7 +1,4 @@
import ChaiConfig = require('./lib/chai-config');

const { expect } = ChaiConfig;

import { expect } from 'chai';
import constants = require('../src/lib/constants');

describe('constants', function () {
@@ -1,13 +1,10 @@
import * as Bluebird from 'bluebird';
import * as Knex from 'knex';
import { fs } from 'mz';

import ChaiConfig = require('./lib/chai-config');
import { expect } from 'chai';
import prepare = require('./lib/prepare');

import * as constants from '../src/lib/constants';

const { expect } = ChaiConfig;
import { exists } from '../src/lib/fs-utils';

async function createOldDatabase(path: string) {
const knex = Knex({

@@ -63,7 +60,7 @@ describe('Database Migrations', () => {

const testDb = await import('../src/db');
await testDb.initialized;
await fs.stat(databasePath);
expect(await exists(databasePath)).to.be.true;
});

it('adds new fields and removes old ones in an old database', async () => {

@@ -109,9 +106,8 @@ describe('Database', () => {
it('initializes correctly, running the migrations', () => {
return expect(db.initialized).to.be.fulfilled;
});
it('creates a database at the path from an env var', () => {
const promise = fs.stat(process.env.DATABASE_PATH!);
return expect(promise).to.be.fulfilled;
it('creates a database at the path from an env var', async () => {
expect(await exists(process.env.DATABASE_PATH!)).to.be.true;
});
it('creates a deviceConfig table with a single default value', async () => {
const deviceConfig = await db.models('deviceConfig').select();
@@ -1,8 +1,8 @@
import * as _ from 'lodash';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import { SinonStub, stub } from 'sinon';

import chai = require('./lib/chai-config');
import * as chai from 'chai';
import prepare = require('./lib/prepare');
import * as conf from '../src/config';

@@ -20,12 +20,6 @@ describe('Config', () => {
await conf.initialized;
});

it('uses the correct config.json path', async () => {
expect(await conf.configJsonBackend.path()).to.equal(
'test/data/config.json',
);
});

it('reads and exposes values from the config.json', async () => {
const id = await conf.get('applicationId');
return expect(id).to.equal(78373);
@@ -1,7 +1,7 @@
import * as _ from 'lodash';
import { stub } from 'sinon';

import chai = require('./lib/chai-config');
import * as chai from 'chai';
import { StatusCodeError } from '../src/lib/errors';
import prepare = require('./lib/prepare');
import * as dockerUtils from '../src/lib/docker-utils';
@@ -1,5 +1,5 @@
import * as _ from 'lodash';
import { expect } from './lib/chai-config';
import { expect } from 'chai';

import * as validation from '../src/lib/validation';
@@ -1,5 +1,5 @@
import { fs } from 'mz';
import { expect } from './lib/chai-config';
import { promises as fs } from 'fs';
import { expect } from 'chai';

import blink = require('../src/lib/blink');
import constants = require('../src/lib/constants');
@@ -1,7 +1,7 @@
import * as mixpanel from 'mixpanel';
import { SinonStub, stub, spy, SinonSpy } from 'sinon';

import { expect } from './lib/chai-config';
import { expect } from 'chai';

import log from '../src/lib/supervisor-console';
import supervisorVersion = require('../src/lib/supervisor-version');
@@ -1,8 +1,8 @@
import { fs } from 'mz';
import { promises as fs } from 'fs';
import * as os from 'os';
import { stub, spy } from 'sinon';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import Log from '../src/lib/supervisor-console';
import * as network from '../src/network';
@@ -1,5 +1,5 @@
import { stripIndent } from 'common-tags';
import { fs } from 'mz';
import { promises as fs } from 'fs';
import { Server } from 'net';
import { SinonSpy, SinonStub, spy, stub } from 'sinon';

@@ -7,7 +7,7 @@ import prepare = require('./lib/prepare');
import * as config from '../src/config';
import * as deviceState from '../src/device-state';
import Log from '../src/lib/supervisor-console';
import chai = require('./lib/chai-config');
import { expect } from 'chai';
import balenaAPI = require('./lib/mocked-balena-api');
import { schema } from '../src/config/schema';
import ConfigJsonConfigBackend from '../src/config/configJson';

@@ -21,7 +21,6 @@ import { DeviceNotFoundError } from '../src/lib/errors';

import { eventTrackSpy } from './lib/mocked-event-tracker';

const { expect } = chai;
let ApiBinder: typeof import('../src/api-binder');

class ExpectedError extends TypedError {}
@@ -2,7 +2,7 @@ import * as https from 'https';
import * as stream from 'stream';
import * as zlib from 'zlib';
import * as Promise from 'bluebird';
import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as sinon from 'sinon';

import { ContainerLogs } from '../src/logging/container';
@@ -1,8 +1,8 @@
import { stripIndent } from 'common-tags';
import { child_process, fs } from 'mz';
import { promises as fs } from 'fs';
import { SinonStub, stub, spy, SinonSpy } from 'sinon';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as deviceConfig from '../src/device-config';
import * as fsUtils from '../src/lib/fs-utils';
import * as logger from '../src/logger';

@@ -131,7 +131,7 @@ describe('Device Backend Config', () => {

it('writes the target config.txt', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
const current = {
HOST_CONFIG_initramfs: 'initramf.gz 0x00800000',
HOST_CONFIG_dtparam: '"i2c=on","audio=on"',

@@ -154,7 +154,7 @@ describe('Device Backend Config', () => {

// @ts-ignore accessing private value
await deviceConfig.setBootConfig(configTxtBackend, target);
expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledWith(

@@ -171,12 +171,12 @@ describe('Device Backend Config', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
});

it('ensures required fields are written to config.txt', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
stub(config, 'get').withArgs('deviceType').resolves('fincm3');
const current = {
HOST_CONFIG_initramfs: 'initramf.gz 0x00800000',

@@ -200,7 +200,7 @@ describe('Device Backend Config', () => {

// @ts-ignore accessing private value
await deviceConfig.setBootConfig(configTxtBackend, target);
expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledWith(

@@ -218,7 +218,7 @@ describe('Device Backend Config', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
(config.get as SinonStub).restore();
});

@@ -269,7 +269,7 @@ describe('Device Backend Config', () => {
describe('Extlinux files', () => {
it('should correctly write to extlinux.conf files', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();

const current = {};
const target = {

@@ -284,7 +284,7 @@ describe('Device Backend Config', () => {

// @ts-ignore accessing private value
await deviceConfig.setBootConfig(extlinuxBackend, target);
expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledWith(

@@ -303,7 +303,7 @@ describe('Device Backend Config', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
});
});

@@ -395,7 +395,7 @@ describe('Device Backend Config', () => {
describe('ConfigFS files', () => {
it('should correctly write to configfs.json files', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();

const current = {};
const target = {

@@ -414,7 +414,7 @@ describe('Device Backend Config', () => {

// @ts-ignore accessing private value
await deviceConfig.setBootConfig(configFsBackend, target);
expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledWith(

@@ -424,13 +424,13 @@ describe('Device Backend Config', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
});

it('should correctly load the configfs.json file', async () => {
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
stub(fsUtils, 'writeFileAtomic').resolves();
stub(fs, 'exists').resolves(true);
stub(fsUtils, 'exists').resolves(true);
stub(fs, 'mkdir').resolves();
stub(fs, 'readdir').resolves([]);
stub(fs, 'readFile').callsFake((file) => {

@@ -445,20 +445,20 @@ describe('Device Backend Config', () => {
});

await configFsBackend.initialise();
expect(child_process.exec).to.be.calledWith('modprobe acpi_configfs');
expect(child_process.exec).to.be.calledWith(
expect(fsUtils.exec).to.be.calledWith('modprobe acpi_configfs');
expect(fsUtils.exec).to.be.calledWith(
`mount -t vfat -o remount,rw ${constants.bootBlockDevice} ./test/data/mnt/boot`,
);
expect(child_process.exec).to.be.calledWith(
expect(fsUtils.exec).to.be.calledWith(
'cat test/data/boot/acpi-tables/spidev1.1.aml > test/data/sys/kernel/config/acpi/table/spidev1.1/aml',
);
expect((fs.exists as SinonSpy).callCount).to.equal(2);
expect((fsUtils.exists as SinonSpy).callCount).to.equal(2);
expect((fs.readFile as SinonSpy).callCount).to.equal(4);

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fs.exists as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
(fsUtils.exists as SinonStub).restore();
(fs.mkdir as SinonStub).restore();
(fs.readdir as SinonStub).restore();
(fs.readFile as SinonStub).restore();

@@ -537,18 +537,18 @@ describe('Device Backend Config', () => {
beforeEach(() => {
// Setup stubs
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
});

afterEach(() => {
// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
});

it('should correctly write to resin-logo.png', async () => {
// Devices with balenaOS < 2.51 use resin-logo.png
stub(fs, 'readdir').resolves(['resin-logo.png']);
stub(fs, 'readdir').resolves(['resin-logo.png'] as any);

const current = {};
const target = {

@@ -568,7 +568,7 @@ describe('Device Backend Config', () => {

await deviceConfig.setBootConfig(splashImageBackend, target);

expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledOnceWith(

@@ -581,7 +581,7 @@ describe('Device Backend Config', () => {

it('should correctly write to balena-logo.png', async () => {
// Devices with balenaOS >= 2.51 use balena-logo.png
stub(fs, 'readdir').resolves(['balena-logo.png']);
stub(fs, 'readdir').resolves(['balena-logo.png'] as any);

const current = {};
const target = {

@@ -601,7 +601,7 @@ describe('Device Backend Config', () => {

await deviceConfig.setBootConfig(splashImageBackend, target);

expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledOnceWith(

@@ -634,7 +634,7 @@ describe('Device Backend Config', () => {

await deviceConfig.setBootConfig(splashImageBackend, target);

expect(child_process.exec).to.be.calledOnce;
expect(fsUtils.exec).to.be.calledOnce;
expect(logSpy).to.be.calledTwice;
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
expect(fsUtils.writeFileAtomic).to.be.calledOnceWith(

@@ -646,7 +646,7 @@ describe('Device Backend Config', () => {
});

it('should correctly read the splash logo if different from the default', async () => {
stub(fs, 'readdir').resolves(['balena-logo.png']);
stub(fs, 'readdir').resolves(['balena-logo.png'] as any);

const readFileStub: SinonStub = stub(fs, 'readFile').resolves(
Buffer.from(png, 'base64') as any,
@@ -10,7 +10,7 @@ import * as deviceState from '../src/device-state';
import * as dockerUtils from '../src/lib/docker-utils';
import * as images from '../src/compose/images';

import chai = require('./lib/chai-config');
import * as chai from 'chai';
import prepare = require('./lib/prepare');
import * as db from '../src/db';
import * as dbFormat from '../src/device-state/db-format';
@@ -1,4 +1,4 @@
import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as conversion from '../src/lib/conversions';

describe('conversions', function () {
@@ -1,5 +1,5 @@
import { PortMap, PortRange } from '../src/compose/ports';
import { expect } from './lib/chai-config';
import { expect } from 'chai';

// Force cast `PortMap` as a public version so we can test it
const PortMapPublic = (PortMap as any) as new (
@@ -1,7 +1,7 @@
import { stub } from 'sinon';
import * as _ from 'lodash';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as config from '../src/config';
import * as configUtils from '../src/config/utils';
import { ExtraUEnv } from '../src/config/backends/extra-uEnv';
@@ -1,5 +1,5 @@
import { SinonStub, stub } from 'sinon';
import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as _ from 'lodash';

import * as apiBinder from '../src/api-binder';
@@ -1,4 +1,4 @@
import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as ComposeUtils from '../src/compose/utils';

describe('Composition utilities', () =>
@@ -1,7 +1,5 @@
import { assert, expect } from 'chai';
import { SinonStub, stub } from 'sinon';

import { child_process } from 'mz';
import * as semver from 'semver';

import * as constants from '../src/lib/constants';

@@ -12,6 +10,7 @@ import {
} from '../src/lib/contracts';
import * as osRelease from '../src/lib/os-release';
import supervisorVersion = require('../src/lib/supervisor-version');
import * as fsUtils from '../src/lib/fs-utils';

describe('Container contracts', () => {
before(() => {

@@ -405,9 +404,10 @@ describe('L4T version detection', () => {
let execStub: SinonStub;

const seedExec = (version: string) => {
execStub = stub(child_process, 'exec').returns(
Promise.resolve([Buffer.from(version), Buffer.from('')]),
);
execStub = stub(fsUtils, 'exec').resolves({
stdout: Buffer.from(version),
stderr: Buffer.from(''),
});
};

afterEach(() => {
@@ -1,7 +1,7 @@
import { SinonStub, stub } from 'sinon';
import constants = require('../src/lib/constants');
import { spawnJournalctl } from '../src/lib/journald';
import { expect } from './lib/chai-config';
import { expect } from 'chai';

describe('journald', () => {
let spawn: SinonStub;
@@ -1,8 +1,8 @@
import { child_process, fs } from 'mz';
import { promises as fs } from 'fs';
import { stripIndent } from 'common-tags';
import { SinonStub, stub } from 'sinon';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as fsUtils from '../src/lib/fs-utils';
import { Extlinux } from '../src/config/backends/extlinux';

@@ -182,7 +182,7 @@ describe('Extlinux Configuration', () => {

it('sets new config values', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();

await backend.setBootConfig({
fdt: '/boot/mycustomdtb.dtb',

@@ -205,7 +205,7 @@ describe('Extlinux Configuration', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
});

it('only allows supported configuration options', () => {
@@ -1,5 +1,5 @@
import { AppendDirective } from '../src/config/backends/extlinux-file';
import { expect } from './lib/chai-config';
import { expect } from 'chai';

describe('APPEND directive', () => {
const supportedConfigValues = ['isolcpus'];
@@ -1,5 +1,5 @@
import { FDTDirective } from '../src/config/backends/extlinux-file';
import { expect } from './lib/chai-config';
import { expect } from 'chai';

describe('FDT directive', () => {
const directive = new FDTDirective();
@ -1,8 +1,8 @@
import { child_process, fs } from 'mz';
import { promises as fs } from 'fs';
import { stripIndent } from 'common-tags';
import { SinonStub, spy, stub } from 'sinon';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as fsUtils from '../src/lib/fs-utils';
import Log from '../src/lib/supervisor-console';
import { ExtraUEnv } from '../src/config/backends/extra-uEnv';

@ -67,7 +67,7 @@ describe('extra_uEnv Configuration', () => {
});

it('only matches supported devices', async () => {
const existsStub = stub(fs, 'exists');
const existsStub = stub(fsUtils, 'exists');
for (const device of MATCH_TESTS) {
// Test device that has extra_uEnv.txt
let hasExtraUEnv = true;

@ -109,7 +109,7 @@ describe('extra_uEnv Configuration', () => {

it('sets new config values', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
const logWarningStub = spy(Log, 'warn');

// This config contains a value set from something else

@ -138,13 +138,13 @@ describe('extra_uEnv Configuration', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
logWarningStub.restore();
});

it('sets new config values containing collections', async () => {
stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
const logWarningStub = spy(Log, 'warn');

// @ts-ignore accessing private value

@ -173,7 +173,7 @@ describe('extra_uEnv Configuration', () => {

// Restore stubs
(fsUtils.writeFileAtomic as SinonStub).restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
logWarningStub.restore();
// @ts-ignore accessing private value
ExtraUEnv.supportedConfigs = previousSupportedConfigs;
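The stub target changes here because Node's `fs.promises` deliberately has no `exists`, so the backend now goes through `fsUtils.exists`. Its implementation is not part of this diff; a minimal sketch, assuming it maps an `fs.access` rejection to `false`, would be:

// Hypothetical sketch; the real helper lives in src/lib/fs-utils.ts.
import { promises as fs } from 'fs';

export async function exists(p: string): Promise<boolean> {
	try {
		// fs.access resolves when the path is reachable and rejects otherwise.
		await fs.access(p);
		return true;
	} catch {
		return false;
	}
}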
@ -3,7 +3,7 @@ import { promises as fs } from 'fs';

import { resolve } from 'path';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import Log from '../src/lib/supervisor-console';
import { Odmdata } from '../src/config/backends/odmdata';

@ -1,7 +1,7 @@
import { expect } from 'chai';
import { stub } from 'sinon';
import * as systeminformation from 'systeminformation';
import { fs } from 'mz';
import { promises as fs } from 'fs';

import * as sysInfo from '../src/lib/system-info';
@ -1,7 +1,7 @@
import * as _ from 'lodash';

import { docker } from '../src/lib/docker-utils';
import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as Images from '../src/compose/images';
import * as mockedDockerode from './lib/mocked-dockerode';
import * as mockedDatabase from './lib/mocked-database';

@ -4,7 +4,7 @@ import { Promise } from 'bluebird';
import * as _ from 'lodash';
import rewire = require('rewire');

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import { sleep } from './lib/helpers';
import * as TargetState from '../src/device-state/target-state';
import Log from '../src/lib/supervisor-console';
@ -13,6 +13,7 @@ import * as supertest from 'supertest';
import * as path from 'path';
import { promises as fs } from 'fs';

import { exists, unlinkAll } from '../src/lib/fs-utils';
import * as appMock from './lib/application-state-mock';
import * as mockedDockerode from './lib/mocked-dockerode';
import mockedAPI = require('./lib/mocked-device-api');

@ -826,7 +827,7 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
});

it('returns current host config (hostname only)', async () => {
await Promise.all([fs.unlink(redsocksPath), fs.unlink(noProxyPath)]);
await unlinkAll(redsocksPath, noProxyPath);

await request
.get('/v1/device/host-config')

@ -839,7 +840,7 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
});

it('errors if no hostname file exists', async () => {
await fs.unlink(hostnamePath);
await unlinkAll(hostnamePath);

await request
.get('/v1/device/host-config')

@ -896,7 +897,7 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
});

it('updates the hostname with provided string if string is not empty', async () => {
await Promise.all([fs.unlink(redsocksPath), fs.unlink(noProxyPath)]);
await unlinkAll(redsocksPath, noProxyPath);

const patchBody = { network: { hostname: 'newdevice' } };

@ -924,8 +925,7 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
});

it('updates hostname to first 7 digits of device uuid when sent invalid hostname', async () => {
await Promise.all([fs.unlink(redsocksPath), fs.unlink(noProxyPath)]);

await unlinkAll(redsocksPath, noProxyPath);
await request
.patch('/v1/device/host-config')
.send({ network: { hostname: '' } })

@ -965,8 +965,8 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
.then(async (response) => {
validatePatchResponse(response);

expect(fs.stat(redsocksPath)).to.be.rejected;
expect(fs.stat(noProxyPath)).to.be.rejected;
expect(await exists(redsocksPath)).to.be.false;
expect(await exists(noProxyPath)).to.be.false;
});

expect(restartServiceSpy.callCount).to.equal(2);
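The old tests wrapped each `fs.unlink` in `Promise.all` and tolerated rejections when a file was already gone; the new `unlinkAll` helper from `src/lib/fs-utils` centralises that concern. The helper is not shown in this diff, but a sketch consistent with how the tests use it, assuming missing files are silently ignored, would be:

// Hypothetical sketch; the real unlinkAll lives in src/lib/fs-utils.ts.
import { promises as fs } from 'fs';

export async function unlinkAll(...paths: string[]): Promise<void> {
	// Ignore per-file failures (e.g. ENOENT) so callers need no try/catch.
	await Promise.all(
		paths.map((p) =>
			fs.unlink(p).catch(() => {
				/* noop */
			}),
		),
	);
}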
@ -1,7 +1,7 @@
import { fs, child_process } from 'mz';
import { promises as fs } from 'fs';
import { SinonStub, stub } from 'sinon';

import { expect } from './lib/chai-config';
import { expect } from 'chai';
import * as fsUtils from '../src/lib/fs-utils';
import { SplashImage } from '../src/config/backends/splash-image';
import log from '../src/lib/supervisor-console';

@ -20,27 +20,27 @@ describe('Splash image configuration', () => {
beforeEach(() => {
// Setup stubs
writeFileAtomicStub = stub(fsUtils, 'writeFileAtomic').resolves();
stub(child_process, 'exec').resolves();
stub(fsUtils, 'exec').resolves();
readFileStub = stub(fs, 'readFile').resolves(
Buffer.from(logo, 'base64') as any,
);
readFileStub
.withArgs('test/data/mnt/boot/splash/balena-logo-default.png')
.resolves(Buffer.from(defaultLogo, 'base64') as any);
readDirStub = stub(fs, 'readdir').resolves(['balena-logo.png']);
readDirStub = stub(fs, 'readdir').resolves(['balena-logo.png'] as any);
});

afterEach(() => {
// Restore stubs
writeFileAtomicStub.restore();
(child_process.exec as SinonStub).restore();
(fsUtils.exec as SinonStub).restore();
readFileStub.restore();
readDirStub.restore();
});

describe('initialise', () => {
it('should make a copy of the existing boot image on initialise if not yet created', async () => {
stub(fs, 'exists').resolves(false);
stub(fsUtils, 'exists').resolves(false);

// Do the initialization
await backend.initialise();

@ -55,25 +55,25 @@ describe('Splash image configuration', () => {
Buffer.from(logo, 'base64'),
);

(fs.exists as SinonStub).restore();
(fsUtils.exists as SinonStub).restore();
});

it('should skip initialization if the default image already exists', async () => {
stub(fs, 'exists').resolves(true);
stub(fsUtils, 'exists').resolves(true);

// Do the initialization
await backend.initialise();

expect(fs.exists).to.be.calledOnceWith(
expect(fsUtils.exists).to.be.calledOnceWith(
'test/data/mnt/boot/splash/balena-logo-default.png',
);
expect(fs.readFile).to.not.have.been.called;

(fs.exists as SinonStub).restore();
(fsUtils.exists as SinonStub).restore();
});

it('should fail initialization if there is no default image on the device', async () => {
stub(fs, 'exists').resolves(false);
stub(fsUtils, 'exists').resolves(false);
readDirStub.resolves([]);
readFileStub.rejects();
stub(log, 'warn');
@ -1,8 +1,8 @@
import * as _ from 'lodash';
import { Router } from 'express';
import { fs } from 'mz';
import rewire = require('rewire');

import { unlinkAll } from '../../src/lib/fs-utils';
import * as applicationManager from '../../src/compose/application-manager';
import * as networkManager from '../../src/compose/network-manager';
import * as serviceManager from '../../src/compose/service-manager';

@ -149,12 +149,8 @@ async function create(): Promise<SupervisorAPI> {
}

async function cleanUp(): Promise<void> {
try {
// clean up test data
await fs.unlink(DB_PATH);
} catch (e) {
/* noop */
}
// Clean up test data
await unlinkAll(DB_PATH);
// Restore created SinonStubs
return restoreStubs();
}
@ -1,7 +1,7 @@
import _ = require('lodash');
import { expect } from 'chai';
import { stub } from 'sinon';
import { child_process } from 'mz';
import * as childProcess from 'child_process';

import * as firewall from '../../src/lib/firewall';
import * as iptables from '../../src/lib/iptables';

@ -130,7 +130,7 @@ export const whilstMocked = async (
) => {
const getOriginalDefaultRuleAdaptor = iptables.getDefaultRuleAdaptor;

const spawnStub = stub(child_process, 'spawn').callsFake(() => {
const spawnStub = stub(childProcess, 'spawn').callsFake(() => {
const fakeProc = new EventEmitter();
(fakeProc as any).stdout = new EventEmitter();
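`spawn` is event-based rather than callback-based, so it needs no promisification and the test helper can stub Node's built-in `child_process` module directly instead of mz's wrapper. A self-contained sketch of the fake-process pattern used above (the extra `stdin` and exit handling are illustrative additions, not taken from the repository):

// Sketch of stubbing spawn with an in-memory fake process.
import * as childProcess from 'child_process';
import { EventEmitter } from 'events';
import { stub } from 'sinon';

const spawnStub = stub(childProcess, 'spawn').callsFake((): any => {
	const fakeProc: any = new EventEmitter();
	fakeProc.stdout = new EventEmitter();
	fakeProc.stdin = { write: () => true, end: () => undefined };
	// Simulate a clean exit on the next tick so awaiting callers resolve.
	setImmediate(() => fakeProc.emit('close', 0));
	return fakeProc;
});

// ...exercise the code under test, then:
spawnStub.restore();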
@ -1,3 +1,3 @@
--exit
--timeout 30000
build/test/**/*.js
build/test/**/*.spec.js
@ -1,7 +1,6 @@
import ChaiConfig = require('../lib/chai-config');
const { expect } = ChaiConfig;
import { expect } from 'chai';

import { Network } from '../../src/compose/network';
import { Network } from '../../../src/compose/network';
import { NetworkInspectInfo } from 'dockerode';

describe('compose/network', () => {
157
test/src/lib/fs-utils.spec.ts
Normal file
@ -0,0 +1,157 @@
import { expect } from 'chai';
import * as path from 'path';
import { promises as fs } from 'fs';
import { spy, SinonSpy } from 'sinon';
import mock = require('mock-fs');

import * as fsUtils from '../../../src/lib/fs-utils';
import { rootMountPoint } from '../../../src/lib/constants';

describe('lib/fs-utils', () => {
	const testFileName1 = 'file.1';
	const testFileName2 = 'file.2';
	const testFile1 = path.join(rootMountPoint, testFileName1);
	const testFile2 = path.join(rootMountPoint, testFileName2);

	const mockFs = () => {
		mock({
			[testFile1]: 'foo',
			[testFile2]: 'bar',
		});
	};

	const unmockFs = () => {
		mock.restore();
	};

	describe('writeAndSyncFile', () => {
		before(mockFs);
		after(unmockFs);

		it('should write and sync string data', async () => {
			await fsUtils.writeAndSyncFile(testFile1, 'foo bar');
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('foo bar');
		});

		it('should write and sync buffers', async () => {
			await fsUtils.writeAndSyncFile(testFile1, Buffer.from('bar foo'));
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('bar foo');
		});
	});

	describe('writeFileAtomic', () => {
		before(() => {
			spy(fs, 'rename');
			mockFs();
		});

		after(() => {
			(fs.rename as SinonSpy).restore();
			unmockFs();
		});

		it('should write string data atomically', async () => {
			await fsUtils.writeFileAtomic(testFile1, 'foo baz');
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('foo baz');
			expect(fs.rename).to.have.been.calledWith(`${testFile1}.new`, testFile1);
		});

		it('should write buffer data atomically', async () => {
			await fsUtils.writeFileAtomic(testFile1, 'baz foo');
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('baz foo');
			expect(fs.rename).to.have.been.calledWith(`${testFile1}.new`, testFile1);
		});
	});

	describe('safeRename', () => {
		beforeEach(mockFs);
		afterEach(unmockFs);

		it('should rename a file', async () => {
			await fsUtils.safeRename(testFile1, testFile1 + 'rename');
			const dirContents = await fs.readdir(rootMountPoint);
			expect(dirContents).to.have.length(2);
			expect(dirContents).to.not.include(testFileName1);
			expect(dirContents).to.include(testFileName1 + 'rename');
		});

		it('should replace an existing file', async () => {
			await fsUtils.safeRename(testFile1, testFile2);
			const dirContents = await fs.readdir(rootMountPoint);
			expect(dirContents).to.have.length(1);
			expect(dirContents).to.include(testFileName2);
			expect(dirContents).to.not.include(testFileName1);
		});
	});

	/**
	 * TODO: Un-skip this test after all fs tests that write to a test file system use
	 * mock-fs instead. Hypothesis: exists isn't handling the relative directory it's
	 * being passed well. When all unit tests use mock-fs, we can set process.env.ROOT_MOUNTPOINT
	 * to `/mnt/root` so we can have an absolute path in all these tests.
	 */
	describe.skip('exists', () => {
		before(mockFs);
		after(unmockFs);

		it('should return whether a file exists', async () => {
			expect(await fsUtils.exists(testFile1)).to.be.true;
			await fs.unlink(testFile1).catch(() => {
				/* noop */
			});
			expect(await fsUtils.exists(testFile1)).to.be.false;
		});
	});

	describe('pathExistsOnHost', () => {
		before(mockFs);
		after(unmockFs);

		it('should return whether a file exists in host OS fs', async () => {
			expect(await fsUtils.pathExistsOnHost(testFileName1)).to.be.true;
			await fs.unlink(testFile1);
			expect(await fsUtils.pathExistsOnHost(testFileName1)).to.be.false;
		});
	});

	describe('mkdirp', () => {
		before(mockFs);
		after(unmockFs);

		it('should recursively create directories', async () => {
			await fsUtils.mkdirp(
				path.join(rootMountPoint, 'test1', 'test2', 'test3'),
			);
			expect(() =>
				fs.readdir(path.join(rootMountPoint, 'test1', 'test2', 'test3')),
			).to.not.throw();
		});
	});

	describe('unlinkAll', () => {
		beforeEach(mockFs);
		afterEach(unmockFs);

		it('should unlink a single file', async () => {
			await fsUtils.unlinkAll(testFile1);
			expect(await fs.readdir(rootMountPoint)).to.not.include(testFileName1);
		});

		it('should unlink multiple files', async () => {
			await fsUtils.unlinkAll(testFile1, testFile2);
			expect(await fs.readdir(rootMountPoint)).to.have.length(0);
		});
	});

	describe('getPathOnHost', () => {
		before(mockFs);
		after(unmockFs);

		it("should return the paths of one or more files as they exist on host OS's root", async () => {
			expect(fsUtils.getPathOnHost(testFileName1)).to.deep.equal([testFile1]);
			expect(
				fsUtils.getPathOnHost(...[testFileName1, testFileName2]),
			).to.deep.equal([testFile1, testFile2]);
		});
	});
});
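This new spec exercises the helpers that replace `mz` and `mkdirp` across the test suite, but the implementation in `src/lib/fs-utils.ts` sits outside this diff. As a reading aid, here is a hedged sketch inferred only from the assertions above; the signatures, the `.new` suffix handling, and the `ROOT_MOUNTPOINT` stand-in are assumptions, not the repository's actual code (`exec`, `exists`, and `unlinkAll` were sketched earlier and are not repeated in full):

// Hypothetical reconstruction, inferred from the spec above; the real module
// is src/lib/fs-utils.ts and may differ.
import { promises as fs } from 'fs';
import * as path from 'path';

// Assumed stand-in for the value exported by src/lib/constants.
const rootMountPoint = process.env.ROOT_MOUNTPOINT || '';

// Same shape as the exists() helper sketched earlier.
const exists = (p: string) =>
	fs.access(p).then(
		() => true,
		() => false,
	);

// Write data and fsync the handle so the content reaches the disk.
export async function writeAndSyncFile(
	pathName: string,
	data: string | Buffer,
): Promise<void> {
	const file = await fs.open(pathName, 'w');
	try {
		await file.writeFile(data);
		await file.sync();
	} finally {
		await file.close();
	}
}

// Write to `<path>.new`, then rename it over the target, which is what the
// spec's fs.rename assertion checks for.
export async function writeFileAtomic(
	pathName: string,
	data: string | Buffer,
): Promise<void> {
	await writeAndSyncFile(`${pathName}.new`, data);
	await fs.rename(`${pathName}.new`, pathName);
}

// Rename, replacing the destination if it already exists.
export async function safeRename(src: string, dest: string): Promise<void> {
	await fs.rename(src, dest);
}

// Resolve one or more relative paths against the host OS root mount point.
export function getPathOnHost(...paths: string[]): string[] {
	return paths.map((p) => path.join(rootMountPoint, p));
}

export async function pathExistsOnHost(p: string): Promise<boolean> {
	return exists(getPathOnHost(p)[0]);
}

// fs.mkdir with `recursive: true` is what makes the mkdirp package redundant.
export async function mkdirp(p: string): Promise<void> {
	await fs.mkdir(p, { recursive: true });
}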