Mirror of https://github.com/balena-os/balena-supervisor.git (synced 2024-12-19 05:37:53 +00:00)

Merge pull request #2020 from balena-os/update-typescript
Update to typescript 4.8.2
This commit is contained in commit 03f65653c2.

package-lock.json (generated, 1058 changes): diff suppressed because it is too large.
@@ -10,7 +10,7 @@
 "scripts": {
 "start": "./entry.sh",
 "build": "npm run clean && npm run release && webpack",
-"lint": "balena-lint -e ts -e js --typescript src/ test/ typings/ build-utils/ webpack.config.js",
+"lint": "balena-lint -e ts -e js src/ test/ typings/ build-utils/ webpack.config.js",
 "test:build": "tsc --noEmit && tsc --noEmit --project tsconfig.js.json",
 "test:unit": "mocha --config test/unit/.mocharc.js",
 "test:integration": "mocha --config test/integration/.mocharc.js",
@@ -20,7 +20,7 @@
 "test:compose": "docker-compose -f docker-compose.yml -f docker-compose.test.yml up --build --remove-orphans --exit-code-from=sut ; npm run compose:down",
 "test": "npm run lint && npm run test:build && npm run test:unit && npm run test:legacy",
 "compose:down": "docker-compose -f docker-compose.test.yml down",
-"prettify": "balena-lint -e ts -e js --typescript --fix src/ test/ typings/ build-utils/ webpack.config.js",
+"prettify": "balena-lint -e ts -e js --fix src/ test/ typings/ build-utils/ webpack.config.js",
 "release": "tsc --project tsconfig.release.json && mv build/src/* build",
 "sync": "ts-node --files sync/sync.ts",
 "clean": "rimraf build",
@@ -42,7 +42,7 @@
 "devDependencies": {
 "@balena/contrato": "^0.6.0",
 "@balena/es-version": "^1.0.1",
-"@balena/lint": "^5.1.0",
+"@balena/lint": "^6.2.0",
 "@types/bluebird": "^3.5.32",
 "@types/chai": "^4.2.16",
 "@types/chai-as-promised": "^7.1.3",
@@ -129,7 +129,7 @@
 "ts-node": "^8.10.2",
 "tsconfig-paths": "^4.1.0",
 "typed-error": "^3.2.1",
-"typescript": "^4.2.4",
+"typescript": "^4.8.3",
 "webpack": "^4.44.1",
 "webpack-cli": "^3.3.12",
 "winston": "^3.3.3",
@@ -46,15 +46,12 @@ interface DeviceTag {
 let readyForUpdates = false;
 
 export async function healthcheck() {
-const {
-appUpdatePollInterval,
-unmanaged,
-connectivityCheckEnabled,
-} = await config.getMany([
-'appUpdatePollInterval',
-'unmanaged',
-'connectivityCheckEnabled',
-]);
+const { appUpdatePollInterval, unmanaged, connectivityCheckEnabled } =
+await config.getMany([
+'appUpdatePollInterval',
+'unmanaged',
+'connectivityCheckEnabled',
+]);
 
 // Don't have to perform checks for unmanaged
 if (unmanaged) {
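Note: many hunks in this diff are formatting-only. The upgraded @balena/lint appears to bundle a newer Prettier, which keeps a destructuring pattern on a single line and wraps the initializer after the `=` instead of expanding the pattern itself, so blocks like the one above are reflowed without any change in behaviour. A minimal TypeScript sketch of the new wrapping style (the `getMany` helper and key names here are illustrative stand-ins, not the repository's exact API):

    // Illustrative only: long destructuring assignments now wrap after `=`.
    async function getMany(keys: string[]): Promise<Record<string, string>> {
        return Object.fromEntries(keys.map((k) => [k, '']));
    }

    async function example() {
        const { appUpdatePollInterval, unmanaged, connectivityCheckEnabled } =
            await getMany([
                'appUpdatePollInterval',
                'unmanaged',
                'connectivityCheckEnabled',
            ]);
        return { appUpdatePollInterval, unmanaged, connectivityCheckEnabled };
    }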
@@ -535,7 +532,7 @@ async function reportInitialName(
 device_name: name,
 },
 });
-} catch (err) {
+} catch (err: any) {
 log.error('Unable to report initial device name to API');
 logger.logSystemMessage(
 'Unable to report initial device name to API',
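The `catch (err: any)` annotations added throughout this commit follow from newer TypeScript defaults: with `useUnknownInCatchVariables` (part of `strict` since TypeScript 4.4), a catch-clause variable is typed `unknown`, so property accesses such as `err.message` no longer compile without an explicit annotation or narrowing. A hedged sketch of the narrowing alternative (the `reportName` function below is invented for the example):

    // Alternative to `catch (err: any)`: keep `unknown` and narrow before use.
    async function reportName(): Promise<void> {
        throw new Error('network unreachable');
    }

    async function tryReport(): Promise<void> {
        try {
            await reportName();
        } catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            console.error('Unable to report initial device name to API:', message);
        }
    }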
@@ -57,9 +57,9 @@ async function report({ body, opts }: StateReport) {
 body,
 };
 
-const [
-{ statusCode, body: statusMessage, headers },
-] = await request.patchAsync(endpoint, params).timeout(apiTimeout);
+const [{ statusCode, body: statusMessage, headers }] = await request
+.patchAsync(endpoint, params)
+.timeout(apiTimeout);
 
 if (statusCode < 200 || statusCode >= 300) {
 throw new StatusError(
@@ -750,25 +750,21 @@ export class App {
 },
 );
 
-const [
-opts,
-supervisorApiHost,
-hostPathExists,
-hostname,
-] = await Promise.all([
-config.get('extendedEnvOptions'),
-dockerUtils
-.getNetworkGateway(constants.supervisorNetworkInterface)
-.catch(() => '127.0.0.1'),
-(async () => ({
-firmware: await pathExistsOnHost('/lib/firmware'),
-modules: await pathExistsOnHost('/lib/modules'),
-}))(),
-(
-(await config.get('hostname')) ??
-(await fs.readFile('/etc/hostname', 'utf-8'))
-).trim(),
-]);
+const [opts, supervisorApiHost, hostPathExists, hostname] =
+await Promise.all([
+config.get('extendedEnvOptions'),
+dockerUtils
+.getNetworkGateway(constants.supervisorNetworkInterface)
+.catch(() => '127.0.0.1'),
+(async () => ({
+firmware: await pathExistsOnHost('/lib/firmware'),
+modules: await pathExistsOnHost('/lib/modules'),
+}))(),
+(
+(await config.get('hostname')) ??
+(await fs.readFile('/etc/hostname', 'utf-8'))
+).trim(),
+]);
 
 const svcOpts = {
 appName: app.name,
@@ -809,7 +805,7 @@ export class App {
 let imageInfo: ImageInspectInfo | undefined;
 try {
 imageInfo = await imageManager.inspectByName(svc.image);
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 throw e;
 }
@@ -824,7 +820,7 @@ export class App {
 // FIXME: Typings for DeviceMetadata
 return await Service.fromComposeObject(
 svc,
-(thisSvcOpts as unknown) as DeviceMetadata,
+thisSvcOpts as unknown as DeviceMetadata,
 );
 }),
 );
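`(x as unknown) as Y` and `x as unknown as Y` are the same double assertion; the newer formatter simply drops the parentheses, so this change is cosmetic. The pattern itself is TypeScript's standard escape hatch for converting between types the compiler considers unrelated. An illustrative sketch (the `DeviceMetadataLike` interface is invented for the example):

    // Double assertion: widen to `unknown`, then assert the target type.
    interface DeviceMetadataLike {
        appName: string;
    }

    const raw: Record<string, unknown> = { appName: 'supervisor' };
    const meta = raw as unknown as DeviceMetadataLike; // no parentheses needed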
@@ -51,12 +51,10 @@ type ApplicationManagerEventEmitter = StrictEventEmitter<
 const events: ApplicationManagerEventEmitter = new EventEmitter();
 export const on: typeof events['on'] = events.on.bind(events);
 export const once: typeof events['once'] = events.once.bind(events);
-export const removeListener: typeof events['removeListener'] = events.removeListener.bind(
-events,
-);
-export const removeAllListeners: typeof events['removeAllListeners'] = events.removeAllListeners.bind(
-events,
-);
+export const removeListener: typeof events['removeListener'] =
+events.removeListener.bind(events);
+export const removeAllListeners: typeof events['removeAllListeners'] =
+events.removeAllListeners.bind(events);
 
 const proxyvisor = new Proxyvisor();
 const localModeManager = new LocalModeManager();
@@ -70,17 +70,14 @@ const events = new ImageEventEmitter();
 
 export const on: typeof events['on'] = events.on.bind(events);
 export const once: typeof events['once'] = events.once.bind(events);
-export const removeListener: typeof events['removeListener'] = events.removeListener.bind(
-events,
-);
-export const removeAllListeners: typeof events['removeAllListeners'] = events.removeAllListeners.bind(
-events,
-);
+export const removeListener: typeof events['removeListener'] =
+events.removeListener.bind(events);
+export const removeAllListeners: typeof events['removeAllListeners'] =
+events.removeAllListeners.bind(events);
 
 const imageFetchFailures: Dictionary<number> = {};
-const imageFetchLastFailureTime: Dictionary<ReturnType<
-typeof process.hrtime
->> = {};
+const imageFetchLastFailureTime: Dictionary<ReturnType<typeof process.hrtime>> =
+{};
 const imageCleanupFailures: Dictionary<number> = {};
 
 type ImageState = Pick<Image, 'status' | 'downloadProgress'>;
@@ -239,7 +236,7 @@ export async function triggerFetch(
 await markAsSupervised({ ...image, dockerImageId: img.Id });
 
 success = true;
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 if (!(e instanceof ImageDownloadBackoffError)) {
 addImageFailure(image.name);
@@ -538,9 +535,11 @@ async function getImagesForCleanup(): Promise<string[]> {
 // for images with deltas this should return unless there is some inconsistency
 // and the tag was deleted.
 const inspectByReference = async (imageName: string) => {
-const { registry, imageName: name, tagName } = dockerUtils.getRegistryAndName(
-imageName,
-);
+const {
+registry,
+imageName: name,
+tagName,
+} = dockerUtils.getRegistryAndName(imageName);
 
 const repo = [registry, name].filter((s) => !!s).join('/');
 const reference = [repo, tagName].filter((s) => !!s).join(':');
@@ -618,7 +617,7 @@ export async function cleanup() {
 try {
 await docker.getImage(image).remove({ force: true });
 delete imageCleanupFailures[image];
-} catch (e) {
+} catch (e: any) {
 logger.logSystemMessage(
 `Error cleaning up ${image}: ${e.message} - will ignore for 1 hour`,
 { error: e },
@@ -730,7 +729,7 @@ async function removeImageIfNotNeeded(image: Image): Promise<void> {
 
 // Mark the image as removed
 removed = true;
-} catch (e) {
+} catch (e: any) {
 if (NotFoundError(e)) {
 removed = false;
 } else {
@@ -781,7 +780,7 @@ async function fetchDelta(
 ): Promise<string> {
 logger.logSystemEvent(LogTypes.downloadImageDelta, { image });
 
-const deltaOpts = (opts as unknown) as DeltaFetchOptions;
+const deltaOpts = opts as unknown as DeltaFetchOptions;
 const srcImage = await inspectByName(deltaOpts.deltaSource);
 
 deltaOpts.deltaSourceId = srcImage.Id;
@@ -45,7 +45,7 @@ export async function create(network: Network) {
 
 // We have a network with the same config and name
 // already created, we can skip this
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 logger.logSystemEvent(logTypes.createNetworkError, {
 network: { name: network.name, appUuid: network.appUuid },
@@ -20,9 +20,11 @@ export class Network {
 
 private constructor() {}
 
-private static deconstructDockerName(
-name: string,
-): { name: string; appId?: number; appUuid?: string } {
+private static deconstructDockerName(name: string): {
+name: string;
+appId?: number;
+appUuid?: string;
+} {
 const matchWithAppId = name.match(/^(\d+)_(\S+)/);
 if (matchWithAppId == null) {
 const matchWithAppUuid = name.match(/^([0-9a-f-A-F]{32,})_(\S+)/);
@@ -2,7 +2,8 @@ import * as _ from 'lodash';
 import { TypedError } from 'typed-error';
 
 // Adapted from https://github.com/docker/docker-py/blob/master/docker/utils/ports.py#L3
-const PORTS_REGEX = /^(?:(?:([a-fA-F\d.:]+):)?([\d]*)(?:-([\d]+))?:)?([\d]+)(?:-([\d]+))?(?:\/(udp|tcp))?$/;
+const PORTS_REGEX =
+/^(?:(?:([a-fA-F\d.:]+):)?([\d]*)(?:-([\d]+))?:)?([\d]+)(?:-([\d]+))?(?:\/(udp|tcp))?$/;
 
 // A regex to extract the protocol and internal port of the incoming Docker options
 const DOCKER_OPTS_PORTS_REGEX = /(\d+)(?:\/?([a-z]+))?/i;
@@ -43,12 +43,10 @@ interface KillOpts {
 
 export const on: typeof events['on'] = events.on.bind(events);
 export const once: typeof events['once'] = events.once.bind(events);
-export const removeListener: typeof events['removeListener'] = events.removeListener.bind(
-events,
-);
-export const removeAllListeners: typeof events['removeAllListeners'] = events.removeAllListeners.bind(
-events,
-);
+export const removeListener: typeof events['removeListener'] =
+events.removeListener.bind(events);
+export const removeAllListeners: typeof events['removeAllListeners'] =
+events.removeAllListeners.bind(events);
 
 // Whether a container has died, indexed by ID
 const containerHasDied: Dictionary<boolean> = {};
@@ -74,7 +72,7 @@ export const getAll = async (
 service.status = vState.status;
 }
 return service;
-} catch (e) {
+} catch (e: any) {
 if (NotFoundError(e)) {
 return null;
 }
@@ -90,10 +88,8 @@ async function get(service: Service) {
 const containerIds = await getContainerIdMap(
 service.appUuid || service.appId,
 );
-const services = (
-await getAll(`service-name=${service.serviceName}`)
-).filter((currentService) =>
-currentService.isEqualConfig(service, containerIds),
+const services = (await getAll(`service-name=${service.serviceName}`)).filter(
+(currentService) => currentService.isEqualConfig(service, containerIds),
 );
 
 if (services.length === 0) {
@@ -210,7 +206,7 @@ export async function remove(service: Service) {
 
 try {
 await docker.getContainer(existingService.containerId).remove({ v: true });
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 logger.logSystemEvent(LogTypes.removeDeadServiceError, {
 service,
@@ -231,7 +227,7 @@ async function create(service: Service) {
 );
 }
 return docker.getContainer(existing.containerId);
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 logger.logSystemEvent(LogTypes.installServiceError, {
 service,
@@ -387,7 +383,7 @@ export function listenToEvents() {
 let service: Service | null = null;
 try {
 service = await getByDockerContainerId(data.id);
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 throw e;
 }
@@ -418,7 +414,7 @@ export function listenToEvents() {
 await logMonitor.detach(data.id);
 }
 }
-} catch (e) {
+} catch (e: any) {
 log.error('Error on docker event:', e, e.stack);
 }
 }
@@ -92,9 +92,8 @@ export class Service {
 'dns',
 'dnsSearch',
 ];
-public static allConfigArrayFields: ServiceConfigArrayField[] = Service.configArrayFields.concat(
-Service.orderedConfigArrayFields,
-);
+public static allConfigArrayFields: ServiceConfigArrayField[] =
+Service.configArrayFields.concat(Service.orderedConfigArrayFields);
 
 // A list of fields to ignore when comparing container configuration
 private static omitFields = [
@@ -724,9 +723,8 @@ export class Service {
 ExposedPorts: exposedPorts,
 Image: this.config.image,
 Labels: this.config.labels,
-NetworkingConfig: ComposeUtils.serviceNetworksToDockerNetworks(
-mainNetwork,
-),
+NetworkingConfig:
+ComposeUtils.serviceNetworksToDockerNetworks(mainNetwork),
 StopSignal: this.config.stopSignal,
 Domainname: this.config.domainname,
 Hostname: this.config.hostname,
@@ -821,8 +819,8 @@ export class Service {
 // Service.orderedConfigArrayFields are defined as
 // fields inside of Service.config
 const arrayEq = ComposeUtils.compareArrayFields(
-(this.config as unknown) as Dictionary<unknown>,
-(service.config as unknown) as Dictionary<unknown>,
+this.config as unknown as Dictionary<unknown>,
+service.config as unknown as Dictionary<unknown>,
 Service.configArrayFields,
 Service.orderedConfigArrayFields,
 );
@@ -249,9 +249,8 @@ export function getHealthcheck(
 const imageServiceHealthcheck = dockerHealthcheckToServiceHealthcheck(
 _.get(imageInfo, 'Config.Healthcheck', null),
 );
-const composeServiceHealthcheck = composeHealthcheckToServiceHealthcheck(
-composeHealthcheck,
-);
+const composeServiceHealthcheck =
+composeHealthcheckToServiceHealthcheck(composeHealthcheck);
 
 // Overlay any compose healthcheck fields on the image healthchecks
 return _.assign(
@@ -277,9 +276,8 @@ export function getWorkingDir(
 workingDir: string | null | undefined,
 imageInfo?: Dockerode.ImageInspectInfo,
 ): string {
-return (workingDir != null
-? workingDir
-: _.get(imageInfo, 'Config.WorkingDir', '')
+return (
+workingDir != null ? workingDir : _.get(imageInfo, 'Config.WorkingDir', '')
 ).replace(/(^.+)\/$/, '$1');
 }
 
@@ -448,9 +446,10 @@ export function serviceUlimitsToDockerUlimits(
 return ret;
 }
 
-export function serviceRestartToDockerRestartPolicy(
-restart: string,
-): { Name: string; MaximumRetryCount: number } {
+export function serviceRestartToDockerRestartPolicy(restart: string): {
+Name: string;
+MaximumRetryCount: number;
+} {
 return {
 Name: restart,
 MaximumRetryCount: 0,
@@ -535,9 +534,9 @@ export function normalizeNullValues(obj: Dictionary<any>): void {
 });
 }
 
-export function normalizeLabels(labels: {
+export function normalizeLabels(labels: { [key: string]: string }): {
 [key: string]: string;
-}): { [key: string]: string } {
+} {
 const legacyLabels = _.mapKeys(
 _.pickBy(labels, (_v, k) => _.startsWith(k, 'io.resin.')),
 (_v, k) => {
@@ -58,7 +58,7 @@ export async function create(volume: Volume): Promise<void> {
 if (!volume.isEqualConfig(existing)) {
 throw new ResourceRecreationAttemptError('volume', volume.name);
 }
-} catch (e) {
+} catch (e: any) {
 if (!NotFoundError(e)) {
 logger.logSystemEvent(LogTypes.createVolumeError, {
 volume: { name: volume.name },
@@ -122,9 +122,10 @@ export class Volume {
 return `${appId}_${name}`;
 }
 
-private static deconstructDockerName(
-name: string,
-): { name: string; appId: number } {
+private static deconstructDockerName(name: string): {
+name: string;
+appId: number;
+} {
 const match = name.match(/(\d+)_(\S+)/);
 if (match == null) {
 throw new InternalInconsistencyError(
@@ -225,10 +225,8 @@ export class ExtraUEnv extends ConfigBackend {
 // Reduce ConfigOptions into a Map that joins collections
 return Object.entries(configs).reduce(
 (configMap: Map<string, string>, [configKey, configValue]) => {
-const {
-key: ENTRY_KEY,
-collection: ENTRY_IS_COLLECTION,
-} = ExtraUEnv.supportedConfigs[configKey];
+const { key: ENTRY_KEY, collection: ENTRY_IS_COLLECTION } =
+ExtraUEnv.supportedConfigs[configKey];
 // Check if we have to build the value for the entry
 if (ENTRY_IS_COLLECTION) {
 return configMap.set(
@@ -20,12 +20,9 @@ export class Odmdata extends ConfigBackend {
 private static supportedConfigs = ['configuration'];
 private BYTE_OFFSETS = [1659, 5243, 18043];
 private CONFIG_BYTES = [
-0x0 /* Config Option #1 */,
-0x1 /* Config Option #2 */,
-0x6 /* Config Option #3 */,
-0x7 /* Config Option #4 */,
-0x2 /* Config Option #5 */,
-0x3 /* Config Option #6 */,
+0x0 /* Config Option #1 */, 0x1 /* Config Option #2 */,
+0x6 /* Config Option #3 */, 0x7 /* Config Option #4 */,
+0x2 /* Config Option #5 */, 0x3 /* Config Option #6 */,
 ];
 private CONFIG_BUFFER = Buffer.from(this.CONFIG_BYTES);
 
@@ -186,7 +183,7 @@ export class Odmdata extends ConfigBackend {
 ): Promise<fs.FileHandle> {
 try {
 return await fs.open(file, flags);
-} catch (e) {
+} catch (e: any) {
 switch (e.code) {
 case 'ENOENT':
 log.error(`File not found at: ${file}`);
@@ -35,9 +35,9 @@ export default class ConfigJsonConfigBackend {
 readLock('config.json').disposer((release) => release());
 }
 
-public async set<T extends Schema.SchemaKey>(
-keyVals: { [key in T]: unknown },
-) {
+public async set<T extends Schema.SchemaKey>(keyVals: {
+[key in T]: unknown;
+}) {
 await this.init();
 await Bluebird.using(this.writeLockConfigJson(), async () => {
 let changed = false;
@@ -35,9 +35,8 @@ interface ConfigEventTypes {
 change: ConfigChangeMap<SchemaTypeKey>;
 }
 
-export const configJsonBackend: ConfigJsonConfigBackend = new ConfigJsonConfigBackend(
-Schema.schema,
-);
+export const configJsonBackend: ConfigJsonConfigBackend =
+new ConfigJsonConfigBackend(Schema.schema);
 
 type ConfigEventEmitter = StrictEventEmitter<EventEmitter, ConfigEventTypes>;
 class ConfigEvents extends (EventEmitter as new () => ConfigEventEmitter) {}
@@ -46,9 +45,8 @@ const events = new ConfigEvents();
 // Expose methods which make this module act as an EventEmitter
 export const on: typeof events['on'] = events.on.bind(events);
 export const once: typeof events['once'] = events.once.bind(events);
-export const removeListener: typeof events['removeListener'] = events.removeListener.bind(
-events,
-);
+export const removeListener: typeof events['removeListener'] =
+events.removeListener.bind(events);
 
 export async function get<T extends SchemaTypeKey>(
 key: T,
@@ -120,9 +118,9 @@ export async function getMany<T extends SchemaTypeKey>(
 trx?: Transaction,
 ): Promise<{ [key in T]: SchemaReturn<key> }> {
 const values = await Promise.all(keys.map((k) => get(k, trx)));
-return (_.zipObject(keys, values) as unknown) as Promise<
-{ [key in T]: SchemaReturn<key> }
->;
+return _.zipObject(keys, values) as unknown as Promise<{
+[key in T]: SchemaReturn<key>;
+}>;
 }
 
 export async function set<T extends SchemaTypeKey>(
@@ -332,11 +330,11 @@ function valueToString(value: unknown, name: string) {
 }
 }
 
-function checkValueDecode(
-decoded: Either<t.Errors, unknown>,
+function checkValueDecode<T>(
+decoded: Either<t.Errors, T>,
 key: string,
-value: unknown,
-): decoded is Right<unknown> {
+value: T,
+): decoded is Right<T> {
 if (isLeft(decoded)) {
 throw new ConfigurationValidationError(key, value);
 }
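Making `checkValueDecode` generic means the assertion `decoded is Right<T>` preserves the decoded value's type instead of collapsing it to `unknown`. A hedged, self-contained sketch of the same pattern with io-ts and fp-ts (this is not the repository's exact helper):

    import * as t from 'io-ts';
    import { Either, isLeft, Right } from 'fp-ts/lib/Either';

    // Generic guard: once it returns true, `decoded.right` keeps its type T.
    function isDecoded<T>(decoded: Either<t.Errors, T>): decoded is Right<T> {
        return !isLeft(decoded);
    }

    const port = t.number.decode(8080);
    if (isDecoded(port)) {
        console.log(port.right + 1); // `port.right` is a number here, not unknown
    }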
@@ -286,7 +286,7 @@ export function createV2Api(router: Router) {
 let apps: any;
 try {
 apps = await applicationManager.getLegacyState();
-} catch (e) {
+} catch (e: any) {
 log.error(e.message);
 return res.status(500).json({
 status: 'failed',
@@ -358,7 +358,7 @@ export function createV2Api(router: Router) {
 status: 'success',
 message: 'OK',
 });
-} catch (e) {
+} catch (e: any) {
 res.status(400).json({
 status: 'failed',
 message: e.message,
@@ -380,7 +380,7 @@ export function createV2Api(router: Router) {
 deviceType,
 },
 });
-} catch (e) {
+} catch (e: any) {
 res.status(500).json({
 status: 'failed',
 message: e.message,
@@ -536,7 +536,7 @@ export function createV2Api(router: Router) {
 status: 'success',
 tags,
 });
-} catch (e) {
+} catch (e: any) {
 log.error(e);
 res.status(500).json({
 status: 'failed',
@@ -79,7 +79,7 @@ const actionExecutors: DeviceActionExecutors = {
 success: true,
 });
 }
-} catch (err) {
+} catch (err: any) {
 if (step.humanReadableTarget) {
 logger.logConfigChange(step.humanReadableTarget, {
 err,
@@ -102,7 +102,7 @@ const actionExecutors: DeviceActionExecutors = {
 if (!initial) {
 logger.logConfigChange(logValue, { success: true });
 }
-} catch (err) {
+} catch (err: any) {
 logger.logConfigChange(logValue, { err });
 throw err;
 }
@@ -271,7 +271,7 @@ export async function getTarget({
 let conf: Dictionary<string>;
 try {
 conf = JSON.parse(devConfig.targetValues);
-} catch (e) {
+} catch (e: any) {
 throw new Error(`Corrupted supervisor database! Error: ${e.message}`);
 }
 if (initial || conf.SUPERVISOR_VPN_CONTROL == null) {
@@ -697,7 +697,7 @@ async function isVPNEnabled(): Promise<boolean> {
 try {
 const activeState = await dbus.serviceActiveState(vpnServiceName);
 return !_.includes(['inactive', 'deactivating'], activeState);
-} catch (e) {
+} catch (e: any) {
 if (UnitNotLoadedError(e)) {
 return false;
 }
@@ -78,7 +78,7 @@ function createDeviceStateRouter() {
 try {
 const response = await executeStepAction({ action }, { force });
 res.status(202).json(response);
-} catch (e) {
+} catch (e: any) {
 const status = e instanceof UpdatesLockedError ? 423 : 500;
 res.status(status).json({
 Data: '',
@@ -155,7 +155,7 @@ function createDeviceStateRouter() {
 validation.checkTruthy(req.body.force) || lockOverride,
 );
 res.status(200).send('OK');
-} catch (err) {
+} catch (err: any) {
 // TODO: We should be able to throw err if it's UpdatesLockedError
 // and the error middleware will handle it, but this doesn't work in
 // the test environment. Fix this when fixing API tests.
@@ -194,7 +194,7 @@ function createDeviceStateRouter() {
 stateToSend.download_progress = service.download_progress;
 }
 res.json(stateToSend);
-} catch (e) {
+} catch (e: any) {
 res.status(500).json({
 Data: '',
 Error: (e != null ? e.message : undefined) || e || 'Unknown error',
@@ -231,12 +231,10 @@ type DeviceStateEventEmitter = StrictEventEmitter<
 const events = new EventEmitter() as DeviceStateEventEmitter;
 export const on: typeof events['on'] = events.on.bind(events);
 export const once: typeof events['once'] = events.once.bind(events);
-export const removeListener: typeof events['removeListener'] = events.removeListener.bind(
-events,
-);
-export const removeAllListeners: typeof events['removeAllListeners'] = events.removeAllListeners.bind(
-events,
-);
+export const removeListener: typeof events['removeListener'] =
+events.removeListener.bind(events);
+export const removeAllListeners: typeof events['removeAllListeners'] =
+events.removeAllListeners.bind(events);
 
 type DeviceStateStepTarget = 'reboot' | 'shutdown' | 'noop';
 
@@ -509,9 +507,10 @@ export async function setTarget(target: TargetState, localSource?: boolean) {
 export function getTarget({
 initial = false,
 intermediate = false,
-}: { initial?: boolean; intermediate?: boolean } = {}): Bluebird<
-InstancedDeviceState
-> {
+}: {
+initial?: boolean;
+intermediate?: boolean;
+} = {}): Bluebird<InstancedDeviceState> {
 return usingReadLockTarget(async () => {
 if (intermediate) {
 return intermediateTarget!;
@@ -772,7 +771,7 @@ export async function applyStep<T extends PossibleStepTargets>(
 skipLock,
 });
 emitAsync('step-completed', null, step, stepResult || undefined);
-} catch (e) {
+} catch (e: any) {
 emitAsync('step-error', e, step);
 throw e;
 }
@@ -918,7 +917,7 @@ export const applyTarget = async ({
 nextDelay,
 retryCount,
 });
-} catch (e) {
+} catch (e: any) {
 if (e instanceof UpdatesLockedError) {
 // Forward the UpdatesLockedError directly
 throw e;
@@ -53,9 +53,11 @@ export async function setApps(
 
 const services = Object.keys(release.services ?? {}).map((serviceName) => {
 const { id: releaseId } = release;
-const { id: serviceId, image_id: imageId, ...service } = release.services[
-serviceName
-];
+const {
+id: serviceId,
+image_id: imageId,
+...service
+} = release.services[serviceName];
 
 return {
 ...service,
|
|||||||
const dbApps = await getDBEntry();
|
const dbApps = await getDBEntry();
|
||||||
|
|
||||||
return dbApps
|
return dbApps
|
||||||
.map(({ source, uuid, releaseId, commit: releaseUuid, ...app }): [
|
.map(
|
||||||
string,
|
({
|
||||||
TargetApp,
|
source,
|
||||||
] => {
|
|
||||||
const services = (JSON.parse(app.services) as DatabaseService[])
|
|
||||||
.map(({ serviceName, serviceId, imageId, ...service }): [
|
|
||||||
string,
|
|
||||||
TargetService,
|
|
||||||
] => [
|
|
||||||
serviceName,
|
|
||||||
{
|
|
||||||
id: serviceId,
|
|
||||||
image_id: imageId,
|
|
||||||
..._.omit(service, ['appId', 'appUuid', 'commit', 'releaseId']),
|
|
||||||
} as TargetService,
|
|
||||||
])
|
|
||||||
// Map by serviceName
|
|
||||||
.reduce(
|
|
||||||
(svcs, [serviceName, s]) => ({
|
|
||||||
...svcs,
|
|
||||||
[serviceName]: s,
|
|
||||||
}),
|
|
||||||
{},
|
|
||||||
);
|
|
||||||
|
|
||||||
const releases = releaseUuid
|
|
||||||
? {
|
|
||||||
[releaseUuid]: {
|
|
||||||
id: releaseId,
|
|
||||||
services,
|
|
||||||
networks: JSON.parse(app.networks),
|
|
||||||
volumes: JSON.parse(app.volumes),
|
|
||||||
} as TargetRelease,
|
|
||||||
}
|
|
||||||
: {};
|
|
||||||
|
|
||||||
return [
|
|
||||||
uuid,
|
uuid,
|
||||||
{
|
releaseId,
|
||||||
id: app.appId,
|
commit: releaseUuid,
|
||||||
name: app.name,
|
...app
|
||||||
class: app.class,
|
}): [string, TargetApp] => {
|
||||||
is_host: !!app.isHost,
|
const services = (JSON.parse(app.services) as DatabaseService[])
|
||||||
releases,
|
.map(
|
||||||
},
|
({
|
||||||
];
|
serviceName,
|
||||||
})
|
serviceId,
|
||||||
|
imageId,
|
||||||
|
...service
|
||||||
|
}): [string, TargetService] => [
|
||||||
|
serviceName,
|
||||||
|
{
|
||||||
|
id: serviceId,
|
||||||
|
image_id: imageId,
|
||||||
|
..._.omit(service, ['appId', 'appUuid', 'commit', 'releaseId']),
|
||||||
|
} as TargetService,
|
||||||
|
],
|
||||||
|
)
|
||||||
|
// Map by serviceName
|
||||||
|
.reduce(
|
||||||
|
(svcs, [serviceName, s]) => ({
|
||||||
|
...svcs,
|
||||||
|
[serviceName]: s,
|
||||||
|
}),
|
||||||
|
{},
|
||||||
|
);
|
||||||
|
|
||||||
|
const releases = releaseUuid
|
||||||
|
? {
|
||||||
|
[releaseUuid]: {
|
||||||
|
id: releaseId,
|
||||||
|
services,
|
||||||
|
networks: JSON.parse(app.networks),
|
||||||
|
volumes: JSON.parse(app.volumes),
|
||||||
|
} as TargetRelease,
|
||||||
|
}
|
||||||
|
: {};
|
||||||
|
|
||||||
|
return [
|
||||||
|
uuid,
|
||||||
|
{
|
||||||
|
id: app.appId,
|
||||||
|
name: app.name,
|
||||||
|
class: app.class,
|
||||||
|
is_host: !!app.isHost,
|
||||||
|
releases,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
},
|
||||||
|
)
|
||||||
.reduce((apps, [uuid, app]) => ({ ...apps, [uuid]: app }), {});
|
.reduce((apps, [uuid, app]) => ({ ...apps, [uuid]: app }), {});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -57,7 +57,7 @@ export async function loadTargetFromFile(appsPath: string): Promise<boolean> {
 let stateFromFile: AppsJsonFormat | any[];
 try {
 stateFromFile = JSON.parse(content);
-} catch (e) {
+} catch (e: any) {
 throw new AppsJsonParseError(e);
 }
 
@@ -157,7 +157,7 @@ export async function loadTargetFromFile(appsPath: string): Promise<boolean> {
 }
 }
 return true;
-} catch (e) {
+} catch (e: any) {
 // Ensure that this is actually a file, and not an empty path
 // It can be an empty path because if the file does not exist
 // on host, the docker daemon creates an empty directory when
@@ -24,10 +24,8 @@ interface TargetStateEvents {
 ) => void;
 'target-state-apply': (force: boolean, isFromApi: boolean) => void;
 }
-export const emitter: StrictEventEmitter<
-EventEmitter,
-TargetStateEvents
-> = new EventEmitter();
+export const emitter: StrictEventEmitter<EventEmitter, TargetStateEvents> =
+new EventEmitter();
 
 const lockGetTarget = () =>
 writeLock('getTarget').disposer((release) => release());
@@ -105,17 +103,13 @@ export const update = async (
 ): Promise<void> => {
 await config.initialized();
 return Bluebird.using(lockGetTarget(), async () => {
-const {
-uuid,
-apiEndpoint,
-apiTimeout,
-deviceApiKey,
-} = await config.getMany([
-'uuid',
-'apiEndpoint',
-'apiTimeout',
-'deviceApiKey',
-]);
+const { uuid, apiEndpoint, apiTimeout, deviceApiKey } =
+await config.getMany([
+'uuid',
+'apiEndpoint',
+'apiTimeout',
+'deviceApiKey',
+]);
 
 if (typeof apiEndpoint !== 'string') {
 throw new InternalInconsistencyError(
@@ -188,7 +182,7 @@ const poll = async (
 await update();
 // Reset fetchErrors because we successfuly updated
 fetchErrors = 0;
-} catch (e) {
+} catch {
 // Exponential back off if request fails
 pollInterval = Math.min(appUpdatePollInterval, 15000 * 2 ** fetchErrors);
 ++fetchErrors;
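Where the caught error is never inspected, the commit drops the binding entirely (`catch {`), relying on ES2019's optional catch binding; this also avoids unused-variable warnings from the stricter lint setup. A small sketch (the `update` callback here is illustrative):

    // Optional catch binding: no unused `e` parameter to trip the linter.
    async function pollOnce(update: () => Promise<void>): Promise<boolean> {
        try {
            await update();
            return true;
        } catch {
            // back-off/retry is handled by the caller in the real code
            return false;
        }
    }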
@@ -228,10 +222,8 @@ export const startPoll = async (): Promise<void> => {
 });
 
 // Query and set config values we need to avoid multiple db hits
-const {
-instantUpdates: updates,
-appUpdatePollInterval: interval,
-} = await config.getMany(['instantUpdates', 'appUpdatePollInterval']);
+const { instantUpdates: updates, appUpdatePollInterval: interval } =
+await config.getMany(['instantUpdates', 'appUpdatePollInterval']);
 instantUpdates = updates;
 appUpdatePollInterval = interval;
 } catch {
@@ -32,17 +32,9 @@ export let client: mixpanel.Mixpanel | null = null;
 export const initialized = _.once(async () => {
 await config.initialized();
 
-const {
-unmanaged,
-mixpanelHost,
-mixpanelToken,
-uuid,
-} = await config.getMany([
-'unmanaged',
-'mixpanelHost',
-'mixpanelToken',
-'uuid',
-]);
+const { unmanaged, mixpanelHost, mixpanelToken, uuid } = await config.getMany(
+['unmanaged', 'mixpanelHost', 'mixpanelToken', 'uuid'],
+);
 
 defaultProperties = {
 distinct_id: uuid,
@@ -67,7 +67,7 @@ async function readProxy(): Promise<ProxyConfig | undefined> {
 let redsocksConf: string;
 try {
 redsocksConf = await fs.readFile(redsocksConfPath, 'utf-8');
-} catch (e) {
+} catch (e: any) {
 if (!ENOENT(e)) {
 throw e;
 }
@@ -99,7 +99,7 @@ async function readProxy(): Promise<ProxyConfig | undefined> {
 if (noProxy.length) {
 conf.noProxy = noProxy;
 }
-} catch (e) {
+} catch (e: any) {
 if (!ENOENT(e)) {
 throw e;
 }
@@ -141,7 +141,7 @@ async function setProxy(maybeConf: ProxyConfig | null): Promise<void> {
 let currentConf: ProxyConfig | undefined;
 try {
 currentConf = await readProxy();
-} catch (err) {
+} catch {
 // Noop - current redsocks.conf does not exist
 }
 
@@ -59,7 +59,7 @@ export const fetchDevice = async (
 }
 
 return device;
-} catch (e) {
+} catch {
 throw new DeviceNotFoundError();
 }
 };
@@ -123,12 +123,14 @@ export const exchangeKeyAndGetDevice = async (
 opts.provisioningApiKey,
 apiTimeout,
 );
-} catch (err) {
+} catch {
 throw new ExchangeKeyError(`Couldn't fetch device with provisioning key`);
 }
 
 // We found the device so we can try to register a working device key for it
-const [res] = await (await request.getRequestInstance())
+const [res] = await (
+await request.getRequestInstance()
+)
 .postAsync(`${opts.apiEndpoint}/api-key/device/${device.id}/device-key`, {
 json: true,
 body: {
@@ -72,7 +72,7 @@ export const isScoped = (
 scopes: Scope[],
 ) =>
 scopes.some((scope) =>
-scopeChecks[scope.type](resources, (scope as unknown) as any),
+scopeChecks[scope.type](resources, scope as unknown as any),
 );
 
 export type AuthorizedRequest = express.Request & {
@@ -219,7 +219,7 @@ export function validateTargetContracts(
 service.labels?.['io.balena.features.optional'],
 ),
 };
-} catch (e) {
+} catch (e: any) {
 throw new ContractValidationError(serviceName, e.message);
 }
 }
@@ -170,10 +170,9 @@ export async function fetchDeltaWithProgress(
 
 const url = `${deltaOpts.deltaEndpoint}/api/v${deltaOpts.deltaVersion}/delta?src=${deltaOpts.deltaSource}&dest=${imgDest}`;
 
-const [res, data] = await (await request.getRequestInstance()).getAsync(
-url,
-opts,
-);
+const [res, data] = await (
+await request.getRequestInstance()
+).getAsync(url, opts);
 if (res.statusCode === 502 || res.statusCode === 504) {
 throw new DeltaStillProcessingError();
 }
@@ -30,7 +30,7 @@ export function equals<T>(value: T, other: T): boolean {
 * Returns true if the the object equals `{}` or is an empty
 * array
 */
-export function empty<T>(value: T): boolean {
+export function empty<T extends {}>(value: T): boolean {
 return (Array.isArray(value) && value.length === 0) || equals(value, {});
 }
 
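The added `T extends {}` constraint is likely a TypeScript 4.8 accommodation: 4.8 tightened how `{}`, `unknown`, `null` and `undefined` relate (for example, `unknown` now narrows much like `{} | null | undefined`), so an unconstrained `T` may no longer be accepted where a non-nullish, object-like value is required. A simplified sketch of the constraint's effect, using `Object.keys` as a stand-in for the module's own `equals(value, {})` check:

    // `T extends {}` excludes null and undefined at the call site.
    function isEmpty<T extends {}>(value: T): boolean {
        return (Array.isArray(value) && value.length === 0) ||
            Object.keys(value).length === 0;
    }

    isEmpty({});        // true
    isEmpty([1, 2, 3]); // false
    // isEmpty(null);   // compile error: null is not assignable to `{}`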
@@ -48,7 +48,7 @@ async function createVolumeFromLegacyData(
 {},
 legacyPath,
 );
-} catch (e) {
+} catch (e: any) {
 logger.logSystemMessage(
 `Warning: could not migrate legacy /data volume: ${e.message}`,
 { error: e },
@@ -85,7 +85,7 @@ export async function normaliseLegacyDatabase() {
 
 try {
 services = JSON.parse(app.services);
-} catch (e) {
+} catch (e: any) {
 throw new DatabaseParseError(e);
 }
 
@@ -304,76 +304,74 @@ export async function fromV2TargetApps(
 return (
 (
 await Promise.all(
-Object.keys(apps).map(
-async (id): Promise<[string, TargetApp]> => {
-const appId = parseInt(id, 10);
-const app = apps[appId];
+Object.keys(apps).map(async (id): Promise<[string, TargetApp]> => {
+const appId = parseInt(id, 10);
+const app = apps[appId];
 
 // If local mode or connectivity is not available just use id as uuid
 const uuid = local
 ? id
 : await getUUIDFromAPI(appId).catch(() => {
 throw new Error(
 'Cannot migrate from v2 apps.json without Internet connectivity. Please use balenaCLI v13.5.1+ for offline preload support.',
 );
 });
 
 const releases = app.commit
 ? {
 [app.commit]: {
 id: app.releaseId,
 services: Object.keys(app.services ?? {})
 .map((serviceId) => {
 const {
 imageId,
 serviceName,
+image,
+environment,
+labels,
+running,
+serviceId: _serviceId,
+contract,
+...composition
+} = app.services[serviceId];
+
+return [
+serviceName,
+{
+id: serviceId,
+image_id: imageId,
 image,
 environment,
 labels,
 running,
-serviceId: _serviceId,
 contract,
-...composition
-} = app.services[serviceId];
+composition,
+},
+];
+})
+.reduce(
+(res, [serviceName, svc]) => ({
+...res,
+[serviceName]: svc,
+}),
+{},
+),
+volumes: app.volumes ?? {},
+networks: app.networks ?? {},
+},
+}
+: {};
 
 return [
-serviceName,
+uuid,
 {
-id: serviceId,
-image_id: imageId,
-image,
-environment,
-labels,
-running,
-contract,
-composition,
-},
-];
-})
-.reduce(
-(res, [serviceName, svc]) => ({
-...res,
-[serviceName]: svc,
-}),
-{},
-),
-volumes: app.volumes ?? {},
-networks: app.networks ?? {},
-},
-}
-: {};
-
-return [
-uuid,
-{
-id: appId,
-name: app.name,
-class: 'fleet',
-releases,
-} as TargetApp,
-];
-},
-),
+id: appId,
+name: app.name,
+class: 'fleet',
+releases,
+} as TargetApp,
+];
+}),
 )
 )
 // Key by uuid
@@ -126,7 +126,7 @@ export async function unlock(path: string): Promise<void> {
 export function unlockSync(path: string) {
 try {
 return unlinkSync(path);
-} catch (e) {
+} catch (e: any) {
 if (e.code === 'EPERM' || e.code === 'EISDIR') {
 return rmdirSync(path);
 }
@@ -21,7 +21,7 @@ const getOSReleaseData = _.memoize(
 const value = _.trim(values.join('=')).replace(/^"(.+(?="$))"$/, '$1');
 releaseItems[_.trim(key)] = value;
 }
-} catch (e) {
+} catch (e: any) {
 throw new InternalInconsistencyError(
 `Unable to read file at ${path}: ${e.message} ${e.stack}`,
 );
@ -42,7 +42,7 @@ const formatter = winston.format.printf((args) => {
)}${message}`;
)}${message}`;
});
});

export const winstonLog = (winston.createLogger({
export const winstonLog = winston.createLogger({
format: winston.format.combine(winston.format.colorize(), formatter),
format: winston.format.combine(winston.format.colorize(), formatter),
transports: [new winston.transports.Console()],
transports: [new winston.transports.Console()],
// In the future we can reduce this logging level in
// In the future we can reduce this logging level in
@ -57,7 +57,7 @@ export const winstonLog = (winston.createLogger({
// below, we first cast to unknown so we can do what we
// below, we first cast to unknown so we can do what we
// like, and then assign every log level a function (which
// like, and then assign every log level a function (which
// is what happens internally in winston)
// is what happens internally in winston)
}) as unknown) as { [key in logLevel]: (message: string) => void };
}) as unknown as { [key in logLevel]: (message: string) => void };

winston.addColors(colors);
winston.addColors(colors);

@ -46,11 +46,9 @@ export function abortIfHUPInProgress({
force: boolean | undefined;
force: boolean | undefined;
}): Promise<boolean | never> {
}): Promise<boolean | never> {
return Promise.all(
return Promise.all(
[
['rollback-health-breadcrumb', 'rollback-altboot-breadcrumb'].map(
'rollback-health-breadcrumb',
(filename) =>
'rollback-altboot-breadcrumb',
pathExistsOnHost(path.join(constants.stateMountPoint, filename)),
].map((filename) =>
pathExistsOnHost(path.join(constants.stateMountPoint, filename)),
),
),
).then((existsArray) => {
).then((existsArray) => {
const anyExists = existsArray.some((exists) => exists);
const anyExists = existsArray.some((exists) => exists);
@ -112,7 +110,7 @@ export async function lock<T extends unknown>(
let lockOverride: boolean;
let lockOverride: boolean;
try {
try {
lockOverride = await config.get('lockOverride');
lockOverride = await config.get('lockOverride');
} catch (err) {
} catch (err: any) {
throw new InternalInconsistencyError(
throw new InternalInconsistencyError(
`Error getting lockOverride config value: ${err?.message ?? err}`,
`Error getting lockOverride config value: ${err?.message ?? err}`,
);
);
@ -161,7 +161,7 @@ export class LocalModeManager {
return this.collectContainerResources(
return this.collectContainerResources(
this.containerId || SUPERVISOR_CONTAINER_NAME_FALLBACK,
this.containerId || SUPERVISOR_CONTAINER_NAME_FALLBACK,
);
);
} catch (e) {
} catch (e: any) {
if (this.containerId !== undefined) {
if (this.containerId !== undefined) {
try {
try {
// Inspect operation fails (container ID is out of sync?).
// Inspect operation fails (container ID is out of sync?).
@ -172,7 +172,7 @@ export class LocalModeManager {
e.message,
e.message,
);
);
return this.collectContainerResources(fallback);
return this.collectContainerResources(fallback);
} catch (e) {
} catch (e: any) {
// Inspect operation fails (using legacy container name?).
// Inspect operation fails (using legacy container name?).
const fallback = SUPERVISOR_LEGACY_CONTAINER_NAME_FALLBACK;
const fallback = SUPERVISOR_LEGACY_CONTAINER_NAME_FALLBACK;
log.warn(
log.warn(
@ -230,7 +230,7 @@ export class LocalModeManager {
EngineSnapshot.fromJSON(r.snapshot),
EngineSnapshot.fromJSON(r.snapshot),
LocalModeManager.parseTimestamp(r.timestamp),
LocalModeManager.parseTimestamp(r.timestamp),
);
);
} catch (e) {
} catch (e: any) {
// Some parsing error happened. Ensure we add data details to the error description.
// Some parsing error happened. Ensure we add data details to the error description.
throw new Error(
throw new Error(
`Cannot parse snapshot data ${JSON.stringify(r)}.` +
`Cannot parse snapshot data ${JSON.stringify(r)}.` +
@ -101,9 +101,8 @@ class LogMonitor {
timestamp: Date.now(),
timestamp: Date.now(),
writeRequired: false,
writeRequired: false,
};
};
this.containers[
this.containers[containerId].timestamp =
containerId
await this.getContainerSentTimestamp(containerId);
].timestamp = await this.getContainerSentTimestamp(containerId);
this.backfill(containerId, this.containers[containerId].timestamp);
this.backfill(containerId, this.containers[containerId].timestamp);
}
}
}
}
@ -4,7 +4,7 @@ const _ = require('lodash');
var tryParse = function (obj) {
var tryParse = function (obj) {
try {
try {
return JSON.parse(obj);
return JSON.parse(obj);
} catch (e) {
} catch {
return {};
return {};
}
}
};
};
@ -22,7 +22,7 @@ exports.up = function (knex) {
try {
try {
const parsed = JSON.parse(data.toString());
const parsed = JSON.parse(data.toString());
resolve(parsed);
resolve(parsed);
} catch (e) {
} catch {
console.log(
console.log(
'Failed to parse config.json! Things may fail unexpectedly!',
'Failed to parse config.json! Things may fail unexpectedly!',
);
);
@ -25,7 +25,7 @@ exports.up = function (knex) {
return resolve(checkTruthy(parsed.localMode));
return resolve(checkTruthy(parsed.localMode));
}
}
return resolve(false);
return resolve(false);
} catch (e) {
} catch {
console.log(
console.log(
'Failed to parse config.json! Things may fail unexpectedly!',
'Failed to parse config.json! Things may fail unexpectedly!',
);
);
@ -46,7 +46,7 @@ export async function isVPNActive(): Promise<boolean> {
let active: boolean = true;
let active: boolean = true;
try {
try {
await fs.lstat(`${constants.vpnStatusPath}/active`);
await fs.lstat(`${constants.vpnStatusPath}/active`);
} catch (e) {
} catch {
active = false;
active = false;
}
}
log.info(`VPN connection is ${active ? 'active' : 'not active'}.`);
log.info(`VPN connection is ${active ? 'active' : 'not active'}.`);
@ -116,7 +116,8 @@ export const connectivityCheckEnabled = Bluebird.method(
() => isConnectivityCheckEnabled,
() => isConnectivityCheckEnabled,
);
);

const IP_REGEX = /^(?:(?:balena|docker|rce|tun)[0-9]+|tun[0-9]+|resin-vpn|lo|resin-dns|supervisor0|balena-redsocks|resin-redsocks|br-[0-9a-f]{12})$/;
const IP_REGEX =
/^(?:(?:balena|docker|rce|tun)[0-9]+|tun[0-9]+|resin-vpn|lo|resin-dns|supervisor0|balena-redsocks|resin-redsocks|br-[0-9a-f]{12})$/;

export const shouldReportInterface = (intf: string) => !IP_REGEX.test(intf);
export const shouldReportInterface = (intf: string) => !IP_REGEX.test(intf);

@ -96,7 +96,7 @@ const createProxyvisorRouter = function (proxyvisor) {
const fields = await db.models('dependentDevice').select();
const fields = await db.models('dependentDevice').select();
const devices = fields.map(parseDeviceFields);
const devices = fields.map(parseDeviceFields);
res.json(devices);
res.json(devices);
} catch (err) {
} catch (/** @type {any} */ err) {
res.status(503).send(err?.message || err || 'Unknown error');
res.status(503).send(err?.message || err || 'Unknown error');
}
}
});
});
@ -320,7 +320,7 @@ const createProxyvisorRouter = function (proxyvisor) {
);
);
}
}
res.sendFile(dest);
res.sendFile(dest);
} catch (err) {
} catch (/** @type {any} */ err) {
log.error(`Error on ${req.method} ${url.parse(req.url).pathname}`, err);
log.error(`Error on ${req.method} ${url.parse(req.url).pathname}`, err);
return res.status(503).send(err?.message || err || 'Unknown error');
return res.status(503).send(err?.message || err || 'Unknown error');
}
}
@ -337,7 +337,7 @@ const createProxyvisorRouter = function (proxyvisor) {
config: JSON.parse(app.config ?? '{}'),
config: JSON.parse(app.config ?? '{}'),
}));
}));
res.json($apps);
res.json($apps);
} catch (err) {
} catch (/** @type {any} */ err) {
log.error(`Error on ${req.method} ${url.parse(req.url).pathname}`, err);
log.error(`Error on ${req.method} ${url.parse(req.url).pathname}`, err);
return res.status(503).send(err?.message || err || 'Unknown error');
return res.status(503).send(err?.message || err || 'Unknown error');
}
}
@ -350,9 +350,8 @@ export class Proxyvisor {
constructor() {
constructor() {
this.executeStepAction = this.executeStepAction.bind(this);
this.executeStepAction = this.executeStepAction.bind(this);
this.getCurrentStates = this.getCurrentStates.bind(this);
this.getCurrentStates = this.getCurrentStates.bind(this);
this.normaliseDependentAppForDB = this.normaliseDependentAppForDB.bind(
this.normaliseDependentAppForDB =
this,
this.normaliseDependentAppForDB.bind(this);
);
this.setTargetInTransaction = this.setTargetInTransaction.bind(this);
this.setTargetInTransaction = this.setTargetInTransaction.bind(this);
this.getTarget = this.getTarget.bind(this);
this.getTarget = this.getTarget.bind(this);
this._getHookStep = this._getHookStep.bind(this);
this._getHookStep = this._getHookStep.bind(this);
@ -58,7 +58,7 @@ export class SupervisorAPI {
return res.status(500).send('Unhealthy');
return res.status(500).send('Unhealthy');
}
}
return res.sendStatus(200);
return res.sendStatus(200);
} catch (_e) {
} catch {
log.error('Healthcheck failed');
log.error('Healthcheck failed');
return res.status(500).send('Unhealthy');
return res.status(500).send('Unhealthy');
}
}
@ -191,7 +191,7 @@ export type DeviceName = t.TypeOf<typeof DeviceName>;
const restrictedRecord = <
const restrictedRecord = <
K extends t.Mixed,
K extends t.Mixed,
V extends t.Mixed,
V extends t.Mixed,
R extends { [key in t.TypeOf<K>]: t.TypeOf<V> }
R extends { [key in t.TypeOf<K>]: t.TypeOf<V> },
>(
>(
k: K,
k: K,
v: V,
v: V,
@ -109,7 +109,7 @@ export async function initDevice(opts: Opts) {
opts.docker,
opts.docker,
true,
true,
);
);
} catch (e) {
} catch {
await Bluebird.delay(500);
await Bluebird.delay(500);
}
}
}
}
@ -174,7 +174,7 @@ describe('lib/lockfile', () => {
|
|||||||
await expect(lockfile.lock(lockOne)).to.not.be.rejected;
|
await expect(lockfile.lock(lockOne)).to.not.be.rejected;
|
||||||
await expect(lockfile.lock(lockTwo, NOBODY_UID)).to.not.be.rejected;
|
await expect(lockfile.lock(lockTwo, NOBODY_UID)).to.not.be.rejected;
|
||||||
|
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
process.emit('exit');
|
process.emit('exit');
|
||||||
|
|
||||||
// Verify lockfile removal regardless of appId / appUuid
|
// Verify lockfile removal regardless of appId / appUuid
|
||||||
|
@ -47,14 +47,14 @@ describe('Database Migrations', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
after(() => {
|
after(() => {
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
constants.databasePath = process.env.DATABASE_PATH;
|
constants.databasePath = process.env.DATABASE_PATH;
|
||||||
delete require.cache[require.resolve('~/src/db')];
|
delete require.cache[require.resolve('~/src/db')];
|
||||||
});
|
});
|
||||||
|
|
||||||
it('creates a database at the path passed on creation', async () => {
|
it('creates a database at the path passed on creation', async () => {
|
||||||
const databasePath = process.env.DATABASE_PATH_2!;
|
const databasePath = process.env.DATABASE_PATH_2!;
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
constants.databasePath = databasePath;
|
constants.databasePath = databasePath;
|
||||||
delete require.cache[require.resolve('~/src/db')];
|
delete require.cache[require.resolve('~/src/db')];
|
||||||
|
|
||||||
@ -67,7 +67,7 @@ describe('Database Migrations', () => {
|
|||||||
const databasePath = process.env.DATABASE_PATH_3!;
|
const databasePath = process.env.DATABASE_PATH_3!;
|
||||||
|
|
||||||
const knexForDB = await createOldDatabase(databasePath);
|
const knexForDB = await createOldDatabase(databasePath);
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
constants.databasePath = databasePath;
|
constants.databasePath = databasePath;
|
||||||
delete require.cache[require.resolve('~/src/db')];
|
delete require.cache[require.resolve('~/src/db')];
|
||||||
const testDb = await import('~/src/db');
|
const testDb = await import('~/src/db');
|
||||||
|
@ -113,7 +113,7 @@ describe('device-state', () => {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
await testDb.destroy();
|
await testDb.destroy();
|
||||||
} catch (e) {
|
} catch {
|
||||||
/* noop */
|
/* noop */
|
||||||
}
|
}
|
||||||
sinon.restore();
|
sinon.restore();
|
||||||
|
@ -91,9 +91,9 @@ describe('EventTracker', () => {
|
|||||||
it('initializes a mixpanel client when not in unmanaged mode', () => {
|
it('initializes a mixpanel client when not in unmanaged mode', () => {
|
||||||
expect(eventTracker.initialized()).to.be.fulfilled.then(() => {
|
expect(eventTracker.initialized()).to.be.fulfilled.then(() => {
|
||||||
expect(mixpanel.init).to.have.been.calledWith('someToken');
|
expect(mixpanel.init).to.have.been.calledWith('someToken');
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(eventTracker.client.token).to.equal('someToken');
|
expect(eventTracker.client.token).to.equal('someToken');
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(eventTracker.client.track).to.be.a('function');
|
expect(eventTracker.client.track).to.be.a('function');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@ -138,7 +138,7 @@ describe('EventTracker', () => {
|
|||||||
'Test event 2',
|
'Test event 2',
|
||||||
JSON.stringify({ appId: 'someOtherValue' }),
|
JSON.stringify({ appId: 'someOtherValue' }),
|
||||||
);
|
);
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(eventTracker.client.track).to.be.calledWith('Test event 2', {
|
expect(eventTracker.client.track).to.be.calledWith('Test event 2', {
|
||||||
appId: 'someOtherValue',
|
appId: 'someOtherValue',
|
||||||
uuid: 'barbaz',
|
uuid: 'barbaz',
|
||||||
@ -150,7 +150,7 @@ describe('EventTracker', () => {
|
|||||||
it('can be passed an Error and it is added to the event properties', async () => {
|
it('can be passed an Error and it is added to the event properties', async () => {
|
||||||
const theError = new Error('something went wrong');
|
const theError = new Error('something went wrong');
|
||||||
await eventTracker.track('Error event', theError);
|
await eventTracker.track('Error event', theError);
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(eventTracker.client.track).to.be.calledWith('Error event', {
|
expect(eventTracker.client.track).to.be.calledWith('Error event', {
|
||||||
error: {
|
error: {
|
||||||
message: theError.message,
|
message: theError.message,
|
||||||
@ -174,7 +174,7 @@ describe('EventTracker', () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
await eventTracker.track('Some app event', props);
|
await eventTracker.track('Some app event', props);
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(eventTracker.client.track).to.be.calledWith('Some app event', {
|
expect(eventTracker.client.track).to.be.calledWith('Some app event', {
|
||||||
service: { appId: '1' },
|
service: { appId: '1' },
|
||||||
uuid: 'barbaz',
|
uuid: 'barbaz',
|
||||||
|
@ -83,7 +83,7 @@ describe('network', () => {
|
|||||||
} as any),
|
} as any),
|
||||||
);
|
);
|
||||||
|
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
after(() => os.networkInterfaces.restore());
|
after(() => os.networkInterfaces.restore());
|
||||||
|
|
||||||
it('returns only the relevant IP addresses', () =>
|
it('returns only the relevant IP addresses', () =>
|
||||||
|
@ -418,9 +418,7 @@ describe('ApiBinder', () => {
|
|||||||
|
|
||||||
// Copy previous values to restore later
|
// Copy previous values to restore later
|
||||||
const previousStateReportErrors = currentState.stateReportErrors;
|
const previousStateReportErrors = currentState.stateReportErrors;
|
||||||
const previousDeviceStateConnected =
|
const previousDeviceStateConnected = components.deviceState.connected;
|
||||||
// @ts-ignore
|
|
||||||
components.deviceState.connected;
|
|
||||||
|
|
||||||
// Set additional conditions not in configStub to cause a fail
|
// Set additional conditions not in configStub to cause a fail
|
||||||
try {
|
try {
|
||||||
|
@ -25,7 +25,7 @@ describe('Logger', function () {
|
|||||||
this.requestStub = sinon.stub(https, 'request').returns(this._req);
|
this.requestStub = sinon.stub(https, 'request').returns(this._req);
|
||||||
|
|
||||||
configStub = sinon.stub(config, 'getMany').returns(
|
configStub = sinon.stub(config, 'getMany').returns(
|
||||||
// @ts-ignore this should actually work but the type system doesnt like it
|
// @ts-expect-error this should actually work but the type system doesnt like it
|
||||||
Promise.resolve({
|
Promise.resolve({
|
||||||
apiEndpoint: 'https://example.com',
|
apiEndpoint: 'https://example.com',
|
||||||
uuid: 'deadbeef',
|
uuid: 'deadbeef',
|
||||||
@ -134,7 +134,7 @@ describe('Logger', function () {
|
|||||||
'\u0001\u0000\u0000\u0000\u0000\u0000\u0000?2018-09-21T12:37:09.819134000Z this is the message';
|
'\u0001\u0000\u0000\u0000\u0000\u0000\u0000?2018-09-21T12:37:09.819134000Z this is the message';
|
||||||
const buffer = Buffer.from(message);
|
const buffer = Buffer.from(message);
|
||||||
|
|
||||||
// @ts-ignore accessing a private function
|
// @ts-expect-error accessing a private function
|
||||||
expect(ContainerLogs.extractMessage(buffer)).to.deep.equal({
|
expect(ContainerLogs.extractMessage(buffer)).to.deep.equal({
|
||||||
message: 'this is the message',
|
message: 'this is the message',
|
||||||
timestamp: 1537533429819,
|
timestamp: 1537533429819,
|
||||||
|
@ -158,7 +158,6 @@ describe('device-config', () => {
|
|||||||
it('correctly parses a config.txt file', async () => {
|
it('correctly parses a config.txt file', async () => {
|
||||||
// Will try to parse /test/data/mnt/boot/config.txt
|
// Will try to parse /test/data/mnt/boot/config.txt
|
||||||
await expect(
|
await expect(
|
||||||
// @ts-ignore accessing private value
|
|
||||||
deviceConfig.getBootConfig(configTxtBackend),
|
deviceConfig.getBootConfig(configTxtBackend),
|
||||||
).to.eventually.deep.equal({
|
).to.eventually.deep.equal({
|
||||||
HOST_CONFIG_dtparam: '"i2c_arm=on","spi=on","audio=on"',
|
HOST_CONFIG_dtparam: '"i2c_arm=on","spi=on","audio=on"',
|
||||||
@ -181,7 +180,6 @@ describe('device-config', () => {
|
|||||||
);
|
);
|
||||||
|
|
||||||
await expect(
|
await expect(
|
||||||
// @ts-ignore accessing private value
|
|
||||||
deviceConfig.getBootConfig(configTxtBackend),
|
deviceConfig.getBootConfig(configTxtBackend),
|
||||||
).to.eventually.deep.equal({
|
).to.eventually.deep.equal({
|
||||||
HOST_CONFIG_initramfs: 'initramf.gz 0x00800000',
|
HOST_CONFIG_initramfs: 'initramf.gz 0x00800000',
|
||||||
@ -207,7 +205,7 @@ describe('device-config', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
expect(() =>
|
expect(() =>
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
deviceConfig.bootConfigChangeRequired(
|
deviceConfig.bootConfigChangeRequired(
|
||||||
configTxtBackend,
|
configTxtBackend,
|
||||||
current,
|
current,
|
||||||
@ -243,7 +241,7 @@ describe('device-config', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
deviceConfig.bootConfigChangeRequired(
|
deviceConfig.bootConfigChangeRequired(
|
||||||
configTxtBackend,
|
configTxtBackend,
|
||||||
current,
|
current,
|
||||||
@ -311,7 +309,7 @@ describe('device-config', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
deviceConfig.bootConfigChangeRequired(
|
deviceConfig.bootConfigChangeRequired(
|
||||||
configTxtBackend,
|
configTxtBackend,
|
||||||
current,
|
current,
|
||||||
@ -386,11 +384,10 @@ describe('device-config', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
deviceConfig.bootConfigChangeRequired(extlinuxBackend, current, target),
|
deviceConfig.bootConfigChangeRequired(extlinuxBackend, current, target),
|
||||||
).to.equal(true);
|
).to.equal(true);
|
||||||
|
|
||||||
// @ts-ignore accessing private value
|
|
||||||
await deviceConfig.setBootConfig(extlinuxBackend, target);
|
await deviceConfig.setBootConfig(extlinuxBackend, target);
|
||||||
expect(logSpy).to.be.calledTwice;
|
expect(logSpy).to.be.calledTwice;
|
||||||
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
|
expect(logSpy.getCall(1).args[2]).to.equal('Apply boot config success');
|
||||||
|
@ -30,12 +30,12 @@ describe('conversions', function () {
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
it('should return an empty object with an empty input', function () {
|
it('should return an empty object with an empty input', function () {
|
||||||
// @ts-ignore passing invalid value to test
|
// @ts-expect-error passing invalid value to test
|
||||||
expect(conversion.envArrayToObject(null)).to.deep.equal({});
|
expect(conversion.envArrayToObject(null)).to.deep.equal({});
|
||||||
// @ts-ignore passing invalid value to test
|
// @ts-expect-error passing invalid value to test
|
||||||
expect(conversion.envArrayToObject('')).to.deep.equal({});
|
expect(conversion.envArrayToObject('')).to.deep.equal({});
|
||||||
expect(conversion.envArrayToObject([])).to.deep.equal({});
|
expect(conversion.envArrayToObject([])).to.deep.equal({});
|
||||||
// @ts-ignore passing invalid value to test
|
// @ts-expect-error passing invalid value to test
|
||||||
expect(conversion.envArrayToObject(1)).to.deep.equal({});
|
expect(conversion.envArrayToObject(1)).to.deep.equal({});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -2,7 +2,7 @@ import { PortMap, PortRange } from '~/src/compose/ports';
|
|||||||
import { expect } from 'chai';
|
import { expect } from 'chai';
|
||||||
|
|
||||||
// Force cast `PortMap` as a public version so we can test it
|
// Force cast `PortMap` as a public version so we can test it
|
||||||
const PortMapPublic = (PortMap as any) as new (
|
const PortMapPublic = PortMap as any as new (
|
||||||
portStrOrObj: string | PortRange,
|
portStrOrObj: string | PortRange,
|
||||||
) => PortMap;
|
) => PortMap;
|
||||||
|
|
||||||
|
@ -39,7 +39,7 @@ describe('SupervisorAPI', () => {
|
|||||||
after(async () => {
|
after(async () => {
|
||||||
try {
|
try {
|
||||||
await api.stop();
|
await api.stop();
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
if (e.message !== 'Server is not running.') {
|
if (e.message !== 'Server is not running.') {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
@ -170,7 +170,7 @@ describe('SupervisorAPI', () => {
|
|||||||
// Start each case with API stopped
|
// Start each case with API stopped
|
||||||
try {
|
try {
|
||||||
await api.stop();
|
await api.stop();
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
if (e.message !== 'Server is not running.') {
|
if (e.message !== 'Server is not running.') {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
@ -178,9 +178,9 @@ describe('SupervisorAPI', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
after(async () => {
|
after(async () => {
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
Log.info.restore();
|
Log.info.restore();
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
Log.error.restore();
|
Log.error.restore();
|
||||||
// Resume API for other test suites
|
// Resume API for other test suites
|
||||||
return api.listen(mockedOptions.listenPort, mockedOptions.timeout);
|
return api.listen(mockedOptions.listenPort, mockedOptions.timeout);
|
||||||
@ -190,7 +190,7 @@ describe('SupervisorAPI', () => {
|
|||||||
// Start API
|
// Start API
|
||||||
await api.listen(mockedOptions.listenPort, mockedOptions.timeout);
|
await api.listen(mockedOptions.listenPort, mockedOptions.timeout);
|
||||||
// Check if success start was logged
|
// Check if success start was logged
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(Log.info.lastCall?.lastArg).to.equal(
|
expect(Log.info.lastCall?.lastArg).to.equal(
|
||||||
`Supervisor API successfully started on port ${mockedOptions.listenPort}`,
|
`Supervisor API successfully started on port ${mockedOptions.listenPort}`,
|
||||||
);
|
);
|
||||||
@ -202,7 +202,7 @@ describe('SupervisorAPI', () => {
|
|||||||
// Stop API
|
// Stop API
|
||||||
await api.stop();
|
await api.stop();
|
||||||
// Check if stopped with info was logged
|
// Check if stopped with info was logged
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(Log.info.lastCall?.lastArg).to.equal('Stopped Supervisor API');
|
expect(Log.info.lastCall?.lastArg).to.equal('Stopped Supervisor API');
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -212,7 +212,7 @@ describe('SupervisorAPI', () => {
|
|||||||
// Stop API with error
|
// Stop API with error
|
||||||
await api.stop({ errored: true });
|
await api.stop({ errored: true });
|
||||||
// Check if stopped with error was logged
|
// Check if stopped with error was logged
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(Log.error.lastCall?.lastArg).to.equal('Stopped Supervisor API');
|
expect(Log.error.lastCall?.lastArg).to.equal('Stopped Supervisor API');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -166,9 +166,8 @@ describe('LocalModeManager', () => {
|
|||||||
const stubEngineObjectMethods = (
|
const stubEngineObjectMethods = (
|
||||||
removeThrows: boolean,
|
removeThrows: boolean,
|
||||||
): Array<sinon.SinonStubbedInstance<EngineStubbedObject>> => {
|
): Array<sinon.SinonStubbedInstance<EngineStubbedObject>> => {
|
||||||
const resArray: Array<sinon.SinonStubbedInstance<
|
const resArray: Array<sinon.SinonStubbedInstance<EngineStubbedObject>> =
|
||||||
EngineStubbedObject
|
[];
|
||||||
>> = [];
|
|
||||||
|
|
||||||
const stub = <T>(
|
const stub = <T>(
|
||||||
c: sinon.StubbableType<EngineStubbedObject>,
|
c: sinon.StubbableType<EngineStubbedObject>,
|
||||||
@ -186,7 +185,7 @@ describe('LocalModeManager', () => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
resArray.push(res);
|
resArray.push(res);
|
||||||
return (res as unknown) as T;
|
return res as unknown as T;
|
||||||
};
|
};
|
||||||
dockerStub.getImage.returns(stub(Docker.Image, 'image'));
|
dockerStub.getImage.returns(stub(Docker.Image, 'image'));
|
||||||
dockerStub.getContainer.returns(stub(Docker.Container, 'container'));
|
dockerStub.getContainer.returns(stub(Docker.Container, 'container'));
|
||||||
@ -400,7 +399,7 @@ describe('LocalModeManager', () => {
|
|||||||
try {
|
try {
|
||||||
const result = await localMode.retrieveLatestSnapshot();
|
const result = await localMode.retrieveLatestSnapshot();
|
||||||
expect(result).to.not.exist;
|
expect(result).to.not.exist;
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
expect(e.message).to.match(/Cannot parse snapshot data.*"bad json"/);
|
expect(e.message).to.match(/Cannot parse snapshot data.*"bad json"/);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@ -416,7 +415,7 @@ describe('LocalModeManager', () => {
|
|||||||
try {
|
try {
|
||||||
const result = await localMode.retrieveLatestSnapshot();
|
const result = await localMode.retrieveLatestSnapshot();
|
||||||
expect(result).to.not.exist;
|
expect(result).to.not.exist;
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
expect(e.message).to.match(
|
expect(e.message).to.match(
|
||||||
/Cannot parse snapshot data.*"bad timestamp"/,
|
/Cannot parse snapshot data.*"bad timestamp"/,
|
||||||
);
|
);
|
||||||
|
@ -341,57 +341,51 @@ describe('Container contracts', () => {
|
|||||||
|
|
||||||
describe('Optional containers', () => {
|
describe('Optional containers', () => {
|
||||||
it('should correctly run passing optional containers', async () => {
|
it('should correctly run passing optional containers', async () => {
|
||||||
const {
|
const { valid, unmetServices, fulfilledServices } =
|
||||||
valid,
|
containerContractsFulfilled({
|
||||||
unmetServices,
|
service1: {
|
||||||
fulfilledServices,
|
contract: {
|
||||||
} = containerContractsFulfilled({
|
type: 'sw.container',
|
||||||
service1: {
|
slug: 'service1',
|
||||||
contract: {
|
requires: [
|
||||||
type: 'sw.container',
|
{
|
||||||
slug: 'service1',
|
type: 'sw.supervisor',
|
||||||
requires: [
|
version: `<${supervisorVersionGreater}`,
|
||||||
{
|
},
|
||||||
type: 'sw.supervisor',
|
],
|
||||||
version: `<${supervisorVersionGreater}`,
|
},
|
||||||
},
|
optional: true,
|
||||||
],
|
|
||||||
},
|
},
|
||||||
optional: true,
|
});
|
||||||
},
|
|
||||||
});
|
|
||||||
expect(valid).to.equal(true);
|
expect(valid).to.equal(true);
|
||||||
expect(unmetServices).to.deep.equal([]);
|
expect(unmetServices).to.deep.equal([]);
|
||||||
expect(fulfilledServices).to.deep.equal(['service1']);
|
expect(fulfilledServices).to.deep.equal(['service1']);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should corrrectly omit failing optional containers', async () => {
|
it('should corrrectly omit failing optional containers', async () => {
|
||||||
const {
|
const { valid, unmetServices, fulfilledServices } =
|
||||||
valid,
|
containerContractsFulfilled({
|
||||||
unmetServices,
|
service1: {
|
||||||
fulfilledServices,
|
contract: {
|
||||||
} = containerContractsFulfilled({
|
type: 'sw.container',
|
||||||
service1: {
|
slug: 'service1',
|
||||||
contract: {
|
requires: [
|
||||||
type: 'sw.container',
|
{
|
||||||
slug: 'service1',
|
type: 'sw.supervisor',
|
||||||
requires: [
|
version: `>${supervisorVersionGreater}`,
|
||||||
{
|
},
|
||||||
type: 'sw.supervisor',
|
],
|
||||||
version: `>${supervisorVersionGreater}`,
|
},
|
||||||
},
|
optional: true,
|
||||||
],
|
|
||||||
},
|
},
|
||||||
optional: true,
|
service2: {
|
||||||
},
|
contract: {
|
||||||
service2: {
|
type: 'sw.container',
|
||||||
contract: {
|
slug: 'service2',
|
||||||
type: 'sw.container',
|
},
|
||||||
slug: 'service2',
|
optional: false,
|
||||||
},
|
},
|
||||||
optional: false,
|
});
|
||||||
},
|
|
||||||
});
|
|
||||||
expect(valid).to.equal(true);
|
expect(valid).to.equal(true);
|
||||||
expect(unmetServices).to.deep.equal(['service1']);
|
expect(unmetServices).to.deep.equal(['service1']);
|
||||||
expect(fulfilledServices).to.deep.equal(['service2']);
|
expect(fulfilledServices).to.deep.equal(['service2']);
|
||||||
|
@ -8,8 +8,7 @@ describe('Deltas', () => {
|
|||||||
const imageStub = stub(dockerUtils.docker, 'getImage').returns({
|
const imageStub = stub(dockerUtils.docker, 'getImage').returns({
|
||||||
inspect: () => {
|
inspect: () => {
|
||||||
return Promise.resolve({
|
return Promise.resolve({
|
||||||
Id:
|
Id: 'sha256:34ec91fe6e08cb0f867bbc069c5f499d39297eb8e874bb8ce9707537d983bcbc',
|
||||||
'sha256:34ec91fe6e08cb0f867bbc069c5f499d39297eb8e874bb8ce9707537d983bcbc',
|
|
||||||
RepoTags: [],
|
RepoTags: [],
|
||||||
RepoDigests: [],
|
RepoDigests: [],
|
||||||
Parent: '',
|
Parent: '',
|
||||||
|
@ -23,7 +23,7 @@ describe('Extlinux Configuration', () => {
|
|||||||
APPEND \${cbootargs} \${resin_kernel_root} ro rootwait\
|
APPEND \${cbootargs} \${resin_kernel_root} ro rootwait\
|
||||||
`;
|
`;
|
||||||
|
|
||||||
// @ts-ignore accessing private method
|
// @ts-expect-error accessing private method
|
||||||
const parsed = Extlinux.parseExtlinuxFile(text);
|
const parsed = Extlinux.parseExtlinuxFile(text);
|
||||||
expect(parsed.globals).to.have.property('DEFAULT').that.equals('primary');
|
expect(parsed.globals).to.have.property('DEFAULT').that.equals('primary');
|
||||||
expect(parsed.globals).to.have.property('TIMEOUT').that.equals('30');
|
expect(parsed.globals).to.have.property('TIMEOUT').that.equals('30');
|
||||||
@ -60,7 +60,7 @@ describe('Extlinux Configuration', () => {
|
|||||||
APPEND test4\
|
APPEND test4\
|
||||||
`;
|
`;
|
||||||
|
|
||||||
// @ts-ignore accessing private method
|
// @ts-expect-error accessing private method
|
||||||
const parsed = Extlinux.parseExtlinuxFile(text);
|
const parsed = Extlinux.parseExtlinuxFile(text);
|
||||||
expect(parsed.labels).to.have.property('primary').that.deep.equals({
|
expect(parsed.labels).to.have.property('primary').that.deep.equals({
|
||||||
LINUX: 'test1',
|
LINUX: 'test1',
|
||||||
@ -147,7 +147,7 @@ describe('Extlinux Configuration', () => {
|
|||||||
// Expect correct rejection from the given bad config
|
// Expect correct rejection from the given bad config
|
||||||
try {
|
try {
|
||||||
await backend.getBootConfig();
|
await backend.getBootConfig();
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
expect(e.message).to.equal(badConfig.reason);
|
expect(e.message).to.equal(badConfig.reason);
|
||||||
}
|
}
|
||||||
// Restore stub
|
// Restore stub
|
||||||
@ -248,12 +248,11 @@ describe('Extlinux Configuration', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('normalizes variable value', () => {
|
it('normalizes variable value', () => {
|
||||||
[
|
[{ input: { key: 'key', value: 'value' }, output: 'value' }].forEach(
|
||||||
{ input: { key: 'key', value: 'value' }, output: 'value' },
|
({ input, output }) =>
|
||||||
].forEach(({ input, output }) =>
|
expect(backend.processConfigVarValue(input.key, input.value)).to.equal(
|
||||||
expect(backend.processConfigVarValue(input.key, input.value)).to.equal(
|
output,
|
||||||
output,
|
),
|
||||||
),
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -43,7 +43,7 @@ describe('db-format', () => {
|
|||||||
after(async () => {
|
after(async () => {
|
||||||
try {
|
try {
|
||||||
await testDb.destroy();
|
await testDb.destroy();
|
||||||
} catch (e) {
|
} catch {
|
||||||
/* noop */
|
/* noop */
|
||||||
}
|
}
|
||||||
sinon.restore();
|
sinon.restore();
|
||||||
|
@ -6,7 +6,7 @@ describe('FDT directive', () => {
|
|||||||
|
|
||||||
it('parses valid FDT value', () => {
|
it('parses valid FDT value', () => {
|
||||||
VALID_VALUES.forEach(({ input, output }) =>
|
VALID_VALUES.forEach(({ input, output }) =>
|
||||||
// @ts-ignore input with no FDT can still be parsed
|
// @ts-expect-error input with no FDT can still be parsed
|
||||||
expect(directive.parse(input)).to.deep.equal(output),
|
expect(directive.parse(input)).to.deep.equal(output),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
@ -24,7 +24,7 @@ describe('extra_uEnv Configuration', () => {
|
|||||||
custom_fdt_file=mycustom.dtb
|
custom_fdt_file=mycustom.dtb
|
||||||
extra_os_cmdline=isolcpus=3,4 splash console=tty0
|
extra_os_cmdline=isolcpus=3,4 splash console=tty0
|
||||||
`;
|
`;
|
||||||
// @ts-ignore accessing private method
|
// @ts-expect-error accessing private method
|
||||||
const parsed = ExtraUEnv.parseOptions(fileContents);
|
const parsed = ExtraUEnv.parseOptions(fileContents);
|
||||||
expect(parsed).to.deep.equal({
|
expect(parsed).to.deep.equal({
|
||||||
fdt: 'mycustom.dtb',
|
fdt: 'mycustom.dtb',
|
||||||
@ -100,10 +100,10 @@ describe('extra_uEnv Configuration', () => {
|
|||||||
readFileStub.resolves(badConfig.contents);
|
readFileStub.resolves(badConfig.contents);
|
||||||
// Expect warning log from the given bad config
|
// Expect warning log from the given bad config
|
||||||
await backend.getBootConfig();
|
await backend.getBootConfig();
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
expect(Log.warn.lastCall?.lastArg).to.equal(badConfig.reason);
|
expect(Log.warn.lastCall?.lastArg).to.equal(badConfig.reason);
|
||||||
}
|
}
|
||||||
// @ts-ignore
|
// @ts-expect-error
|
||||||
Log.warn.restore();
|
Log.warn.restore();
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -144,10 +144,10 @@ describe('extra_uEnv Configuration', () => {
|
|||||||
stub(fsUtils, 'writeAndSyncFile').resolves();
|
stub(fsUtils, 'writeAndSyncFile').resolves();
|
||||||
const logWarningStub = spy(Log, 'warn');
|
const logWarningStub = spy(Log, 'warn');
|
||||||
|
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
const previousSupportedConfigs = ExtraUEnv.supportedConfigs;
|
const previousSupportedConfigs = ExtraUEnv.supportedConfigs;
|
||||||
// Stub isSupportedConfig so we can confirm collections work
|
// Stub isSupportedConfig so we can confirm collections work
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
ExtraUEnv.supportedConfigs = {
|
ExtraUEnv.supportedConfigs = {
|
||||||
fdt: { key: 'custom_fdt_file', collection: false },
|
fdt: { key: 'custom_fdt_file', collection: false },
|
||||||
isolcpus: { key: 'extra_os_cmdline', collection: true },
|
isolcpus: { key: 'extra_os_cmdline', collection: true },
|
||||||
@ -171,7 +171,7 @@ describe('extra_uEnv Configuration', () => {
|
|||||||
// Restore stubs
|
// Restore stubs
|
||||||
(fsUtils.writeAndSyncFile as SinonStub).restore();
|
(fsUtils.writeAndSyncFile as SinonStub).restore();
|
||||||
logWarningStub.restore();
|
logWarningStub.restore();
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
ExtraUEnv.supportedConfigs = previousSupportedConfigs;
|
ExtraUEnv.supportedConfigs = previousSupportedConfigs;
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -212,12 +212,11 @@ describe('extra_uEnv Configuration', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('normalizes variable value', () => {
|
it('normalizes variable value', () => {
|
||||||
[
|
[{ input: { key: 'key', value: 'value' }, output: 'value' }].forEach(
|
||||||
{ input: { key: 'key', value: 'value' }, output: 'value' },
|
({ input, output }) =>
|
||||||
].forEach(({ input, output }) =>
|
expect(backend.processConfigVarValue(input.key, input.value)).to.equal(
|
||||||
expect(backend.processConfigVarValue(input.key, input.value)).to.equal(
|
output,
|
||||||
output,
|
),
|
||||||
),
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -10,17 +10,17 @@ describe('ODMDATA Configuration', () => {
|
|||||||
const backend = new Odmdata();
|
const backend = new Odmdata();
|
||||||
let logWarningStub: SinonStub;
|
let logWarningStub: SinonStub;
|
||||||
let logErrorStub: SinonStub;
|
let logErrorStub: SinonStub;
|
||||||
// @ts-ignore accessing private vluae
|
// @ts-expect-error accessing private vluae
|
||||||
const previousConfigPath = Odmdata.bootConfigPath;
|
const previousConfigPath = Odmdata.bootConfigPath;
|
||||||
const testConfigPath = resolve(process.cwd(), 'test/data/boot0.img');
|
const testConfigPath = resolve(process.cwd(), 'test/data/boot0.img');
|
||||||
|
|
||||||
before(() => {
|
before(() => {
|
||||||
// @ts-ignore setting value of private variable
|
// @ts-expect-error setting value of private variable
|
||||||
Odmdata.bootConfigPath = testConfigPath;
|
Odmdata.bootConfigPath = testConfigPath;
|
||||||
});
|
});
|
||||||
|
|
||||||
after(() => {
|
after(() => {
|
||||||
// @ts-ignore setting value of private variable
|
// @ts-expect-error setting value of private variable
|
||||||
Odmdata.bootConfigPath = previousConfigPath;
|
Odmdata.bootConfigPath = previousConfigPath;
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -60,9 +60,9 @@ describe('ODMDATA Configuration', () => {
|
|||||||
// Stub openFileStub with specific error
|
// Stub openFileStub with specific error
|
||||||
openFileStub.rejects(log.error);
|
openFileStub.rejects(log.error);
|
||||||
try {
|
try {
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
await backend.getFileHandle(testConfigPath);
|
await backend.getFileHandle(testConfigPath);
|
||||||
} catch (e) {
|
} catch {
|
||||||
// noop
|
// noop
|
||||||
}
|
}
|
||||||
// Check that correct message was logged
|
// Check that correct message was logged
|
||||||
@ -80,7 +80,7 @@ describe('ODMDATA Configuration', () => {
|
|||||||
|
|
||||||
it('correctly parses configuration mode', async () => {
|
it('correctly parses configuration mode', async () => {
|
||||||
for (const config of CONFIG_MODES) {
|
for (const config of CONFIG_MODES) {
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
expect(backend.parseOptions(config.buffer)).to.deep.equal({
|
expect(backend.parseOptions(config.buffer)).to.deep.equal({
|
||||||
configuration: config.mode,
|
configuration: config.mode,
|
||||||
});
|
});
|
||||||
@ -90,7 +90,7 @@ describe('ODMDATA Configuration', () => {
|
|||||||
it('logs error for malformed configuration mode', async () => {
|
it('logs error for malformed configuration mode', async () => {
|
||||||
// Logs when configuration mode is unknown
|
// Logs when configuration mode is unknown
|
||||||
try {
|
try {
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
backend.parseOptions(Buffer.from([0x9, 0x9, 0x9]));
|
backend.parseOptions(Buffer.from([0x9, 0x9, 0x9]));
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
// noop
|
// noop
|
||||||
@ -102,9 +102,9 @@ describe('ODMDATA Configuration', () => {
|
|||||||
|
|
||||||
// Logs when bytes don't match
|
// Logs when bytes don't match
|
||||||
try {
|
try {
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
backend.parseOptions(Buffer.from([0x1, 0x0, 0x0]));
|
backend.parseOptions(Buffer.from([0x1, 0x0, 0x0]));
|
||||||
} catch (e) {
|
} catch {
|
||||||
// noop
|
// noop
|
||||||
}
|
}
|
||||||
// Check that correct message was logged
|
// Check that correct message was logged
|
||||||
@ -115,7 +115,7 @@ describe('ODMDATA Configuration', () => {
|
|||||||
|
|
||||||
it('unlock/lock bootConfigPath RO access', async () => {
|
it('unlock/lock bootConfigPath RO access', async () => {
|
||||||
const writeSpy = stub().resolves();
|
const writeSpy = stub().resolves();
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
const handleStub = stub(backend, 'getFileHandle').resolves({
|
const handleStub = stub(backend, 'getFileHandle').resolves({
|
||||||
write: writeSpy,
|
write: writeSpy,
|
||||||
close: async (): Promise<void> => {
|
close: async (): Promise<void> => {
|
||||||
@ -123,11 +123,11 @@ describe('ODMDATA Configuration', () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
await backend.setReadOnly(false); // Try to unlock
|
await backend.setReadOnly(false); // Try to unlock
|
||||||
expect(writeSpy).to.be.calledWith('0');
|
expect(writeSpy).to.be.calledWith('0');
|
||||||
|
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
await backend.setReadOnly(true); // Try to lock
|
await backend.setReadOnly(true); // Try to lock
|
||||||
expect(writeSpy).to.be.calledWith('1');
|
expect(writeSpy).to.be.calledWith('1');
|
||||||
|
|
||||||
@ -135,7 +135,7 @@ describe('ODMDATA Configuration', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('sets new config values', async () => {
|
it('sets new config values', async () => {
|
||||||
// @ts-ignore accessing private value
|
// @ts-expect-error accessing private value
|
||||||
const setROStub = stub(backend, 'setReadOnly');
|
const setROStub = stub(backend, 'setReadOnly');
|
||||||
setROStub.resolves();
|
setROStub.resolves();
|
||||||
// Get current config
|
// Get current config
|
||||||
@ -189,12 +189,11 @@ describe('ODMDATA Configuration', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('normalizes variable value', () => {
|
it('normalizes variable value', () => {
|
||||||
[
|
[{ input: { key: 'key', value: 'value' }, output: 'value' }].forEach(
|
||||||
{ input: { key: 'key', value: 'value' }, output: 'value' },
|
({ input, output }) =>
|
||||||
].forEach(({ input, output }) =>
|
expect(backend.processConfigVarValue(input.key, input.value)).to.equal(
|
||||||
expect(backend.processConfigVarValue(input.key, input.value)).to.equal(
|
output,
|
||||||
output,
|
),
|
||||||
),
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -118,7 +118,7 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
|
|||||||
after(async () => {
|
after(async () => {
|
||||||
try {
|
try {
|
||||||
await api.stop();
|
await api.stop();
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
if (e.message !== 'Server is not running.') {
|
if (e.message !== 'Server is not running.') {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
@ -1065,11 +1065,9 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
|
|||||||
|
|
||||||
it('skips restarting hostname services if they are part of config-json.target', async () => {
|
it('skips restarting hostname services if they are part of config-json.target', async () => {
|
||||||
// stub servicePartOf to return the config-json.target we are looking for
|
// stub servicePartOf to return the config-json.target we are looking for
|
||||||
stub(dbus, 'servicePartOf').callsFake(
|
stub(dbus, 'servicePartOf').callsFake(async (): Promise<string> => {
|
||||||
async (): Promise<string> => {
|
return 'config-json.target';
|
||||||
return 'config-json.target';
|
});
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
await unlinkAll(redsocksPath, noProxyPath);
|
await unlinkAll(redsocksPath, noProxyPath);
|
||||||
|
|
||||||
@ -1223,11 +1221,9 @@ describe('SupervisorAPI [V1 Endpoints]', () => {
|
|||||||
|
|
||||||
it('skips restarting proxy services when part of redsocks-conf.target', async () => {
|
it('skips restarting proxy services when part of redsocks-conf.target', async () => {
|
||||||
// stub servicePartOf to return the redsocks-conf.target we are looking for
|
// stub servicePartOf to return the redsocks-conf.target we are looking for
|
||||||
stub(dbus, 'servicePartOf').callsFake(
|
stub(dbus, 'servicePartOf').callsFake(async (): Promise<string> => {
|
||||||
async (): Promise<string> => {
|
return 'redsocks-conf.target';
|
||||||
return 'redsocks-conf.target';
|
});
|
||||||
},
|
|
||||||
);
|
|
||||||
// Test each proxy patch sequentially to prevent conflicts when writing to fs
|
// Test each proxy patch sequentially to prevent conflicts when writing to fs
|
||||||
for (const key of Object.keys(validProxyReqs)) {
|
for (const key of Object.keys(validProxyReqs)) {
|
||||||
const patchBodyValuesforKey: string[] | number[] =
|
const patchBodyValuesforKey: string[] | number[] =
|
||||||
|
@ -62,7 +62,7 @@ describe('SupervisorAPI [V2 Endpoints]', () => {
|
|||||||
after(async () => {
|
after(async () => {
|
||||||
try {
|
try {
|
||||||
await api.stop();
|
await api.stop();
|
||||||
} catch (e) {
|
} catch (e: any) {
|
||||||
if (e.message !== 'Server is not running.') {
|
if (e.message !== 'Server is not running.') {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
|
@ -396,10 +396,11 @@ describe('compose/app', () => {
|
|||||||
volumes: [volume],
|
volumes: [volume],
|
||||||
isTarget: true,
|
isTarget: true,
|
||||||
});
|
});
|
||||||
const recreateVolumeSteps = currentWithVolumesRemoved.nextStepsForAppUpdate(
|
const recreateVolumeSteps =
|
||||||
contextWithImages,
|
currentWithVolumesRemoved.nextStepsForAppUpdate(
|
||||||
target,
|
contextWithImages,
|
||||||
);
|
target,
|
||||||
|
);
|
||||||
|
|
||||||
expect(recreateVolumeSteps).to.have.length(1);
|
expect(recreateVolumeSteps).to.have.length(1);
|
||||||
expectSteps('createVolume', recreateVolumeSteps);
|
expectSteps('createVolume', recreateVolumeSteps);
|
||||||
@ -411,10 +412,11 @@ describe('compose/app', () => {
|
|||||||
volumes: [volume],
|
volumes: [volume],
|
||||||
});
|
});
|
||||||
|
|
||||||
const createServiceSteps = currentWithVolumeRecreated.nextStepsForAppUpdate(
|
const createServiceSteps =
|
||||||
contextWithImages,
|
currentWithVolumeRecreated.nextStepsForAppUpdate(
|
||||||
target,
|
contextWithImages,
|
||||||
);
|
target,
|
||||||
|
);
|
||||||
expectSteps('start', createServiceSteps);
|
expectSteps('start', createServiceSteps);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -195,7 +195,7 @@ describe('compose/application-manager', () => {
|
|||||||
after(async () => {
|
after(async () => {
|
||||||
try {
|
try {
|
||||||
await testDb.destroy();
|
await testDb.destroy();
|
||||||
} catch (e) {
|
} catch {
|
||||||
/* noop */
|
/* noop */
|
||||||
}
|
}
|
||||||
// Restore stubbed methods
|
// Restore stubbed methods
|
||||||
@ -216,15 +216,11 @@ describe('compose/application-manager', () => {
|
|||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
const {
|
const { currentApps, availableImages, downloading, containerIdsByAppId } =
|
||||||
currentApps,
|
createCurrentState({
|
||||||
availableImages,
|
services: [await createService({ running: false, appId: 1 })],
|
||||||
downloading,
|
networks: [DEFAULT_NETWORK],
|
||||||
containerIdsByAppId,
|
});
|
||||||
} = createCurrentState({
|
|
||||||
services: [await createService({ running: false, appId: 1 })],
|
|
||||||
networks: [DEFAULT_NETWORK],
|
|
||||||
});
|
|
||||||
|
|
||||||
const steps = await applicationManager.inferNextSteps(
|
const steps = await applicationManager.inferNextSteps(
|
||||||
currentApps,
|
currentApps,
|
||||||
@ -248,15 +244,11 @@ describe('compose/application-manager', () => {
|
|||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
const {
|
const { currentApps, availableImages, downloading, containerIdsByAppId } =
|
||||||
currentApps,
|
createCurrentState({
|
||||||
availableImages,
|
services: [await createService()],
|
||||||
downloading,
|
networks: [DEFAULT_NETWORK],
|
||||||
containerIdsByAppId,
|
});
|
||||||
} = createCurrentState({
|
|
||||||
services: [await createService()],
|
|
||||||
networks: [DEFAULT_NETWORK],
|
|
||||||
});
|
|
||||||
|
|
||||||
const [killStep] = await applicationManager.inferNextSteps(
|
const [killStep] = await applicationManager.inferNextSteps(
|
||||||
currentApps,
|
currentApps,
|
||||||
@ -282,16 +274,12 @@ describe('compose/application-manager', () => {
|
|||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
const {
|
const { currentApps, availableImages, downloading, containerIdsByAppId } =
|
||||||
currentApps,
|
createCurrentState({
|
||||||
availableImages,
|
services: [await createService({ appId: 1 })],
|
||||||
downloading,
|
networks: [DEFAULT_NETWORK],
|
||||||
containerIdsByAppId,
|
images: [],
|
||||||
} = createCurrentState({
|
});
|
||||||
services: [await createService({ appId: 1 })],
|
|
||||||
networks: [DEFAULT_NETWORK],
|
|
||||||
images: [],
|
|
||||||
});
|
|
||||||
|
|
||||||
const [fetchStep] = await applicationManager.inferNextSteps(
|
const [fetchStep] = await applicationManager.inferNextSteps(
|
||||||
currentApps,
|
currentApps,
|
||||||
@ -317,16 +305,12 @@ describe('compose/application-manager', () => {
|
|||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
const {
|
const { currentApps, availableImages, downloading, containerIdsByAppId } =
|
||||||
currentApps,
|
createCurrentState({
|
||||||
availableImages,
|
services: [await createService({ appId: 1 })],
|
||||||
downloading,
|
networks: [DEFAULT_NETWORK],
|
||||||
containerIdsByAppId,
|
downloading: ['image-new'],
|
||||||
} = createCurrentState({
|
});
|
||||||
services: [await createService({ appId: 1 })],
|
|
||||||
networks: [DEFAULT_NETWORK],
|
|
||||||
downloading: ['image-new'],
|
|
||||||
});
|
|
||||||
|
|
||||||
const [noopStep, ...nextSteps] = await applicationManager.inferNextSteps(
|
const [noopStep, ...nextSteps] = await applicationManager.inferNextSteps(
|
||||||
currentApps,
|
currentApps,
|
||||||
@ -360,25 +344,21 @@ describe('compose/application-manager', () => {
|
|||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
const {
|
const { currentApps, availableImages, downloading, containerIdsByAppId } =
|
||||||
currentApps,
|
createCurrentState({
|
||||||
availableImages,
|
services: [
|
||||||
downloading,
|
await createService(
|
||||||
containerIdsByAppId,
|
{
|
||||||
} = createCurrentState({
|
image: 'image-old',
|
||||||
services: [
|
labels,
|
||||||
await createService(
|
appId: 1,
|
||||||
{
|
commit: 'old-release',
|
||||||
image: 'image-old',
|
},
|
||||||
labels,
|
{ options: { imageInfo: { Id: 'sha256:image-old-id' } } },
|
||||||
appId: 1,
|
),
|
||||||
commit: 'old-release',
|
],
|
||||||
},
|
networks: [DEFAULT_NETWORK],
|
||||||
{ options: { imageInfo: { Id: 'sha256:image-old-id' } } },
|
});
|
||||||
),
|
|
||||||
],
|
|
||||||
networks: [DEFAULT_NETWORK],
|
|
||||||
});
|
|
||||||
|
|
||||||
const [killStep] = await applicationManager.inferNextSteps(
|
const [killStep] = await applicationManager.inferNextSteps(
|
||||||
currentApps,
|
currentApps,
|
||||||
@@ -414,25 +394,21 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [
-        await createService(
-          {
-            image: 'image-old',
-            labels,
-            appId: 1,
-            commit: 'old-release',
-          },
-          { options: { imageInfo: { Id: 'sha256:image-old-id' } } },
-        ),
-      ],
-      networks: [DEFAULT_NETWORK],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService(
+            {
+              image: 'image-old',
+              labels,
+              appId: 1,
+              commit: 'old-release',
+            },
+            { options: { imageInfo: { Id: 'sha256:image-old-id' } } },
+          ),
+        ],
+        networks: [DEFAULT_NETWORK],
+      });

     const [killStep] = await applicationManager.inferNextSteps(
       currentApps,
@@ -468,23 +444,19 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      images: [
-        createImage({
-          appId: 1,
-          name: 'image-old',
-          serviceName: 'main',
-          dockerImageId: 'image-old-id',
-        }),
-      ],
-      networks: [DEFAULT_NETWORK],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        images: [
+          createImage({
+            appId: 1,
+            name: 'image-old',
+            serviceName: 'main',
+            dockerImageId: 'image-old-id',
+          }),
+        ],
+        networks: [DEFAULT_NETWORK],
+      });

     const [removeImage] = await applicationManager.inferNextSteps(
       currentApps,
@@ -527,38 +499,34 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [
-        await createService({
-          appId: 1,
-          commit: 'old-release',
-          serviceName: 'main',
-          composition: {
-            depends_on: ['dep'],
-          },
-        }),
-        await createService({
-          appId: 1,
-          commit: 'old-release',
-          serviceName: 'dep',
-        }),
-      ],
-      networks: [DEFAULT_NETWORK],
-      downloading: ['dep-image'], // dep-image is still being downloaded
-      images: [
-        // main-image was already downloaded
-        createImage({
-          appId: 1,
-          name: 'main-image',
-          serviceName: 'main',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService({
+            appId: 1,
+            commit: 'old-release',
+            serviceName: 'main',
+            composition: {
+              depends_on: ['dep'],
+            },
+          }),
+          await createService({
+            appId: 1,
+            commit: 'old-release',
+            serviceName: 'dep',
+          }),
+        ],
+        networks: [DEFAULT_NETWORK],
+        downloading: ['dep-image'], // dep-image is still being downloaded
+        images: [
+          // main-image was already downloaded
+          createImage({
+            appId: 1,
+            name: 'main-image',
+            serviceName: 'main',
+          }),
+        ],
+      });

     const steps = await applicationManager.inferNextSteps(
       currentApps,
@@ -601,48 +569,44 @@ describe('compose/application-manager', () => {
       true,
     );

-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [
-        await createService({
-          appId: 1,
-          appUuid: 'appuuid',
-          commit: 'old-release',
-          serviceName: 'main',
-          composition: {
-            depends_on: ['dep'],
-          },
-        }),
-        await createService({
-          appId: 1,
-          appUuid: 'appuuid',
-          commit: 'old-release',
-          serviceName: 'dep',
-        }),
-      ],
-      networks: [DEFAULT_NETWORK],
-      images: [
-        // Both images have been downloaded
-        createImage({
-          appId: 1,
-          appUuid: 'appuuid',
-          name: 'main-image',
-          serviceName: 'main',
-          commit: 'new-release',
-        }),
-        createImage({
-          appId: 1,
-          appUuid: 'appuuid',
-          name: 'dep-image',
-          serviceName: 'dep',
-          commit: 'new-release',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService({
+            appId: 1,
+            appUuid: 'appuuid',
+            commit: 'old-release',
+            serviceName: 'main',
+            composition: {
+              depends_on: ['dep'],
+            },
+          }),
+          await createService({
+            appId: 1,
+            appUuid: 'appuuid',
+            commit: 'old-release',
+            serviceName: 'dep',
+          }),
+        ],
+        networks: [DEFAULT_NETWORK],
+        images: [
+          // Both images have been downloaded
+          createImage({
+            appId: 1,
+            appUuid: 'appuuid',
+            name: 'main-image',
+            serviceName: 'main',
+            commit: 'new-release',
+          }),
+          createImage({
+            appId: 1,
+            appUuid: 'appuuid',
+            name: 'dep-image',
+            serviceName: 'dep',
+            commit: 'new-release',
+          }),
+        ],
+      });

     const steps = await applicationManager.inferNextSteps(
       currentApps,
@@ -690,28 +654,24 @@ describe('compose/application-manager', () => {
       true,
     );

-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [DEFAULT_NETWORK],
-      images: [
-        // Both images have been downloaded
-        createImage({
-          name: 'main-image',
-          serviceName: 'main',
-          commit: 'new-release',
-        }),
-        createImage({
-          name: 'dep-image',
-          serviceName: 'dep',
-          commit: 'new-release',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [DEFAULT_NETWORK],
+        images: [
+          // Both images have been downloaded
+          createImage({
+            name: 'main-image',
+            serviceName: 'main',
+            commit: 'new-release',
+          }),
+          createImage({
+            name: 'dep-image',
+            serviceName: 'dep',
+            commit: 'new-release',
+          }),
+        ],
+      });

     const [startStep, ...nextSteps] = await applicationManager.inferNextSteps(
       currentApps,
@@ -756,34 +716,30 @@ describe('compose/application-manager', () => {
       true,
     );

-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [
-        await createService({
-          image: 'dep-image',
-          serviceName: 'dep',
-          commit: 'new-release',
-        }),
-      ],
-      networks: [DEFAULT_NETWORK],
-      images: [
-        // Both images have been downloaded
-        createImage({
-          name: 'main-image',
-          serviceName: 'main',
-          commit: 'new-release',
-        }),
-        createImage({
-          name: 'dep-image',
-          serviceName: 'dep',
-          commit: 'new-release',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService({
+            image: 'dep-image',
+            serviceName: 'dep',
+            commit: 'new-release',
+          }),
+        ],
+        networks: [DEFAULT_NETWORK],
+        images: [
+          // Both images have been downloaded
+          createImage({
+            name: 'main-image',
+            serviceName: 'main',
+            commit: 'new-release',
+          }),
+          createImage({
+            name: 'dep-image',
+            serviceName: 'dep',
+            commit: 'new-release',
+          }),
+        ],
+      });

     const [startStep, ...nextSteps] = await applicationManager.inferNextSteps(
       currentApps,
@@ -812,22 +768,20 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [await createService({ appId: 5, serviceName: 'old-service' })],
-      networks: [DEFAULT_NETWORK],
-      images: [
-        // Image has been downloaded
-        createImage({
-          name: 'main-image',
-          serviceName: 'main',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService({ appId: 5, serviceName: 'old-service' }),
+        ],
+        networks: [DEFAULT_NETWORK],
+        images: [
+          // Image has been downloaded
+          createImage({
+            name: 'main-image',
+            serviceName: 'main',
+          }),
+        ],
+      });

     const steps = await applicationManager.inferNextSteps(
       currentApps,
@@ -857,16 +811,12 @@ describe('compose/application-manager', () => {

   it('should not remove an app volumes when they are no longer referenced', async () => {
     const targetApps = createApps({ networks: [DEFAULT_NETWORK] }, true);
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [DEFAULT_NETWORK],
-      volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [DEFAULT_NETWORK],
+        volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
+      });

     const steps = await applicationManager.inferNextSteps(
       currentApps,
@@ -883,17 +833,13 @@ describe('compose/application-manager', () => {

   it('should remove volumes from previous applications', async () => {
     const targetApps = createApps({ networks: [DEFAULT_NETWORK] }, true);
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [],
-      // Volume with different id
-      volumes: [Volume.fromComposeObject('test-volume', 2, 'deadbeef')],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [],
+        // Volume with different id
+        volumes: [Volume.fromComposeObject('test-volume', 2, 'deadbeef')],
+      });

     const steps = await applicationManager.inferNextSteps(
       currentApps,
@@ -916,24 +862,18 @@ describe('compose/application-manager', () => {
       { services: [await createService()], networks: [DEFAULT_NETWORK] },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [DEFAULT_NETWORK],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [DEFAULT_NETWORK],
+      });

-    const [
-      ensureNetworkStep,
-      ...nextSteps
-    ] = await applicationManager.inferNextSteps(currentApps, targetApps, {
-      downloading,
-      availableImages,
-      containerIdsByAppId,
-    });
+    const [ensureNetworkStep, ...nextSteps] =
+      await applicationManager.inferNextSteps(currentApps, targetApps, {
+        downloading,
+        availableImages,
+        containerIdsByAppId,
+      });
     expect(ensureNetworkStep).to.deep.include({
       action: 'ensureSupervisorNetwork',
     });
@@ -955,17 +895,13 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [
-        await createService({ labels }, { options: { listenPort: '48484' } }),
-      ],
-      networks: [DEFAULT_NETWORK],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService({ labels }, { options: { listenPort: '48484' } }),
+        ],
+        networks: [DEFAULT_NETWORK],
+      });

     const [killStep] = await applicationManager.inferNextSteps(
       currentApps,
@@ -995,15 +931,11 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [await createService()],
-      networks: [DEFAULT_NETWORK],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [await createService()],
+        networks: [DEFAULT_NETWORK],
+      });

     const [cleanupStep, ...nextSteps] = await applicationManager.inferNextSteps(
       currentApps,
@@ -1036,30 +968,26 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [DEFAULT_NETWORK],
-      images: [
-        // An image for a service that no longer exists
-        createImage({
-          name: 'old-image',
-          appId: 5,
-          serviceName: 'old-service',
-          dockerImageId: 'sha256:aaaa',
-        }),
-        createImage({
-          name: 'main-image',
-          appId: 1,
-          serviceName: 'main',
-          dockerImageId: 'sha256:bbbb',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [DEFAULT_NETWORK],
+        images: [
+          // An image for a service that no longer exists
+          createImage({
+            name: 'old-image',
+            appId: 5,
+            serviceName: 'old-service',
+            dockerImageId: 'sha256:aaaa',
+          }),
+          createImage({
+            name: 'main-image',
+            appId: 1,
+            serviceName: 'main',
+            dockerImageId: 'sha256:bbbb',
+          }),
+        ],
+      });

     const [removeImageStep] = await applicationManager.inferNextSteps(
       currentApps,
@@ -1088,36 +1016,32 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [
-        await createService(
-          { image: 'main-image' },
-          // Target has a matching image already
-          { options: { imageInfo: { Id: 'sha256:bbbb' } } },
-        ),
-      ],
-      networks: [DEFAULT_NETWORK],
-      images: [
-        // An image for a service that no longer exists
-        createImage({
-          name: 'old-image',
-          appId: 5,
-          serviceName: 'old-service',
-          dockerImageId: 'sha256:aaaa',
-        }),
-        createImage({
-          name: 'main-image',
-          appId: 1,
-          serviceName: 'main',
-          dockerImageId: 'sha256:bbbb',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [
+          await createService(
+            { image: 'main-image' },
+            // Target has a matching image already
+            { options: { imageInfo: { Id: 'sha256:bbbb' } } },
+          ),
+        ],
+        networks: [DEFAULT_NETWORK],
+        images: [
+          // An image for a service that no longer exists
+          createImage({
+            name: 'old-image',
+            appId: 5,
+            serviceName: 'old-service',
+            dockerImageId: 'sha256:aaaa',
+          }),
+          createImage({
+            name: 'main-image',
+            appId: 1,
+            serviceName: 'main',
+            dockerImageId: 'sha256:bbbb',
+          }),
+        ],
+      });

     const [removeImageStep] = await applicationManager.inferNextSteps(
       currentApps,
@@ -1152,16 +1076,12 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [DEFAULT_NETWORK],
-      images: [], // no available images exist
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [DEFAULT_NETWORK],
+        images: [], // no available images exist
+      });

     const [saveImageStep] = await applicationManager.inferNextSteps(
       currentApps,
@@ -1207,35 +1127,31 @@ describe('compose/application-manager', () => {
       },
       true,
     );
-    const {
-      currentApps,
-      availableImages,
-      downloading,
-      containerIdsByAppId,
-    } = createCurrentState({
-      services: [],
-      networks: [
-        // Default networks for two apps
-        Network.fromComposeObject('default', 1, 'app-one', {}),
-        Network.fromComposeObject('default', 2, 'app-two', {}),
-      ],
-      images: [
-        createImage({
-          name: 'main-image-1',
-          appId: 1,
-          appUuid: 'app-one',
-          serviceName: 'main',
-          commit: 'commit-for-app-1',
-        }),
-        createImage({
-          name: 'main-image-2',
-          appId: 2,
-          appUuid: 'app-two',
-          serviceName: 'main',
-          commit: 'commit-for-app-2',
-        }),
-      ],
-    });
+    const { currentApps, availableImages, downloading, containerIdsByAppId } =
+      createCurrentState({
+        services: [],
+        networks: [
+          // Default networks for two apps
+          Network.fromComposeObject('default', 1, 'app-one', {}),
+          Network.fromComposeObject('default', 2, 'app-two', {}),
+        ],
+        images: [
+          createImage({
+            name: 'main-image-1',
+            appId: 1,
+            appUuid: 'app-one',
+            serviceName: 'main',
+            commit: 'commit-for-app-1',
+          }),
+          createImage({
+            name: 'main-image-2',
+            appId: 2,
+            appUuid: 'app-two',
+            serviceName: 'main',
+            commit: 'commit-for-app-2',
+          }),
+        ],
+      });

     const steps = await applicationManager.inferNextSteps(
       currentApps,
@@ -57,8 +57,7 @@ describe('compose/images', () => {

   it('finds image by matching digest on the database', async () => {
     const dbImage = createDBImage({
-      name:
-        'registry2.balena-cloud.com/v2/aaaaa@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+      name: 'registry2.balena-cloud.com/v2/aaaaa@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
       dockerImageId:
         'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
     });
@@ -67,8 +66,7 @@ describe('compose/images', () => {
     const images = [
       createImage(
         {
-          Id:
-            'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
+          Id: 'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
         },
         {
           References: [
@@ -108,8 +106,7 @@ describe('compose/images', () => {
     const images = [
       createImage(
         {
-          Id:
-            'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
+          Id: 'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
         },
         {
           References: ['some-image:some-tag'],
@@ -149,8 +146,7 @@ describe('compose/images', () => {
     const images = [
       createImage(
        {
-          Id:
-            'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
+          Id: 'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
         },
         {
           References: [
@@ -186,8 +182,7 @@ describe('compose/images', () => {
     const images = [
       createImage(
         {
-          Id:
-            'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
+          Id: 'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
         },
         {
           References: [
@@ -262,8 +257,7 @@ describe('compose/images', () => {
         dockerImageId: 'sha256:second-image-id',
       }),
       createDBImage({
-        name:
-          'registry2.balena-cloud.com/v2/three@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf558',
+        name: 'registry2.balena-cloud.com/v2/three@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf558',
         serviceName: 'app_3',
         // Third image has different name but same docker id
         dockerImageId: 'sha256:second-image-id',
@@ -381,8 +375,7 @@ describe('compose/images', () => {
   it('removes image from DB and engine when there is a single DB image with matching name', async () => {
     // Newer image
     const imageToRemove = createDBImage({
-      name:
-        'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+      name: 'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
       dockerImageId: 'sha256:image-id-one',
     });

@@ -390,8 +383,7 @@ describe('compose/images', () => {
     await testDb.models('image').insert([
       imageToRemove,
       createDBImage({
-        name:
-          'registry2.balena-cloud.com/v2/two@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+        name: 'registry2.balena-cloud.com/v2/two@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
         dockerImageId: 'sha256:image-id-two',
       }),
     ]);
@@ -469,14 +461,12 @@ describe('compose/images', () => {

   it('removes the requested image even when there are multiple DB images with same docker ID', async () => {
     const imageToRemove = createDBImage({
-      name:
-        'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+      name: 'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
       dockerImageId: 'sha256:image-id-one',
     });

     const imageWithSameDockerImageId = createDBImage({
-      name:
-        'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+      name: 'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
       // Same imageId
       dockerImageId: 'sha256:image-id-one',
     });
@@ -554,14 +544,12 @@ describe('compose/images', () => {

   it('removes image from DB by tag when deltas are being used', async () => {
     const imageToRemove = createDBImage({
-      name:
-        'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+      name: 'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
      dockerImageId: 'sha256:image-one-id',
     });

     const imageWithSameDockerImageId = createDBImage({
-      name:
-        'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
+      name: 'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
       // Same docker id
       dockerImageId: 'sha256:image-one-id',
     });
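Note (not part of the diff): all of the hunks in this spec join a long string literal back onto the same line as its property key; the newer formatter no longer breaks before a string value even when the line exceeds the print width, since the string itself cannot be wrapped (an inference from the diff; the formatter version isn't stated here). A sketch of the two layouts with a made-up registry name and digest:

// Illustrative only; the registry and digest below are invented.
interface DBImage {
  name: string;
  dockerImageId: string;
}

// Previous output: break after the key once the value pushes past the print width.
const oldLayout: DBImage = {
  name:
    'registry2.example.com/v2/aaaaa@sha256:0000000000000000000000000000000000000000000000000000000000000000',
  dockerImageId: 'sha256:f1154d76c731f0',
};

// New output: the literal stays on the same line as the key, even if it overflows.
const newLayout: DBImage = {
  name: 'registry2.example.com/v2/aaaaa@sha256:0000000000000000000000000000000000000000000000000000000000000000',
  dockerImageId: 'sha256:f1154d76c731f0',
};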
@@ -40,12 +40,12 @@ describe('lib/update-lock', () => {

   // TODO: Remove these hooks when we don't need './test/data' as test process's rootMountPoint
   before(() => {
-    // @ts-ignore // Set rootMountPoint for mockFs
+    // @ts-expect-error // Set rootMountPoint for mockFs
     constants.rootMountPoint = '/mnt/root';
   });

   after(() => {
-    // @ts-ignore
+    // @ts-expect-error
     constants.rootMountPoint = process.env.ROOT_MOUNTPOINT;
   });

@@ -125,7 +125,7 @@ describe('lib/update-lock', () => {
     lockSpy = spy(lockfile, 'lock');
     // lockfile.lock calls exec to interface with the lockfile binary,
     // so mock it here as we don't have access to the binary in the test env
-    // @ts-ignore
+    // @ts-expect-error
     execStub = stub(fsUtils, 'exec').callsFake(async (command, opts) => {
       // Sanity check for the command call
       expect(command.trim().startsWith('lockfile')).to.be.true;
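Note (not part of the diff): the `@ts-ignore` → `@ts-expect-error` swaps above, and the ones in the database test helper below, follow the stricter rule set in the updated lint config. Both directives silence the error on the following line, but `@ts-expect-error` itself becomes a compile error once that line stops failing, so stale suppressions cannot linger. A minimal sketch (the `config` object is invented for illustration):

const config = { value: 1 } as const;

// Like @ts-ignore, this silences the error on the next line (assigning to a readonly property)...
// @ts-expect-error writing to a readonly property
config.value = 2;

// ...but unlike @ts-ignore, TypeScript reports "Unused '@ts-expect-error' directive."
// if the suppressed line ever compiles cleanly, which keeps suppressions honest.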
@@ -9,7 +9,7 @@ export async function createDB() {
   // for testing we use an in memory database
   process.env.DATABASE_PATH = ':memory:';

-  // @ts-ignore
+  // @ts-expect-error
   constants.databasePath = process.env.DATABASE_PATH;

   // Cleanup the module cache in order to have it reloaded in the local context
@@ -71,7 +71,7 @@ export async function createDB() {
   (db.upsertModel as sinon.SinonStub).restore();

   // Restore the constants
-  // @ts-ignore
+  // @ts-expect-error
   constants.databasePath = process.env.DATABASE_PATH;

   // Cleanup the module cache in order to have it reloaded
@@ -37,7 +37,7 @@ export function setImages(images: Image[]) {

 function stubImages() {
   // Set the functions for this model (add them as you need for your test cases)
-  MOCKED_MODELS['image'] = ({
+  MOCKED_MODELS['image'] = {
     select: () => {
       return {
         where: async (condition: Partial<Image>) =>
@@ -64,5 +64,5 @@ function stubImages() {
       },
     };
   },
-  } as unknown) as QueryBuilder;
+  } as unknown as QueryBuilder;
 }
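Note (not part of the diff): the change above rewrites the parenthesised double cast `({...} as unknown) as QueryBuilder` as the consecutive-assertion form `{...} as unknown as QueryBuilder`. The two are semantically identical (widen to `unknown`, then assert the target type); the updated formatter simply drops the parentheses. A small sketch with an invented `QueryBuilderish` interface:

interface QueryBuilderish {
  select(columns?: string[]): Promise<unknown[]>;
  where(condition: object): QueryBuilderish;
}

// A partial stub implementing only what a test needs.
const partialStub = {
  select: async () => [],
};

// Old style: extra parentheses around the first assertion.
const viaParens = (partialStub as unknown) as QueryBuilderish;

// New style: chained assertions, same meaning, no parentheses.
const viaChain = partialStub as unknown as QueryBuilderish;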
@@ -94,8 +94,7 @@ const mockService = (overrides?: Partial<Service>) => {
 const mockImage = (overrides?: Partial<Image>) => {
   return {
     ...{
-      name:
-        'registry2.balena-cloud.com/v2/e2bf6410ffc30850e96f5071cdd1dca8@sha256:e2e87a8139b8fc14510095b210ad652d7d5badcc64fdc686cbf749d399fba15e',
+      name: 'registry2.balena-cloud.com/v2/e2bf6410ffc30850e96f5071cdd1dca8@sha256:e2e87a8139b8fc14510095b210ad652d7d5badcc64fdc686cbf749d399fba15e',
       appId: 1658654,
       serviceName: 'main',
       imageId: 2885946,
@@ -75,7 +75,7 @@ registerOverride(
 export function registerOverride<
   T extends DockerodeFunction,
   P extends Parameters<dockerode[T]>,
-  R extends ReturnType<dockerode[T]>
+  R extends ReturnType<dockerode[T]>,
 >(name: T, fn: (...args: P) => R) {
   console.log(`Overriding ${name}...`);
   overrides[name] = fn;
@@ -207,7 +207,7 @@ function createMockedDockerode(data: TestData) {
   return mockedDockerode;
 }

-type Prototype = Dictionary<(...args: any[]) => any>;
+type Prototype = { [key: string]: any };
 function clonePrototype(prototype: Prototype): Prototype {
   const clone: Prototype = {};
   Object.getOwnPropertyNames(prototype).forEach((fn) => {
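Note (not part of the diff): the `registerOverride` hunk adds a trailing comma after the last type parameter, matching how the updated formatter already treats multi-line argument and element lists (an inference from the diff; the exact formatter settings aren't shown). A sketch with illustrative names:

// Trailing comma after the final type parameter in a multi-line list.
function pickField<
  T extends object,
  K extends keyof T,
>(obj: T, key: K): T[K] {
  return obj[key];
}

// Usage
const port = pickField({ host: 'localhost', port: 8080 }, 'port'); // 8080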
@@ -11,29 +11,25 @@ type DeepPartial<T> = {
 };

 // Partial container inspect info for receiving as testing data
-export type PartialContainerInspectInfo = DeepPartial<
-  dockerode.ContainerInspectInfo
-> & {
-  Id: string;
-};
+export type PartialContainerInspectInfo =
+  DeepPartial<dockerode.ContainerInspectInfo> & {
+    Id: string;
+  };

-export type PartialNetworkInspectInfo = DeepPartial<
-  dockerode.NetworkInspectInfo
-> & {
-  Id: string;
-};
+export type PartialNetworkInspectInfo =
+  DeepPartial<dockerode.NetworkInspectInfo> & {
+    Id: string;
+  };

-export type PartialVolumeInspectInfo = DeepPartial<
-  dockerode.VolumeInspectInfo
-> & {
-  Name: string;
-};
+export type PartialVolumeInspectInfo =
+  DeepPartial<dockerode.VolumeInspectInfo> & {
+    Name: string;
+  };

-export type PartialImageInspectInfo = DeepPartial<
-  dockerode.ImageInspectInfo
-> & {
-  Id: string;
-};
+export type PartialImageInspectInfo =
+  DeepPartial<dockerode.ImageInspectInfo> & {
+    Id: string;
+  };

 type Methods<T> = {
   [K in keyof T]: T[K] extends (...args: any) => any ? T[K] : never;
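Note (not part of the diff): these aliases combine a recursive `DeepPartial` of dockerode's inspect types with a few required identity fields; only the line wrapping changes here. For reference, a `DeepPartial` helper of the shape the hunk header hints at is commonly written as below (the repo's exact definition isn't shown in this excerpt, so treat it as an assumption):

// A typical recursive partial: every property, at every depth, becomes optional.
type DeepPartial<T> = {
  [P in keyof T]?: T[P] extends object ? DeepPartial<T[P]> : T[P];
};

// Combined with required keys, as the aliases above do:
type PartialThing = DeepPartial<{ Id: string; Config: { Image: string } }> & {
  Id: string; // callers must still provide the identifier
};

const example: PartialThing = { Id: 'abc' }; // Config may be omitted entirely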
@@ -47,7 +43,9 @@ function createFake<Prototype extends object>(prototype: Prototype) {
       ...res,
       [fn]: () => {
         throw Error(
-          `Fake method not implemented: ${prototype.constructor.name}.${fn}()`,
+          `Fake method not implemented: ${
+            prototype.constructor.name
+          }.${fn.toString()}()`,
         );
       },
     }),
@@ -318,14 +316,8 @@ export function createImage(
 const createImageInspectInfo = (
   partialImage: PartialImageInspectInfo,
 ): dockerode.ImageInspectInfo => {
-  const {
-    Id,
-    ContainerConfig,
-    Config,
-    GraphDriver,
-    RootFS,
-    ...Info
-  } = partialImage;
+  const { Id, ContainerConfig, Config, GraphDriver, RootFS, ...Info } =
+    partialImage;

   return {
     Id,
@@ -903,9 +895,9 @@ export class MockEngine {
 }

 export function createMockerode(engine: MockEngine) {
-  const dockerodeStubs: Stubs<dockerode> = (Object.getOwnPropertyNames(
-    dockerode.prototype,
-  ) as (keyof dockerode)[])
+  const dockerodeStubs: Stubs<dockerode> = (
+    Object.getOwnPropertyNames(dockerode.prototype) as (keyof dockerode)[]
+  )
     .filter((fn) => typeof dockerode.prototype[fn] === 'function')
     .reduce((stubMap, fn) => {
       const stub = sinon.stub(dockerode.prototype, fn);
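Note (not part of the diff): the `createMockerode` hunk only re-wraps a cast, but the surrounding pattern, enumerating a class prototype and replacing every method with a sinon stub, is the technique worth knowing. A stripped-down sketch under the assumption of a plain `Engine` class (the real code targets `dockerode` and a `Stubs<T>` helper type not shown here):

import * as sinon from 'sinon';

class Engine {
  listImages() {
    return ['real-image'];
  }
  listContainers() {
    return ['real-container'];
  }
}

// Enumerate the prototype, skip the constructor, and stub every method,
// keeping the stubs in a map so tests can program and later restore them.
const stubMap: Record<string, sinon.SinonStub> = {};
for (const name of Object.getOwnPropertyNames(Engine.prototype)) {
  if (name === 'constructor') {
    continue;
  }
  stubMap[name] = sinon.stub(Engine.prototype, name as keyof Engine);
}

stubMap.listImages.returns(['fake-image']); // per-test behaviour
// ...run the code under test...
Object.values(stubMap).forEach((stub) => stub.restore());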
@@ -24,13 +24,13 @@ export = async function () {

   try {
     fs.unlinkSync(process.env.DATABASE_PATH_2!);
-  } catch (e) {
+  } catch {
     /* ignore /*/
   }

   try {
     fs.unlinkSync(process.env.DATABASE_PATH_3!);
-  } catch (e) {
+  } catch {
     /* ignore /*/
   }

@@ -58,7 +58,7 @@ export = async function () {
       './test/data/config-apibinder-offline2.json',
       fs.readFileSync('./test/data/testconfig-apibinder-offline2.json'),
     );
-  } catch (e) {
+  } catch {
     /* ignore /*/
   }

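Note (not part of the diff): dropping the unused `(e)` binding uses the optional catch binding syntax (ES2019, supported by the TypeScript versions on both sides of this update); it silences the unused-variable lint without changing behaviour. A small sketch, using an invented cleanup path:

import * as fs from 'fs';

function removeIfPresent(path: string) {
  try {
    fs.unlinkSync(path);
  } catch {
    // The file may simply not exist; either way there is nothing to handle.
  }
}

removeIfPresent('./tmp/leftover-test.db'); // hypothetical path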
|
@ -155,9 +155,9 @@ describe('lib/fs-utils', () => {
|
|||||||
|
|
||||||
it("should return the paths of one or more files as they exist on host OS's root", async () => {
|
it("should return the paths of one or more files as they exist on host OS's root", async () => {
|
||||||
expect(fsUtils.getPathOnHost(testFileName1)).to.deep.equal(testFile1);
|
expect(fsUtils.getPathOnHost(testFileName1)).to.deep.equal(testFile1);
|
||||||
expect(
|
expect(fsUtils.getPathOnHost(testFileName1, testFileName2)).to.deep.equal(
|
||||||
fsUtils.getPathOnHost(testFileName1, testFileName2),
|
[testFile1, testFile2],
|
||||||
).to.deep.equal([testFile1, testFile2]);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -12,7 +12,6 @@ describe('System information', () => {
|
|||||||
stub(systeminformation, 'mem').resolves(mockMemory);
|
stub(systeminformation, 'mem').resolves(mockMemory);
|
||||||
stub(systeminformation, 'currentLoad').resolves(mockCPU.load);
|
stub(systeminformation, 'currentLoad').resolves(mockCPU.load);
|
||||||
stub(systeminformation, 'cpuTemperature').resolves(mockCPU.temp);
|
stub(systeminformation, 'cpuTemperature').resolves(mockCPU.temp);
|
||||||
// @ts-ignore TS thinks we can't return a buffer...
|
|
||||||
stub(fs, 'readFile').resolves(mockCPU.idBuffer);
|
stub(fs, 'readFile').resolves(mockCPU.idBuffer);
|
||||||
stub(fsUtils, 'exec');
|
stub(fsUtils, 'exec');
|
||||||
});
|
});
|
||||||
|
@@ -38,7 +38,7 @@ const lookForOptionalDeps = function (sourceDir) {
       packageJson = JSON.parse(
         fs.readFileSync(path.join(sourceDir, dir, '/package.json'), 'utf8'),
       );
-    } catch (e) {
+    } catch {
       continue;
     }
     if (packageJson.optionalDependencies != null) {