Mirror of https://github.com/balena-os/balena-supervisor.git (synced 2024-12-24 15:56:40 +00:00)
Commit 3db260aa5c
.lintstagedrc (new file, 10 lines)
@@ -0,0 +1,10 @@
+{
+	"*.coffee": [
+		"resin-lint"
+	],
+	"*.ts": [
+		"prettier --config ./node_modules/resin-lint/config/.prettierrc --write",
+		"resin-lint --typescript --no-prettier",
+		"git add"
+	],
+}
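The new .lintstagedrc maps glob patterns to the commands lint-staged runs against staged files before each commit: staged CoffeeScript files are linted with resin-lint, while staged TypeScript files are rewritten by prettier (using the .prettierrc shipped with resin-lint), linted again with prettier checks disabled, and re-staged with git add. The sketch below only illustrates that mapping; runLintStaged and the example file list are hypothetical stand-ins for what the lint-staged package actually does.

import { execSync } from 'child_process';

// Illustrative only: lint-staged applies each glob's command list, in order,
// to the staged files matching that glob, appending the file names to the command.
const config: { [glob: string]: string[] } = {
	'*.coffee': ['resin-lint'],
	'*.ts': [
		'prettier --config ./node_modules/resin-lint/config/.prettierrc --write',
		'resin-lint --typescript --no-prettier',
		'git add',
	],
};

function runLintStaged(stagedFiles: string[]): void {
	for (const [glob, commands] of Object.entries(config)) {
		// Real lint-staged does proper glob matching; an extension check is enough here.
		const matching = stagedFiles.filter(file => file.endsWith(glob.slice(1)));
		if (matching.length === 0) {
			continue;
		}
		for (const command of commands) {
			execSync(`${command} ${matching.join(' ')}`, { stdio: 'inherit' });
		}
	}
}

// Hypothetical staged files for one commit.
runLintStaged(['src/compose/network.ts', 'src/application-manager.coffee']);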
package.json (11 lines changed)
@@ -10,7 +10,11 @@
 	"scripts": {
 		"start": "./entry.sh",
 		"build": "webpack",
-		"lint": "resin-lint --typescript src/ test/",
+		"precommit": "lint-staged",
+		"prettify": "prettier --config ./node_modules/resin-lint/config/.prettierrc --write \"{src,test,typings}/**/*.ts\"",
+		"lint:coffee": "resin-lint src/ test/",
+		"lint:typescript": "resin-lint --typescript src/ test/ typings/ && tsc --noEmit",
+		"lint": "npm run lint:coffee && npm run lint:typescript",
 		"test": "npm run lint && JUNIT_REPORT_PATH=report.xml mocha --exit -r ts-node/register -r coffee-script/register -r register-coffee-coverage test/*.{js,coffee} && npm run coverage",
 		"test:fast": "mocha --exit -r ts-node/register -r coffee-script/register test/*.{js,coffee}",
 		"test:build": "tsc && coffee -m -c -o build . && cp -r test/data build/test/ && cp -r src/migrations build/src && cp package.json build",
@@ -52,9 +56,11 @@
 		"duration-js": "^4.0.0",
 		"event-stream": "^3.0.20",
 		"express": "^4.0.0",
+		"husky": "^1.1.3",
 		"istanbul": "^0.4.5",
 		"json-mask": "^0.3.8",
 		"knex": "~0.12.3",
+		"lint-staged": "^8.0.4",
 		"lockfile": "^1.0.1",
 		"lodash": "^4.16.3",
 		"log-timestamp": "^0.1.2",
@@ -68,9 +74,10 @@
 		"node-loader": "^0.6.0",
 		"null-loader": "^0.1.1",
 		"pinejs-client": "^2.4.0",
+		"prettier": "^1.14.3",
 		"register-coffee-coverage": "0.0.1",
 		"request": "^2.51.0",
-		"resin-lint": "^1.5.7",
+		"resin-lint": "^2.0.1",
 		"resin-register-device": "^3.0.0",
 		"resin-sync": "^9.3.0",
 		"resumable-request": "^2.0.0",
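Taken together, the package.json changes wire up the pre-commit workflow: husky (^1.1.3) installs git hooks so that the new precommit script runs lint-staged before each commit, lint is split into lint:coffee and lint:typescript (the latter also type-checks with tsc --noEmit), and prettify formats the whole tree with the .prettierrc bundled in resin-lint. Below is a minimal sketch of that formatting step using prettier's Node API; the real scripts invoke the prettier CLI, and the file path here is only an example.

import * as fs from 'fs';
import * as prettier from 'prettier';

// Format one TypeScript file roughly the way the "prettify" npm script would,
// resolving options from resin-lint's bundled .prettierrc.
async function prettify(filePath: string): Promise<void> {
	const source = fs.readFileSync(filePath, 'utf8');
	const options = await prettier.resolveConfig(filePath, {
		config: './node_modules/resin-lint/config/.prettierrc',
	});
	const formatted = prettier.format(source, {
		...(options || {}),
		parser: 'typescript',
	});
	fs.writeFileSync(filePath, formatted);
}

prettify('src/compose/service.ts').catch(console.error);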
src/application-manager.d.ts (vendored, 7 lines changed)
@@ -25,7 +25,6 @@ declare interface Application {
 // This is a non-exhaustive typing for ApplicationManager to avoid
 // having to recode the entire class (and all requirements in TS).
 export class ApplicationManager extends EventEmitter {
-
 	// These probably could be typed, but the types are so messy that we're
 	// best just waiting for the relevant module to be recoded in typescript.
 	// At least any types we can be sure of then.
@@ -46,12 +45,14 @@ export class ApplicationManager extends EventEmitter {
 	// TODO: This actually returns an object, but we don't need the values just yet
 	public setTargetVolatileForService(serviceId: number, opts: Options): void;

-	public executeStepAction(serviceAction: ServiceAction, opts: Options): Promise<void>;
+	public executeStepAction(
+		serviceAction: ServiceAction,
+		opts: Options,
+	): Promise<void>;

 	public getStatus(): Promise<DeviceApplicationState>;

 	public serviceNameFromId(serviceId: number): Promise<string>;

 }

 export default ApplicationManager;
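This declaration file exists so TypeScript modules can consume the CoffeeScript-implemented ApplicationManager with type checking; the change to executeStepAction above is purely a prettier reformat of its signature. A hedged usage sketch based only on the members declared above follows — the applications instance and the helper are hypothetical, not code from the supervisor.

import ApplicationManager from './application-manager';

// Hypothetical: in the supervisor the instance is constructed elsewhere and
// passed to the modules that need it.
declare const applications: ApplicationManager;

async function logServiceName(serviceId: number): Promise<void> {
	// serviceNameFromId and getStatus are among the methods typed in this file.
	const name = await applications.serviceNameFromId(serviceId);
	const state = await applications.getStatus();
	console.log(`Service ${serviceId} is named ${name}`, state);
}

logServiceName(1).catch(console.error);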
@ -10,10 +10,7 @@ export class InvalidNetworkNameError extends TypedError {
|
||||
}
|
||||
|
||||
export class ResourceRecreationAttemptError extends TypedError {
|
||||
public constructor(
|
||||
public resource: string,
|
||||
public name: string,
|
||||
) {
|
||||
public constructor(public resource: string, public name: string) {
|
||||
super(
|
||||
`Trying to create ${resource} with name: ${name}, but a ${resource} ` +
|
||||
'with that name and a different configuration already exists',
|
||||
|
@ -18,55 +18,73 @@ export class NetworkManager {
|
||||
}
|
||||
|
||||
public getAll(): Bluebird<Network[]> {
|
||||
return this.getWithBothLabels()
|
||||
.map((network: { Name: string }) => {
|
||||
return this.docker.getNetwork(network.Name).inspect()
|
||||
.then((net) => {
|
||||
return Network.fromDockerNetwork({
|
||||
return this.getWithBothLabels().map((network: { Name: string }) => {
|
||||
return this.docker
|
||||
.getNetwork(network.Name)
|
||||
.inspect()
|
||||
.then(net => {
|
||||
return Network.fromDockerNetwork(
|
||||
{
|
||||
docker: this.docker,
|
||||
logger: this.logger,
|
||||
}, net);
|
||||
},
|
||||
net,
|
||||
);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public getAllByAppId(appId: number): Bluebird<Network[]> {
|
||||
return this.getAll()
|
||||
.filter((network: Network) => network.appId === appId);
|
||||
return this.getAll().filter((network: Network) => network.appId === appId);
|
||||
}
|
||||
|
||||
public get(network: { name: string, appId: number }): Bluebird<Network> {
|
||||
return Network.fromNameAndAppId({
|
||||
public get(network: { name: string; appId: number }): Bluebird<Network> {
|
||||
return Network.fromNameAndAppId(
|
||||
{
|
||||
logger: this.logger,
|
||||
docker: this.docker,
|
||||
}, network.name, network.appId);
|
||||
},
|
||||
network.name,
|
||||
network.appId,
|
||||
);
|
||||
}
|
||||
|
||||
public supervisorNetworkReady(): Bluebird<boolean> {
|
||||
return Bluebird.resolve(fs.stat(`/sys/class/net/${constants.supervisorNetworkInterface}`))
|
||||
return Bluebird.resolve(
|
||||
fs.stat(`/sys/class/net/${constants.supervisorNetworkInterface}`),
|
||||
)
|
||||
.then(() => {
|
||||
return this.docker.getNetwork(constants.supervisorNetworkInterface).inspect();
|
||||
return this.docker
|
||||
.getNetwork(constants.supervisorNetworkInterface)
|
||||
.inspect();
|
||||
})
|
||||
.then((network) => {
|
||||
return network.Options['com.docker.network.bridge.name'] ===
|
||||
constants.supervisorNetworkInterface;
|
||||
.then(network => {
|
||||
return (
|
||||
network.Options['com.docker.network.bridge.name'] ===
|
||||
constants.supervisorNetworkInterface
|
||||
);
|
||||
})
|
||||
.catchReturn(NotFoundError, false)
|
||||
.catchReturn(ENOENT, false);
|
||||
}
|
||||
|
||||
public ensureSupervisorNetwork(): Bluebird<void> {
|
||||
|
||||
const removeIt = () => {
|
||||
return Bluebird.resolve(this.docker.getNetwork(constants.supervisorNetworkInterface).remove())
|
||||
.then(() => {
|
||||
return Bluebird.resolve(
|
||||
this.docker.getNetwork(constants.supervisorNetworkInterface).remove(),
|
||||
).then(() => {
|
||||
this.docker.getNetwork(constants.supervisorNetworkInterface).inspect();
|
||||
});
|
||||
};
|
||||
|
||||
return Bluebird.resolve(this.docker.getNetwork(constants.supervisorNetworkInterface).inspect())
|
||||
.then((net) => {
|
||||
if (net.Options['com.docker.network.bridge.name'] !== constants.supervisorNetworkInterface) {
|
||||
return Bluebird.resolve(
|
||||
this.docker.getNetwork(constants.supervisorNetworkInterface).inspect(),
|
||||
)
|
||||
.then(net => {
|
||||
if (
|
||||
net.Options['com.docker.network.bridge.name'] !==
|
||||
constants.supervisorNetworkInterface
|
||||
) {
|
||||
return removeIt();
|
||||
} else {
|
||||
return Bluebird.resolve(
|
||||
@ -78,12 +96,15 @@ export class NetworkManager {
|
||||
})
|
||||
.catch(NotFoundError, () => {
|
||||
console.log(`Creating ${constants.supervisorNetworkInterface} network`);
|
||||
return Bluebird.resolve(this.docker.createNetwork({
|
||||
return Bluebird.resolve(
|
||||
this.docker.createNetwork({
|
||||
Name: constants.supervisorNetworkInterface,
|
||||
Options: {
|
||||
'com.docker.network.bridge.name': constants.supervisorNetworkInterface,
|
||||
'com.docker.network.bridge.name':
|
||||
constants.supervisorNetworkInterface,
|
||||
},
|
||||
}));
|
||||
}),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@ -104,5 +125,4 @@ export class NetworkManager {
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -2,10 +2,7 @@ import * as Bluebird from 'bluebird';
|
||||
import * as _ from 'lodash';
|
||||
|
||||
import Docker = require('../lib/docker-utils');
|
||||
import {
|
||||
InvalidAppIdError,
|
||||
NotFoundError,
|
||||
} from '../lib/errors';
|
||||
import { InvalidAppIdError, NotFoundError } from '../lib/errors';
|
||||
import logTypes = require('../lib/log-types');
|
||||
import { checkInt } from '../lib/validation';
|
||||
import { Logger } from '../logger';
|
||||
@ -30,7 +27,6 @@ export interface NetworkOptions {
|
||||
}
|
||||
|
||||
export class Network {
|
||||
|
||||
public appId: number;
|
||||
public name: string;
|
||||
public config: NetworkConfig;
|
||||
@ -66,7 +62,7 @@ export class Network {
|
||||
driver: network.Driver,
|
||||
ipam: {
|
||||
driver: network.IPAM.Driver,
|
||||
config: _.map(network.IPAM.Config, (conf) => {
|
||||
config: _.map(network.IPAM.Config, conf => {
|
||||
const newConf: NetworkConfig['ipam']['config'][0] = {
|
||||
subnet: conf.Subnet,
|
||||
gateway: conf.Gateway,
|
||||
@ -84,7 +80,9 @@ export class Network {
|
||||
},
|
||||
enableIPv6: network.EnableIPv6,
|
||||
internal: network.Internal,
|
||||
labels: _.omit(ComposeUtils.normalizeLabels(network.Labels), [ 'io.balena.supervised' ]),
|
||||
labels: _.omit(ComposeUtils.normalizeLabels(network.Labels), [
|
||||
'io.balena.supervised',
|
||||
]),
|
||||
options: network.Options,
|
||||
};
|
||||
|
||||
@ -132,10 +130,12 @@ export class Network {
|
||||
}
|
||||
|
||||
public create(): Bluebird<void> {
|
||||
this.logger.logSystemEvent(logTypes.createNetwork, { network: { name: this.name } });
|
||||
this.logger.logSystemEvent(logTypes.createNetwork, {
|
||||
network: { name: this.name },
|
||||
});
|
||||
|
||||
return Network.fromNameAndAppId(this.networkOpts, this.name, this.appId)
|
||||
.then((current) => {
|
||||
.then(current => {
|
||||
if (!this.isEqualConfig(current)) {
|
||||
throw new ResourceRecreationAttemptError('network', this.name);
|
||||
}
|
||||
@ -146,7 +146,7 @@ export class Network {
|
||||
.catch(NotFoundError, () => {
|
||||
return this.docker.createNetwork(this.toDockerConfig());
|
||||
})
|
||||
.tapCatch((err) => {
|
||||
.tapCatch(err => {
|
||||
this.logger.logSystemEvent(logTypes.createNetworkError, {
|
||||
network: { name: this.name, appId: this.appId },
|
||||
error: err,
|
||||
@ -161,7 +161,7 @@ export class Network {
|
||||
CheckDuplicate: true,
|
||||
IPAM: {
|
||||
Driver: this.config.ipam.driver,
|
||||
Config: _.map(this.config.ipam.config, (conf) => {
|
||||
Config: _.map(this.config.ipam.config, conf => {
|
||||
const ipamConf: DockerIPAMConfig = {
|
||||
Subnet: conf.subnet,
|
||||
Gateway: conf.gateway,
|
||||
@ -178,30 +178,32 @@ export class Network {
|
||||
},
|
||||
EnableIPv6: this.config.enableIPv6,
|
||||
Internal: this.config.internal,
|
||||
Labels: _.merge({}, {
|
||||
Labels: _.merge(
|
||||
{},
|
||||
{
|
||||
'io.balena.supervised': 'true',
|
||||
}, this.config.labels),
|
||||
},
|
||||
this.config.labels,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
public remove(): Bluebird<void> {
|
||||
this.logger.logSystemEvent(
|
||||
logTypes.removeNetwork,
|
||||
{ network: { name: this.name, appId: this.appId } },
|
||||
);
|
||||
|
||||
return Bluebird.resolve(this.docker.getNetwork(this.getDockerName()).remove())
|
||||
.tapCatch((error) => {
|
||||
this.logger.logSystemEvent(
|
||||
logTypes.createNetworkError,
|
||||
{ network: { name: this.name, appId: this.appId }, error },
|
||||
);
|
||||
this.logger.logSystemEvent(logTypes.removeNetwork, {
|
||||
network: { name: this.name, appId: this.appId },
|
||||
});
|
||||
|
||||
return Bluebird.resolve(
|
||||
this.docker.getNetwork(this.getDockerName()).remove(),
|
||||
).tapCatch(error => {
|
||||
this.logger.logSystemEvent(logTypes.createNetworkError, {
|
||||
network: { name: this.name, appId: this.appId },
|
||||
error,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public isEqualConfig(network: Network): boolean {
|
||||
|
||||
// don't compare the ipam.config if it's not present
|
||||
// in the target state (as it will be present in the
|
||||
// current state, due to docker populating it with
|
||||
@ -221,15 +223,12 @@ export class Network {
|
||||
|
||||
private static validateComposeConfig(config: NetworkConfig): void {
|
||||
// Check if every ipam config entry has both a subnet and a gateway
|
||||
_.each(
|
||||
_.get(config, 'config.ipam.config', []),
|
||||
({ subnet, gateway }) => {
|
||||
_.each(_.get(config, 'config.ipam.config', []), ({ subnet, gateway }) => {
|
||||
if (subnet == null || gateway == null) {
|
||||
throw new InvalidNetworkConfigurationError(
|
||||
'Network IPAM config entries must have both a subnet and gateway',
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2,8 +2,7 @@ import * as _ from 'lodash';
|
||||
import TypedError = require('typed-error');
|
||||
|
||||
// Adapted from https://github.com/docker/docker-py/blob/master/docker/utils/ports.py#L3
|
||||
const PORTS_REGEX =
|
||||
/^(?:(?:([a-fA-F\d.:]+):)?([\d]*)(?:-([\d]+))?:)?([\d]+)(?:-([\d]+))?(?:\/(udp|tcp))?$/;
|
||||
const PORTS_REGEX = /^(?:(?:([a-fA-F\d.:]+):)?([\d]*)(?:-([\d]+))?:)?([\d]+)(?:-([\d]+))?(?:\/(udp|tcp))?$/;
|
||||
|
||||
// A regex to extract the protocol and internal port of the incoming Docker options
|
||||
const DOCKER_OPTS_PORTS_REGEX = /(\d+)(?:\/?([a-z]+))?/i;
|
||||
@ -11,7 +10,7 @@ const DOCKER_OPTS_PORTS_REGEX = /(\d+)(?:\/?([a-z]+))?/i;
|
||||
export class InvalidPortDefinition extends TypedError {}
|
||||
|
||||
export interface PortBindings {
|
||||
[key: string]: Array<{ HostIp: string, HostPort: string }>;
|
||||
[key: string]: Array<{ HostIp: string; HostPort: string }>;
|
||||
}
|
||||
|
||||
export interface DockerPortOptions {
|
||||
@ -29,7 +28,6 @@ interface PortRange {
|
||||
}
|
||||
|
||||
export class PortMap {
|
||||
|
||||
private ports: PortRange;
|
||||
|
||||
public constructor(portStrOrObj: string | PortRange) {
|
||||
@ -41,8 +39,14 @@ export class PortMap {
|
||||
}
|
||||
|
||||
public toDockerOpts(): DockerPortOptions {
|
||||
const internalRange = this.generatePortRange(this.ports.internalStart, this.ports.internalEnd);
|
||||
const externalRange = this.generatePortRange(this.ports.externalStart, this.ports.externalEnd);
|
||||
const internalRange = this.generatePortRange(
|
||||
this.ports.internalStart,
|
||||
this.ports.internalEnd,
|
||||
);
|
||||
const externalRange = this.generatePortRange(
|
||||
this.ports.externalStart,
|
||||
this.ports.externalEnd,
|
||||
);
|
||||
|
||||
const exposedPorts: { [key: string]: {} } = {};
|
||||
const portBindings: PortBindings = {};
|
||||
@ -62,8 +66,11 @@ export class PortMap {
|
||||
}
|
||||
|
||||
public toExposedPortArray(): string[] {
|
||||
const internalRange = this.generatePortRange(this.ports.internalStart, this.ports.internalEnd);
|
||||
return _.map(internalRange, (internal) => {
|
||||
const internalRange = this.generatePortRange(
|
||||
this.ports.internalStart,
|
||||
this.ports.internalEnd,
|
||||
);
|
||||
return _.map(internalRange, internal => {
|
||||
return `${internal}/${this.ports.protocol}`;
|
||||
});
|
||||
}
|
||||
@ -80,14 +87,10 @@ export class PortMap {
|
||||
* and produces a list of PortMap objects, which can then be compared.
|
||||
*
|
||||
*/
|
||||
public static fromDockerOpts(
|
||||
portBindings: PortBindings,
|
||||
): PortMap[] {
|
||||
|
||||
public static fromDockerOpts(portBindings: PortBindings): PortMap[] {
|
||||
// Create a list of portBindings, rather than the map (which we can't
|
||||
// order)
|
||||
const portMaps = _.map(portBindings, (hostObj, internalStr) => {
|
||||
|
||||
const match = internalStr.match(DOCKER_OPTS_PORTS_REGEX);
|
||||
if (match == null) {
|
||||
throw new Error(`Could not parse docker port output: ${internalStr}`);
|
||||
@ -114,13 +117,14 @@ export class PortMap {
|
||||
public static normalisePortMaps(portMaps: PortMap[]): PortMap[] {
|
||||
// Fold any ranges into each other if possible
|
||||
return _(portMaps)
|
||||
.sortBy((p) => p.ports.protocol)
|
||||
.sortBy((p) => p.ports.host)
|
||||
.sortBy((p) => p.ports.internalStart)
|
||||
.sortBy(p => p.ports.protocol)
|
||||
.sortBy(p => p.ports.host)
|
||||
.sortBy(p => p.ports.internalStart)
|
||||
.reduce((res: PortMap[], p: PortMap) => {
|
||||
const last = _.last(res);
|
||||
|
||||
if (last != null &&
|
||||
if (
|
||||
last != null &&
|
||||
last.ports.internalEnd + 1 === p.ports.internalStart &&
|
||||
last.ports.externalEnd + 1 === p.ports.externalStart &&
|
||||
last.ports.protocol === p.ports.protocol &&
|
||||
@ -138,7 +142,9 @@ export class PortMap {
|
||||
private parsePortString(portStr: string): void {
|
||||
const match = portStr.match(PORTS_REGEX);
|
||||
if (match == null) {
|
||||
throw new InvalidPortDefinition(`Could not parse port definition: ${portStr}`);
|
||||
throw new InvalidPortDefinition(
|
||||
`Could not parse port definition: ${portStr}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Ignore the first parameter (the complete match) and separate the matched
|
||||
@ -178,7 +184,10 @@ export class PortMap {
|
||||
};
|
||||
|
||||
// Ensure we have the same range
|
||||
if (this.ports.internalEnd - this.ports.internalStart !== this.ports.externalEnd - this.ports.externalStart) {
|
||||
if (
|
||||
this.ports.internalEnd - this.ports.internalStart !==
|
||||
this.ports.externalEnd - this.ports.externalStart
|
||||
) {
|
||||
throw new InvalidPortDefinition(
|
||||
`Range for internal and external ports does not match: ${portStr}`,
|
||||
);
|
||||
@ -187,7 +196,9 @@ export class PortMap {
|
||||
|
||||
private generatePortRange(start: number, end: number): number[] {
|
||||
if (start > end) {
|
||||
throw new Error('Incorrect port range! The end port cannot be larger than the start port!');
|
||||
throw new Error(
|
||||
'Incorrect port range! The end port cannot be larger than the start port!',
|
||||
);
|
||||
}
|
||||
|
||||
return _.range(start, end + 1);
|
||||
|
@ -71,7 +71,11 @@ export function sanitiseComposeConfig(
|
||||
}) as ServiceComposeConfig;
|
||||
|
||||
if (filtered.length > 0) {
|
||||
console.log(`Warning: Ignoring unsupported or unknown compose fields: ${filtered.join(', ')}`);
|
||||
console.log(
|
||||
`Warning: Ignoring unsupported or unknown compose fields: ${filtered.join(
|
||||
', ',
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
|
||||
return toReturn;
|
||||
|
@ -22,7 +22,6 @@ import { sanitiseComposeConfig } from './sanitise';
|
||||
import * as constants from '../lib/constants';
|
||||
|
||||
export class Service {
|
||||
|
||||
public appId: number | null;
|
||||
public imageId: number | null;
|
||||
public config: ServiceConfig;
|
||||
@ -64,8 +63,7 @@ export class Service {
|
||||
'cpus',
|
||||
].concat(Service.configArrayFields);
|
||||
|
||||
private constructor() {
|
||||
}
|
||||
private constructor() {}
|
||||
|
||||
// The type here is actually ServiceComposeConfig, except that the
|
||||
// keys must be camelCase'd first
|
||||
@ -77,7 +75,9 @@ export class Service {
|
||||
|
||||
appConfig = ComposeUtils.camelCaseConfig(appConfig);
|
||||
|
||||
const intOrNull = (val: string | number | null | undefined): number | null => {
|
||||
const intOrNull = (
|
||||
val: string | number | null | undefined,
|
||||
): number | null => {
|
||||
return checkInt(val) || null;
|
||||
};
|
||||
|
||||
@ -114,7 +114,7 @@ export class Service {
|
||||
// First process the networks correctly
|
||||
let networks: ServiceConfig['networks'] = {};
|
||||
if (_.isArray(config.networks)) {
|
||||
_.each(config.networks, (name) => {
|
||||
_.each(config.networks, name => {
|
||||
networks[name] = {};
|
||||
});
|
||||
} else if (_.isObject(config.networks)) {
|
||||
@ -127,17 +127,24 @@ export class Service {
|
||||
// Check for unsupported networkMode entries
|
||||
if (config.networkMode != null) {
|
||||
if (/service:(\s*)?.+/.test(config.networkMode)) {
|
||||
console.log('Warning: A network_mode referencing a service is not yet supported. Ignoring.');
|
||||
console.log(
|
||||
'Warning: A network_mode referencing a service is not yet supported. Ignoring.',
|
||||
);
|
||||
delete config.networkMode;
|
||||
} else if (/container:(\s*)?.+/.test(config.networkMode)) {
|
||||
console.log('Warning: A network_mode referencing a container is not supported. Ignoring.');
|
||||
console.log(
|
||||
'Warning: A network_mode referencing a container is not supported. Ignoring.',
|
||||
);
|
||||
delete config.networkMode;
|
||||
}
|
||||
}
|
||||
|
||||
// memory strings
|
||||
const memLimit = ComposeUtils.parseMemoryNumber(config.memLimit, '0');
|
||||
const memReservation = ComposeUtils.parseMemoryNumber(config.memReservation, '0');
|
||||
const memReservation = ComposeUtils.parseMemoryNumber(
|
||||
config.memReservation,
|
||||
'0',
|
||||
);
|
||||
const shmSize = ComposeUtils.parseMemoryNumber(config.shmSize, '64m');
|
||||
delete config.memLimit;
|
||||
delete config.memReservation;
|
||||
@ -201,13 +208,15 @@ export class Service {
|
||||
service.appId || 0,
|
||||
service.serviceName || '',
|
||||
);
|
||||
config.labels = ComposeUtils.normalizeLabels(Service.extendLabels(
|
||||
config.labels = ComposeUtils.normalizeLabels(
|
||||
Service.extendLabels(
|
||||
config.labels || {},
|
||||
options,
|
||||
service.appId || 0,
|
||||
service.serviceId || 0,
|
||||
service.serviceName || '',
|
||||
));
|
||||
),
|
||||
);
|
||||
|
||||
// Any other special case handling
|
||||
if (config.networkMode === 'host' && !config.hostname) {
|
||||
@ -215,12 +224,24 @@ export class Service {
|
||||
}
|
||||
config.restart = ComposeUtils.createRestartPolicy(config.restart);
|
||||
config.command = ComposeUtils.getCommand(config.command, options.imageInfo);
|
||||
config.entrypoint = ComposeUtils.getEntryPoint(config.entrypoint, options.imageInfo);
|
||||
config.stopSignal = ComposeUtils.getStopSignal(config.stopSignal, options.imageInfo);
|
||||
config.workingDir = ComposeUtils.getWorkingDir(config.workingDir, options.imageInfo);
|
||||
config.entrypoint = ComposeUtils.getEntryPoint(
|
||||
config.entrypoint,
|
||||
options.imageInfo,
|
||||
);
|
||||
config.stopSignal = ComposeUtils.getStopSignal(
|
||||
config.stopSignal,
|
||||
options.imageInfo,
|
||||
);
|
||||
config.workingDir = ComposeUtils.getWorkingDir(
|
||||
config.workingDir,
|
||||
options.imageInfo,
|
||||
);
|
||||
config.user = ComposeUtils.getUser(config.user, options.imageInfo);
|
||||
|
||||
const healthcheck = ComposeUtils.getHealthcheck(config.healthcheck, options.imageInfo);
|
||||
const healthcheck = ComposeUtils.getHealthcheck(
|
||||
config.healthcheck,
|
||||
options.imageInfo,
|
||||
);
|
||||
delete config.healthcheck;
|
||||
|
||||
config.volumes = Service.extendAndSanitiseVolumes(
|
||||
@ -232,7 +253,7 @@ export class Service {
|
||||
|
||||
let portMaps: PortMap[] = [];
|
||||
if (config.ports != null) {
|
||||
portMaps = _.map(config.ports, (p) => new PortMap(p));
|
||||
portMaps = _.map(config.ports, p => new PortMap(p));
|
||||
}
|
||||
delete config.ports;
|
||||
|
||||
@ -241,11 +262,17 @@ export class Service {
|
||||
if (config.expose != null) {
|
||||
expose = _.map(config.expose, ComposeUtils.sanitiseExposeFromCompose);
|
||||
}
|
||||
const imageExposedPorts = _.get(options.imageInfo, 'Config.ExposedPorts', { });
|
||||
const imageExposedPorts = _.get(
|
||||
options.imageInfo,
|
||||
'Config.ExposedPorts',
|
||||
{},
|
||||
);
|
||||
expose = expose.concat(_.keys(imageExposedPorts));
|
||||
expose = _.uniq(expose);
|
||||
// Also add any exposed ports which are implied from the portMaps
|
||||
const exposedFromPortMappings = _.flatMap(portMaps, (port) => port.toExposedPortArray());
|
||||
const exposedFromPortMappings = _.flatMap(portMaps, port =>
|
||||
port.toExposedPortArray(),
|
||||
);
|
||||
expose = expose.concat(exposedFromPortMappings);
|
||||
delete config.expose;
|
||||
|
||||
@ -263,11 +290,11 @@ export class Service {
|
||||
}
|
||||
|
||||
if (_.isArray(config.sysctls)) {
|
||||
config.sysctls = _.fromPairs(_.map(config.sysctls, (v) => _.split(v, '=')));
|
||||
config.sysctls = _.fromPairs(_.map(config.sysctls, v => _.split(v, '=')));
|
||||
}
|
||||
config.sysctls = _.mapValues(config.sysctls, String);
|
||||
|
||||
_.each([ 'cpuShares', 'cpuQuota', 'oomScoreAdj' ], (key)=> {
|
||||
_.each(['cpuShares', 'cpuQuota', 'oomScoreAdj'], key => {
|
||||
const numVal = checkInt(config[key]);
|
||||
if (numVal) {
|
||||
config[key] = numVal;
|
||||
@ -354,7 +381,9 @@ export class Service {
|
||||
return service;
|
||||
}
|
||||
|
||||
public static fromDockerContainer(container: Dockerode.ContainerInspectInfo): Service {
|
||||
public static fromDockerContainer(
|
||||
container: Dockerode.ContainerInspectInfo,
|
||||
): Service {
|
||||
const svc = new Service();
|
||||
|
||||
if (container.State.Running) {
|
||||
@ -379,7 +408,9 @@ export class Service {
|
||||
|
||||
let networks: ServiceConfig['networks'] = {};
|
||||
if (_.get(container, 'NetworkSettings.Networks', null) != null) {
|
||||
networks = ComposeUtils.dockerNetworkToServiceNetwork(container.NetworkSettings.Networks);
|
||||
networks = ComposeUtils.dockerNetworkToServiceNetwork(
|
||||
container.NetworkSettings.Networks,
|
||||
);
|
||||
}
|
||||
|
||||
const ulimits: ServiceConfig['ulimits'] = {};
|
||||
@ -389,11 +420,13 @@ export class Service {
|
||||
|
||||
const portMaps = PortMap.fromDockerOpts(container.HostConfig.PortBindings);
|
||||
let expose = _.flatMap(
|
||||
_.flatMap(portMaps, (p) => p.toDockerOpts().exposedPorts),
|
||||
_.flatMap(portMaps, p => p.toDockerOpts().exposedPorts),
|
||||
_.keys,
|
||||
);
|
||||
if (container.Config.ExposedPorts != null) {
|
||||
expose = expose.concat(_.map(container.Config.ExposedPorts, (_v, k) => k.toString()));
|
||||
expose = expose.concat(
|
||||
_.map(container.Config.ExposedPorts, (_v, k) => k.toString()),
|
||||
);
|
||||
}
|
||||
expose = _.uniq(expose);
|
||||
|
||||
@ -425,12 +458,15 @@ export class Service {
|
||||
hostname,
|
||||
command: container.Config.Cmd || '',
|
||||
entrypoint: container.Config.Entrypoint || '',
|
||||
volumes: _.concat(container.HostConfig.Binds || [], _.keys(container.Config.Volumes || { })),
|
||||
volumes: _.concat(
|
||||
container.HostConfig.Binds || [],
|
||||
_.keys(container.Config.Volumes || {}),
|
||||
),
|
||||
image: container.Config.Image,
|
||||
environment: _.omit(conversions.envArrayToObject(container.Config.Env || [ ]), [
|
||||
'RESIN_DEVICE_NAME_AT_INIT',
|
||||
'BALENA_DEVICE_NAME_AT_INIT',
|
||||
]),
|
||||
environment: _.omit(
|
||||
conversions.envArrayToObject(container.Config.Env || []),
|
||||
['RESIN_DEVICE_NAME_AT_INIT', 'BALENA_DEVICE_NAME_AT_INIT'],
|
||||
),
|
||||
privileged: container.HostConfig.Privileged || false,
|
||||
labels: ComposeUtils.normalizeLabels(container.Config.Labels || {}),
|
||||
running: container.State.Running,
|
||||
@ -488,12 +524,14 @@ export class Service {
|
||||
return svc;
|
||||
}
|
||||
|
||||
public toDockerContainer(opts: { deviceName: string }): Dockerode.ContainerCreateOptions {
|
||||
public toDockerContainer(opts: {
|
||||
deviceName: string;
|
||||
}): Dockerode.ContainerCreateOptions {
|
||||
const { binds, volumes } = this.getBindsAndVolumes();
|
||||
const { exposedPorts, portBindings } = this.generateExposeAndPorts();
|
||||
|
||||
const tmpFs: Dictionary<''> = {};
|
||||
_.each(this.config.tmpfs, (tmp) => {
|
||||
_.each(this.config.tmpfs, tmp => {
|
||||
tmpFs[tmp] = '';
|
||||
});
|
||||
|
||||
@ -509,14 +547,21 @@ export class Service {
|
||||
Volumes: volumes,
|
||||
// Typings are wrong here, the docker daemon accepts a string or string[],
|
||||
Entrypoint: this.config.entrypoint as string,
|
||||
Env: conversions.envObjectToArray(_.assign({
|
||||
Env: conversions.envObjectToArray(
|
||||
_.assign(
|
||||
{
|
||||
RESIN_DEVICE_NAME_AT_INIT: opts.deviceName,
|
||||
BALENA_DEVICE_NAME_AT_INIT: opts.deviceName,
|
||||
}, this.config.environment)),
|
||||
},
|
||||
this.config.environment,
|
||||
),
|
||||
),
|
||||
ExposedPorts: exposedPorts,
|
||||
Image: this.config.image,
|
||||
Labels: this.config.labels,
|
||||
NetworkingConfig: ComposeUtils.serviceNetworksToDockerNetworks(mainNetwork),
|
||||
NetworkingConfig: ComposeUtils.serviceNetworksToDockerNetworks(
|
||||
mainNetwork,
|
||||
),
|
||||
StopSignal: this.config.stopSignal,
|
||||
Domainname: this.config.domainname,
|
||||
Hostname: this.config.hostname,
|
||||
@ -542,8 +587,12 @@ export class Service {
|
||||
PidsLimit: this.config.pidsLimit,
|
||||
SecurityOpt: this.config.securityOpt,
|
||||
Sysctls: this.config.sysctls,
|
||||
Ulimits: ComposeUtils.serviceUlimitsToDockerUlimits(this.config.ulimits),
|
||||
RestartPolicy: ComposeUtils.serviceRestartToDockerRestartPolicy(this.config.restart),
|
||||
Ulimits: ComposeUtils.serviceUlimitsToDockerUlimits(
|
||||
this.config.ulimits,
|
||||
),
|
||||
RestartPolicy: ComposeUtils.serviceRestartToDockerRestartPolicy(
|
||||
this.config.restart,
|
||||
),
|
||||
CpuShares: this.config.cpuShares,
|
||||
CpuQuota: this.config.cpuQuota,
|
||||
// Type missing, and HostConfig isn't defined as a seperate object
|
||||
@ -561,7 +610,9 @@ export class Service {
|
||||
NanoCpus: this.config.cpus,
|
||||
IpcMode: this.config.ipc,
|
||||
} as Dockerode.ContainerCreateOptions['HostConfig'],
|
||||
Healthcheck: ComposeUtils.serviceHealthcheckToDockerHealthcheck(this.config.healthcheck),
|
||||
Healthcheck: ComposeUtils.serviceHealthcheckToDockerHealthcheck(
|
||||
this.config.healthcheck,
|
||||
),
|
||||
StopTimeout: this.config.stopGracePeriod,
|
||||
};
|
||||
}
|
||||
@ -574,21 +625,21 @@ export class Service {
|
||||
sameNetworks = false;
|
||||
return;
|
||||
}
|
||||
sameNetworks = sameNetworks && this.isSameNetwork(this.config.networks[name], network);
|
||||
sameNetworks =
|
||||
sameNetworks && this.isSameNetwork(this.config.networks[name], network);
|
||||
});
|
||||
|
||||
// Check the configuration for any changes
|
||||
const thisOmitted = _.omit(this.config, Service.omitFields);
|
||||
const otherOmitted = _.omit(service.config, Service.omitFields);
|
||||
let sameConfig = _.isEqual(
|
||||
thisOmitted,
|
||||
otherOmitted,
|
||||
);
|
||||
let sameConfig = _.isEqual(thisOmitted, otherOmitted);
|
||||
const nonArrayEquals = sameConfig;
|
||||
|
||||
// Check for array fields which don't match
|
||||
const differentArrayFields: string[] = [];
|
||||
sameConfig = sameConfig && _.every(Service.configArrayFields, (field: ServiceConfigArrayField) => {
|
||||
sameConfig =
|
||||
sameConfig &&
|
||||
_.every(Service.configArrayFields, (field: ServiceConfigArrayField) => {
|
||||
return _.isEmpty(
|
||||
_.xorWith(
|
||||
// TODO: The typings here aren't accepted, even though we
|
||||
@ -609,12 +660,19 @@ export class Service {
|
||||
if (!(sameConfig && sameNetworks)) {
|
||||
// Add some console output for why a service is not matching
|
||||
// so that if we end up in a restart loop, we know exactly why
|
||||
console.log(`Replacing container for service ${this.serviceName} because of config changes:`);
|
||||
console.log(
|
||||
`Replacing container for service ${
|
||||
this.serviceName
|
||||
} because of config changes:`,
|
||||
);
|
||||
if (!nonArrayEquals) {
|
||||
// Try not to leak any sensitive information
|
||||
const diffObj = diff(thisOmitted, otherOmitted) as ServiceConfig;
|
||||
if (diffObj.environment != null) {
|
||||
diffObj.environment = _.mapValues(diffObj.environment, () => 'hidden');
|
||||
diffObj.environment = _.mapValues(
|
||||
diffObj.environment,
|
||||
() => 'hidden',
|
||||
);
|
||||
}
|
||||
console.log(' Non-array fields: ', JSON.stringify(diffObj));
|
||||
}
|
||||
@ -625,7 +683,6 @@ export class Service {
|
||||
if (!sameNetworks) {
|
||||
console.log(' Network changes detected');
|
||||
}
|
||||
|
||||
}
|
||||
return sameNetworks && sameConfig;
|
||||
}
|
||||
@ -635,19 +692,26 @@ export class Service {
|
||||
}
|
||||
|
||||
public isEqualExceptForRunningState(service: Service): boolean {
|
||||
return this.isEqualConfig(service) &&
|
||||
return (
|
||||
this.isEqualConfig(service) &&
|
||||
this.releaseId === service.releaseId &&
|
||||
this.imageId === service.imageId;
|
||||
this.imageId === service.imageId
|
||||
);
|
||||
}
|
||||
|
||||
public isEqual(service: Service): boolean {
|
||||
return this.isEqualExceptForRunningState(service) &&
|
||||
this.config.running === service.config.running;
|
||||
return (
|
||||
this.isEqualExceptForRunningState(service) &&
|
||||
this.config.running === service.config.running
|
||||
);
|
||||
}
|
||||
|
||||
public getNamedVolumes() {
|
||||
const defaults = Service.defaultBinds(this.appId || 0, this.serviceName || '');
|
||||
const validVolumes = _.map(this.config.volumes, (volume) => {
|
||||
const defaults = Service.defaultBinds(
|
||||
this.appId || 0,
|
||||
this.serviceName || '',
|
||||
);
|
||||
const validVolumes = _.map(this.config.volumes, volume => {
|
||||
if (_.includes(defaults, volume) || !_.includes(volume, ':')) {
|
||||
return null;
|
||||
}
|
||||
@ -655,7 +719,9 @@ export class Service {
|
||||
if (!path.isAbsolute(bindSource)) {
|
||||
const match = bindSource.match(/[0-9]+_(.+)/);
|
||||
if (match == null) {
|
||||
console.log('Error: There was an error parsing a volume bind source, ignoring.');
|
||||
console.log(
|
||||
'Error: There was an error parsing a volume bind source, ignoring.',
|
||||
);
|
||||
console.log(' bind source: ', bindSource);
|
||||
return null;
|
||||
}
|
||||
@ -675,16 +741,19 @@ export class Service {
|
||||
}
|
||||
|
||||
private handoverCompletePathOnHost(): string {
|
||||
return path.join(constants.rootMountPoint, updateLock.lockPath(this.appId || 0, this.serviceName || ''));
|
||||
return path.join(
|
||||
constants.rootMountPoint,
|
||||
updateLock.lockPath(this.appId || 0, this.serviceName || ''),
|
||||
);
|
||||
}
|
||||
|
||||
private getBindsAndVolumes(): {
|
||||
binds: string[],
|
||||
volumes: { [volName: string]: { } }
|
||||
binds: string[];
|
||||
volumes: { [volName: string]: {} };
|
||||
} {
|
||||
const binds: string[] = [];
|
||||
const volumes: { [volName: string]: {} } = {};
|
||||
_.each(this.config.volumes, (volume) => {
|
||||
_.each(this.config.volumes, volume => {
|
||||
if (_.includes(volume, ':')) {
|
||||
binds.push(volume);
|
||||
} else {
|
||||
@ -699,7 +768,7 @@ export class Service {
|
||||
const exposed: DockerPortOptions['exposedPorts'] = {};
|
||||
const ports: DockerPortOptions['portBindings'] = {};
|
||||
|
||||
_.each(this.config.portMaps, (pmap) => {
|
||||
_.each(this.config.portMaps, pmap => {
|
||||
const { exposedPorts, portBindings } = pmap.toDockerOpts();
|
||||
_.merge(exposed, exposedPorts);
|
||||
_.merge(ports, portBindings);
|
||||
@ -708,7 +777,7 @@ export class Service {
|
||||
// We also want to merge the compose and image exposedPorts
|
||||
// into the list of exposedPorts
|
||||
const composeExposed: DockerPortOptions['exposedPorts'] = {};
|
||||
_.each(this.config.expose, (port) => {
|
||||
_.each(this.config.expose, port => {
|
||||
composeExposed[port] = {};
|
||||
});
|
||||
_.merge(exposed, composeExposed);
|
||||
@ -724,7 +793,10 @@ export class Service {
|
||||
): { [envVarName: string]: string } {
|
||||
let defaultEnv: { [envVarName: string]: string } = {};
|
||||
for (let namespace of ['BALENA', 'RESIN']) {
|
||||
_.assign(defaultEnv, _.mapKeys({
|
||||
_.assign(
|
||||
defaultEnv,
|
||||
_.mapKeys(
|
||||
{
|
||||
APP_ID: appId.toString(),
|
||||
APP_NAME: options.appName,
|
||||
SERVICE_NAME: serviceName,
|
||||
@ -733,11 +805,15 @@ export class Service {
|
||||
HOST_OS_VERSION: options.osVersion,
|
||||
SUPERVISOR_VERSION: options.version,
|
||||
APP_LOCK_PATH: '/tmp/balena/updates.lock',
|
||||
}, (_val, key) => `${namespace}_${key}`));
|
||||
},
|
||||
(_val, key) => `${namespace}_${key}`,
|
||||
),
|
||||
);
|
||||
defaultEnv[namespace] = '1';
|
||||
}
|
||||
defaultEnv['RESIN_SERVICE_KILL_ME_PATH'] = '/tmp/balena/handover-complete';
|
||||
defaultEnv['BALENA_SERVICE_HANDOVER_COMPLETE_PATH'] = '/tmp/balena/handover-complete';
|
||||
defaultEnv['BALENA_SERVICE_HANDOVER_COMPLETE_PATH'] =
|
||||
'/tmp/balena/handover-complete';
|
||||
defaultEnv['USER'] = 'root';
|
||||
|
||||
let env = _.defaults(environment, defaultEnv);
|
||||
@ -775,18 +851,23 @@ export class Service {
|
||||
.isEqual(targetAliases);
|
||||
} else {
|
||||
// Otherwise compare them literally
|
||||
sameNetwork = _.isEmpty(_.xorWith(currentAliases, targetAliases, _.isEqual));
|
||||
sameNetwork = _.isEmpty(
|
||||
_.xorWith(currentAliases, targetAliases, _.isEqual),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (target.ipv4Address != null) {
|
||||
sameNetwork = sameNetwork && _.isEqual(current.ipv4Address, target.ipv4Address);
|
||||
sameNetwork =
|
||||
sameNetwork && _.isEqual(current.ipv4Address, target.ipv4Address);
|
||||
}
|
||||
if (target.ipv6Address != null) {
|
||||
sameNetwork = sameNetwork && _.isEqual(current.ipv6Address, target.ipv6Address);
|
||||
sameNetwork =
|
||||
sameNetwork && _.isEqual(current.ipv6Address, target.ipv6Address);
|
||||
}
|
||||
if (target.linkLocalIps != null) {
|
||||
sameNetwork = sameNetwork && _.isEqual(current.linkLocalIps, target.linkLocalIps);
|
||||
sameNetwork =
|
||||
sameNetwork && _.isEqual(current.linkLocalIps, target.linkLocalIps);
|
||||
}
|
||||
return sameNetwork;
|
||||
}
|
||||
@ -818,7 +899,7 @@ export class Service {
|
||||
): ServiceConfig['volumes'] {
|
||||
let volumes: ServiceConfig['volumes'] = [];
|
||||
|
||||
_.each(composeVolumes, (volume) => {
|
||||
_.each(composeVolumes, volume => {
|
||||
const isBind = _.includes(volume, ':');
|
||||
if (isBind) {
|
||||
const [bindSource, bindDest, mode] = volume.split(':');
|
||||
@ -850,5 +931,4 @@ export class Service {
|
||||
`${updateLock.lockPath(appId, serviceName)}:/tmp/balena`,
|
||||
];
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -38,7 +38,12 @@ export interface NetworkConfig {
|
||||
driver: string;
|
||||
ipam: {
|
||||
driver: string;
|
||||
config: Array<{ subnet: string, gateway: string, ipRange?: string, auxAddress?: string }>;
|
||||
config: Array<{
|
||||
subnet: string;
|
||||
gateway: string;
|
||||
ipRange?: string;
|
||||
auxAddress?: string;
|
||||
}>;
|
||||
options: { [optName: string]: string };
|
||||
};
|
||||
enableIPv6: boolean;
|
||||
|
@ -47,13 +47,15 @@ export interface ServiceComposeConfig {
|
||||
labels?: { [labelName: string]: string };
|
||||
running: boolean;
|
||||
networkMode?: string;
|
||||
networks?: string[] | {
|
||||
networks?:
|
||||
| string[]
|
||||
| {
|
||||
[networkName: string]: {
|
||||
aliases?: string[];
|
||||
ipv4Address?: string;
|
||||
ipv6Address?: string;
|
||||
linkLocalIps?: string[];
|
||||
}
|
||||
};
|
||||
};
|
||||
pid?: string;
|
||||
pidsLimit?: number;
|
||||
@ -63,7 +65,7 @@ export interface ServiceComposeConfig {
|
||||
stopSignal?: string;
|
||||
sysctls?: { [name: string]: string };
|
||||
ulimits?: {
|
||||
[ulimitName: string]: number | { soft: number, hard: number };
|
||||
[ulimitName: string]: number | { soft: number; hard: number };
|
||||
};
|
||||
usernsMode?: string;
|
||||
volumes?: string[];
|
||||
@ -118,7 +120,7 @@ export interface ServiceConfig {
|
||||
ipv4Address?: string;
|
||||
ipv6Address?: string;
|
||||
linkLocalIps?: string[];
|
||||
}
|
||||
};
|
||||
};
|
||||
pid: string;
|
||||
pidsLimit: number;
|
||||
@ -127,7 +129,7 @@ export interface ServiceConfig {
|
||||
stopSignal: string;
|
||||
sysctls: { [name: string]: string };
|
||||
ulimits: {
|
||||
[ulimitName: string]: { soft: number, hard: number };
|
||||
[ulimitName: string]: { soft: number; hard: number };
|
||||
};
|
||||
usernsMode: string;
|
||||
volumes: string[];
|
||||
@ -152,19 +154,20 @@ export interface ServiceConfig {
|
||||
tty: boolean;
|
||||
}
|
||||
|
||||
export type ServiceConfigArrayField = 'volumes' |
|
||||
'devices' |
|
||||
'capAdd' |
|
||||
'capDrop' |
|
||||
'dns' |
|
||||
'dnsSearch' |
|
||||
'dnsOpt' |
|
||||
'expose' |
|
||||
'tmpfs' |
|
||||
'extraHosts' |
|
||||
'ulimitsArray' |
|
||||
'groupAdd' |
|
||||
'securityOpt';
|
||||
export type ServiceConfigArrayField =
|
||||
| 'volumes'
|
||||
| 'devices'
|
||||
| 'capAdd'
|
||||
| 'capDrop'
|
||||
| 'dns'
|
||||
| 'dnsSearch'
|
||||
| 'dnsOpt'
|
||||
| 'expose'
|
||||
| 'tmpfs'
|
||||
| 'extraHosts'
|
||||
| 'ulimitsArray'
|
||||
| 'groupAdd'
|
||||
| 'securityOpt';
|
||||
|
||||
// The config directly from the application manager, which contains
|
||||
// application information, plus the compose data
|
||||
@ -201,4 +204,3 @@ export interface DockerDevice {
|
||||
PathInContainer: string;
|
||||
CgroupPermissions: string;
|
||||
}
|
||||
|
||||
|
@ -16,7 +16,9 @@ import {
|
||||
ServiceHealthcheck,
|
||||
} from './types/service';
|
||||
|
||||
export function camelCaseConfig(literalConfig: ConfigMap): ServiceComposeConfig {
|
||||
export function camelCaseConfig(
|
||||
literalConfig: ConfigMap,
|
||||
): ServiceComposeConfig {
|
||||
const config = _.mapKeys(literalConfig, (_v, k) => _.camelCase(k));
|
||||
|
||||
// Networks can either be an object or array, but given _.isObject
|
||||
@ -31,7 +33,10 @@ export function camelCaseConfig(literalConfig: ConfigMap): ServiceComposeConfig
|
||||
return config as ServiceComposeConfig;
|
||||
}
|
||||
|
||||
export function parseMemoryNumber(valueAsString: string | null | undefined, defaultValue?: string): number {
|
||||
export function parseMemoryNumber(
|
||||
valueAsString: string | null | undefined,
|
||||
defaultValue?: string,
|
||||
): number {
|
||||
if (valueAsString == null) {
|
||||
if (defaultValue != null) {
|
||||
return parseMemoryNumber(defaultValue);
|
||||
@ -46,7 +51,17 @@ export function parseMemoryNumber(valueAsString: string | null | undefined, defa
|
||||
return 0;
|
||||
}
|
||||
const num = match[1];
|
||||
const pow: { [key: string]: number } = { '': 0, b: 0, B: 0, K: 1, k: 1, m: 2, M: 2, g: 3, G: 3 };
|
||||
const pow: { [key: string]: number } = {
|
||||
'': 0,
|
||||
b: 0,
|
||||
B: 0,
|
||||
K: 1,
|
||||
k: 1,
|
||||
m: 2,
|
||||
M: 2,
|
||||
g: 3,
|
||||
G: 3,
|
||||
};
|
||||
return parseInt(num, 10) * 1024 ** pow[match[2]];
|
||||
}
|
||||
|
||||
@ -57,9 +72,7 @@ export const validRestartPolicies = [
|
||||
'unless-stopped',
|
||||
];
|
||||
|
||||
export function createRestartPolicy(
|
||||
name?: string,
|
||||
): string {
|
||||
export function createRestartPolicy(name?: string): string {
|
||||
if (name == null) {
|
||||
return 'always';
|
||||
}
|
||||
@ -67,7 +80,9 @@ export function createRestartPolicy(
|
||||
// Ensure that name is a string, otherwise the below could
|
||||
// throw
|
||||
if (!_.isString(name)) {
|
||||
console.log(`Warning: Non-string argument for restart field: ${name} - ignoring.`);
|
||||
console.log(
|
||||
`Warning: Non-string argument for restart field: ${name} - ignoring.`,
|
||||
);
|
||||
return 'always';
|
||||
}
|
||||
|
||||
@ -87,7 +102,9 @@ function processCommandString(command: string): string {
|
||||
return command.replace(/(\$)/g, '\\$1');
|
||||
}
|
||||
|
||||
function processCommandParsedArrayElement(arg: string | { [key: string]: string}): string {
|
||||
function processCommandParsedArrayElement(
|
||||
arg: string | { [key: string]: string },
|
||||
): string {
|
||||
if (_.isString(arg)) {
|
||||
return arg;
|
||||
}
|
||||
@ -175,9 +192,7 @@ export function dockerHealthcheckToServiceHealthcheck(
|
||||
return serviceHC;
|
||||
}
|
||||
|
||||
function buildHealthcheckTest(
|
||||
test: string | string[],
|
||||
): string[] {
|
||||
function buildHealthcheckTest(test: string | string[]): string[] {
|
||||
if (_.isString(test)) {
|
||||
return ['CMD-SHELL', test];
|
||||
}
|
||||
@ -191,7 +206,6 @@ function getNanoseconds(timeStr: string): number {
|
||||
export function composeHealthcheckToServiceHealthcheck(
|
||||
healthcheck: ComposeHealthcheck | null | undefined,
|
||||
): ServiceHealthcheck | {} {
|
||||
|
||||
if (healthcheck == null) {
|
||||
return {};
|
||||
}
|
||||
@ -236,7 +250,11 @@ export function getHealthcheck(
|
||||
);
|
||||
|
||||
// Overlay any compose healthcheck fields on the image healthchecks
|
||||
return _.assign({ test: [ 'NONE' ] }, imageServiceHealthcheck, composeServiceHealthcheck);
|
||||
return _.assign(
|
||||
{ test: ['NONE'] },
|
||||
imageServiceHealthcheck,
|
||||
composeServiceHealthcheck,
|
||||
);
|
||||
}
|
||||
|
||||
export function serviceHealthcheckToDockerHealthcheck(
|
||||
@ -255,8 +273,10 @@ export function getWorkingDir(
|
||||
workingDir: string | null | undefined,
|
||||
imageInfo?: Dockerode.ImageInspectInfo,
|
||||
): string {
|
||||
return (workingDir != null ? workingDir : _.get(imageInfo, 'Config.WorkingDir', ''))
|
||||
.replace(/(^.+)\/$/, '$1');
|
||||
return (workingDir != null
|
||||
? workingDir
|
||||
: _.get(imageInfo, 'Config.WorkingDir', '')
|
||||
).replace(/(^.+)\/$/, '$1');
|
||||
}
|
||||
|
||||
export function getUser(
|
||||
@ -266,18 +286,14 @@ export function getUser(
|
||||
return user != null ? user : _.get(imageInfo, 'Config.User', '');
|
||||
}
|
||||
|
||||
export function sanitiseExposeFromCompose(
|
||||
portStr: string,
|
||||
): string {
|
||||
export function sanitiseExposeFromCompose(portStr: string): string {
|
||||
if (/^[0-9]*$/.test(portStr)) {
|
||||
return `${portStr}/tcp`;
|
||||
}
|
||||
return portStr;
|
||||
}
|
||||
|
||||
export function formatDevice(
|
||||
deviceStr: string,
|
||||
): DockerDevice {
|
||||
export function formatDevice(deviceStr: string): DockerDevice {
|
||||
const [pathOnHost, ...parts] = deviceStr.split(':');
|
||||
let [pathInContainer, cgroup] = parts;
|
||||
if (pathInContainer == null) {
|
||||
@ -323,13 +339,19 @@ export function addFeaturesFromLabels(
|
||||
}
|
||||
|
||||
if (checkTruthy(service.config.labels['io.balena.features.balena-socket'])) {
|
||||
service.config.volumes.push(`${constants.dockerSocket}:${constants.dockerSocket}`);
|
||||
service.config.volumes.push(
|
||||
`${constants.dockerSocket}:${constants.dockerSocket}`,
|
||||
);
|
||||
if (service.config.environment['DOCKER_HOST'] == null) {
|
||||
service.config.environment['DOCKER_HOST'] = `unix://${constants.dockerSocket}`;
|
||||
service.config.environment['DOCKER_HOST'] = `unix://${
|
||||
constants.dockerSocket
|
||||
}`;
|
||||
}
|
||||
// We keep balena.sock for backwards compatibility
|
||||
if (constants.dockerSocket != '/var/run/balena.sock') {
|
||||
service.config.volumes.push(`${constants.dockerSocket}:/var/run/balena.sock`);
|
||||
service.config.volumes.push(
|
||||
`${constants.dockerSocket}:/var/run/balena.sock`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -342,10 +364,16 @@ export function addFeaturesFromLabels(
|
||||
setEnvVariables('SUPERVISOR_API_KEY', options.apiSecret);
|
||||
if (service.config.networkMode === 'host') {
|
||||
setEnvVariables('SUPERVISOR_HOST', '127.0.0.1');
|
||||
setEnvVariables('SUPERVISOR_ADDRESS', `http://127.0.0.1:${options.listenPort}`);
|
||||
setEnvVariables(
|
||||
'SUPERVISOR_ADDRESS',
|
||||
`http://127.0.0.1:${options.listenPort}`,
|
||||
);
|
||||
} else {
|
||||
setEnvVariables('SUPERVISOR_HOST', options.supervisorApiHost);
|
||||
setEnvVariables('SUPERVISOR_ADDRESS', `http://${options.supervisorApiHost}:${options.listenPort}`);
|
||||
setEnvVariables(
|
||||
'SUPERVISOR_ADDRESS',
|
||||
`http://${options.supervisorApiHost}:${options.listenPort}`,
|
||||
);
|
||||
service.config.networks[constants.supervisorNetworkInterface] = {};
|
||||
}
|
||||
} else {
|
||||
@ -353,30 +381,30 @@ export function addFeaturesFromLabels(
|
||||
// of networks
|
||||
delete service.config.networks[constants.supervisorNetworkInterface];
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export function serviceUlimitsToDockerUlimits(
|
||||
ulimits: ServiceConfig['ulimits'] | null | undefined,
|
||||
): Array<{ Name: string, Soft: number, Hard: number }> {
|
||||
|
||||
const ret: Array<{ Name: string, Soft: number, Hard: number }> = [];
|
||||
): Array<{ Name: string; Soft: number; Hard: number }> {
|
||||
const ret: Array<{ Name: string; Soft: number; Hard: number }> = [];
|
||||
_.each(ulimits, ({ soft, hard }, name) => {
|
||||
ret.push({ Name: name, Soft: soft, Hard: hard });
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
|
||||
export function serviceRestartToDockerRestartPolicy(restart: string): { Name: string, MaximumRetryCount: number } {
|
||||
export function serviceRestartToDockerRestartPolicy(
|
||||
restart: string,
|
||||
): { Name: string; MaximumRetryCount: number } {
|
||||
return {
|
||||
Name: restart,
|
||||
MaximumRetryCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
export function serviceNetworksToDockerNetworks(networks: ServiceConfig['networks'])
|
||||
: Dockerode.ContainerCreateOptions['NetworkingConfig'] {
|
||||
|
||||
export function serviceNetworksToDockerNetworks(
|
||||
networks: ServiceConfig['networks'],
|
||||
): Dockerode.ContainerCreateOptions['NetworkingConfig'] {
|
||||
const dockerNetworks: Dockerode.ContainerCreateOptions['NetworkingConfig'] = {
|
||||
EndpointsConfig: {},
|
||||
};
|
||||
@ -450,13 +478,23 @@ export function normalizeNullValues(obj: Dictionary<any>): void {
|
||||
});
|
||||
}
|
||||
|
||||
export function normalizeLabels(
|
||||
labels: { [key: string]: string },
|
||||
): { [key: string]: string } {
|
||||
const legacyLabels = _.mapKeys(_.pickBy(labels, (_v, k) => _.startsWith(k, 'io.resin.')), (_v, k) => {
|
||||
export function normalizeLabels(labels: {
|
||||
[key: string]: string;
|
||||
}): { [key: string]: string } {
|
||||
const legacyLabels = _.mapKeys(
|
||||
_.pickBy(labels, (_v, k) => _.startsWith(k, 'io.resin.')),
|
||||
(_v, k) => {
|
||||
return k.replace(/resin/g, 'balena'); // e.g. io.resin.features.resin-api -> io.balena.features.balena-api
|
||||
});
|
||||
const balenaLabels = _.pickBy(labels, (_v, k) => _.startsWith(k, 'io.balena.'));
|
||||
const otherLabels = _.pickBy(labels, (_v, k) => !(_.startsWith(k, 'io.balena.') || _.startsWith(k, 'io.resin.')));
|
||||
return _.assign({}, otherLabels, legacyLabels, balenaLabels) as { [key: string]: string };
|
||||
},
|
||||
);
|
||||
const balenaLabels = _.pickBy(labels, (_v, k) =>
|
||||
_.startsWith(k, 'io.balena.'),
|
||||
);
|
||||
const otherLabels = _.pickBy(
|
||||
labels,
|
||||
(_v, k) => !(_.startsWith(k, 'io.balena.') || _.startsWith(k, 'io.resin.')),
|
||||
);
|
||||
return _.assign({}, otherLabels, legacyLabels, balenaLabels) as {
|
||||
[key: string]: string;
|
||||
};
|
||||
}
|
||||
|
src/config.ts (114 lines changed)
@ -6,7 +6,10 @@ import { generateUniqueKey } from 'resin-register-device';
|
||||
|
||||
import ConfigJsonConfigBackend from './config/configJson';
|
||||
|
||||
import { ConfigProviderFunctions, createProviderFunctions } from './config/functions';
|
||||
import {
|
||||
ConfigProviderFunctions,
|
||||
createProviderFunctions,
|
||||
} from './config/functions';
|
||||
import * as constants from './lib/constants';
|
||||
import { ConfigMap, ConfigSchema, ConfigValue } from './lib/types';
|
||||
|
||||
@ -18,7 +21,6 @@ interface ConfigOpts {
|
||||
}
|
||||
|
||||
class Config extends EventEmitter {
|
||||
|
||||
private db: DB;
|
||||
private configJsonBackend: ConfigJsonConfigBackend;
|
||||
private providerFunctions: ConfigProviderFunctions;
|
||||
@ -37,8 +39,15 @@ class Config extends EventEmitter {
|
||||
deviceId: { source: 'config.json', mutable: true },
|
||||
registered_at: { source: 'config.json', mutable: true },
|
||||
applicationId: { source: 'config.json' },
|
||||
appUpdatePollInterval: { source: 'config.json', mutable: true, default: 60000 },
|
||||
mixpanelToken: { source: 'config.json', default: constants.defaultMixpanelToken },
|
||||
appUpdatePollInterval: {
|
||||
source: 'config.json',
|
||||
mutable: true,
|
||||
default: 60000,
|
||||
},
|
||||
mixpanelToken: {
|
||||
source: 'config.json',
|
||||
default: constants.defaultMixpanelToken,
|
||||
},
|
||||
bootstrapRetryDelay: { source: 'config.json', default: 30000 },
|
||||
supervisorOfflineMode: { source: 'config.json', default: false },
|
||||
hostname: { source: 'config.json', mutable: true },
|
||||
@ -81,13 +90,15 @@ class Config extends EventEmitter {
|
||||
public constructor({ db, configPath }: ConfigOpts) {
|
||||
super();
|
||||
this.db = db;
|
||||
this.configJsonBackend = new ConfigJsonConfigBackend(this.schema, configPath);
|
||||
this.configJsonBackend = new ConfigJsonConfigBackend(
|
||||
this.schema,
|
||||
configPath,
|
||||
);
|
||||
this.providerFunctions = createProviderFunctions(this);
|
||||
}
|
||||
|
||||
public init(): Bluebird<void> {
|
||||
return this.configJsonBackend.init()
|
||||
.then(() => {
|
||||
return this.configJsonBackend.init().then(() => {
|
||||
return this.generateRequiredFields();
|
||||
});
|
||||
}
|
||||
@ -101,15 +112,16 @@ class Config extends EventEmitter {
|
||||
}
|
||||
switch (this.schema[key].source) {
|
||||
case 'func':
|
||||
return this.providerFunctions[key].get()
|
||||
.catch((e) => {
|
||||
return this.providerFunctions[key].get().catch(e => {
|
||||
console.error(`Error getting config value for ${key}`, e, e.stack);
|
||||
return null;
|
||||
});
|
||||
case 'config.json':
|
||||
return this.configJsonBackend.get(key);
|
||||
case 'db':
|
||||
return db('config').select('value').where({ key })
|
||||
return db('config')
|
||||
.select('value')
|
||||
.where({ key })
|
||||
.then(([conf]: [{ value: string }]) => {
|
||||
if (conf != null) {
|
||||
return conf.value;
|
||||
@ -117,8 +129,7 @@ class Config extends EventEmitter {
|
||||
return;
|
||||
});
|
||||
}
|
||||
})
|
||||
.then((value) => {
|
||||
}).then(value => {
|
||||
const schemaEntry = this.schema[key];
|
||||
if (value == null && schemaEntry != null && schemaEntry.default != null) {
|
||||
return schemaEntry.default;
|
||||
@ -128,20 +139,28 @@ class Config extends EventEmitter {
|
||||
}
|
||||
|
||||
public getMany(keys: string[], trx?: Transaction): Bluebird<ConfigMap> {
|
||||
return Bluebird.map(keys, (key: string) => this.get(key, trx))
|
||||
.then((values) => {
|
||||
return Bluebird.map(keys, (key: string) => this.get(key, trx)).then(
|
||||
values => {
|
||||
return _.zipObject(keys, values);
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
public set(keyValues: ConfigMap, trx?: Transaction): Bluebird<void> {
|
||||
return Bluebird.try(() => {
|
||||
|
||||
// Split the values based on which storage backend they use
|
||||
type SplitConfigBackend = { configJsonVals: ConfigMap, dbVals: ConfigMap, fnVals: ConfigMap };
|
||||
const { configJsonVals, dbVals, fnVals }: SplitConfigBackend = _.reduce(keyValues, (acc: SplitConfigBackend, val, key) => {
|
||||
type SplitConfigBackend = {
|
||||
configJsonVals: ConfigMap;
|
||||
dbVals: ConfigMap;
|
||||
fnVals: ConfigMap;
|
||||
};
|
||||
const { configJsonVals, dbVals, fnVals }: SplitConfigBackend = _.reduce(
|
||||
keyValues,
|
||||
(acc: SplitConfigBackend, val, key) => {
|
||||
if (this.schema[key] == null || !this.schema[key].mutable) {
|
||||
throw new Error(`Config field ${key} not found or is immutable in config.set`);
|
||||
throw new Error(
|
||||
`Config field ${key} not found or is immutable in config.set`,
|
||||
);
|
||||
}
|
||||
if (this.schema[key].source === 'config.json') {
|
||||
acc.configJsonVals[key] = val;
|
||||
@ -150,20 +169,31 @@ class Config extends EventEmitter {
|
||||
} else if (this.schema[key].source === 'func') {
|
||||
acc.fnVals[key] = val;
|
||||
} else {
|
||||
throw new Error(`Unknown config backend for key: ${key}, backend: ${this.schema[key].source}`);
|
||||
throw new Error(
|
||||
`Unknown config backend for key: ${key}, backend: ${
|
||||
this.schema[key].source
|
||||
}`,
|
||||
);
|
||||
}
|
||||
return acc;
|
||||
}, { configJsonVals: { }, dbVals: { }, fnVals: { } });
|
||||
},
|
||||
{ configJsonVals: {}, dbVals: {}, fnVals: {} },
|
||||
);
|
||||
|
||||
// Set these values, taking into account the knex transaction
|
||||
const setValuesInTransaction = (tx: Transaction): Bluebird<void> => {
|
||||
const dbKeys = _.keys(dbVals);
|
||||
return this.getMany(dbKeys, tx)
|
||||
.then((oldValues) => {
|
||||
.then(oldValues => {
|
||||
return Bluebird.map(dbKeys, (key: string) => {
|
||||
const value = dbVals[key];
|
||||
if (oldValues[key] !== value) {
|
||||
return this.db.upsertModel('config', { key, value }, { key }, tx);
|
||||
return this.db.upsertModel(
|
||||
'config',
|
||||
{ key, value },
|
||||
{ key },
|
||||
tx,
|
||||
);
|
||||
}
|
||||
});
|
||||
})
|
||||
@ -171,7 +201,9 @@ class Config extends EventEmitter {
|
||||
return Bluebird.map(_.toPairs(fnVals), ([key, value]) => {
|
||||
const fn = this.providerFunctions[key];
|
||||
if (fn.set == null) {
|
||||
throw new Error(`Attempting to set provider function without set() method implemented - key: ${key}`);
|
||||
throw new Error(
|
||||
`Attempting to set provider function without set() method implemented - key: ${key}`,
|
||||
);
|
||||
}
|
||||
return fn.set(value, tx);
|
||||
});
|
||||
@ -186,11 +218,12 @@ class Config extends EventEmitter {
|
||||
if (trx != null) {
|
||||
return setValuesInTransaction(trx).return();
|
||||
} else {
|
||||
return this.db.transaction((tx) => {
|
||||
return this.db
|
||||
.transaction(tx => {
|
||||
return setValuesInTransaction(tx);
|
||||
}).return();
|
||||
})
|
||||
.return();
|
||||
}
|
||||
|
||||
})
|
||||
.then(() => {
|
||||
return setImmediate(() => {
|
||||
@ -203,23 +236,34 @@ class Config extends EventEmitter {
|
||||
public remove(key: string): Bluebird<void> {
|
||||
return Bluebird.try(() => {
|
||||
if (this.schema[key] == null || !this.schema[key].mutable) {
|
||||
throw new Error(`Attempt to delete non-existent or immutable key ${key}`);
|
||||
throw new Error(
|
||||
`Attempt to delete non-existent or immutable key ${key}`,
|
||||
);
|
||||
}
|
||||
if (this.schema[key].source === 'config.json') {
|
||||
return this.configJsonBackend.remove(key);
|
||||
} else if (this.schema[key].source === 'db') {
|
||||
return this.db.models('config').del().where({ key });
|
||||
return this.db
|
||||
.models('config')
|
||||
.del()
|
||||
.where({ key });
|
||||
} else if (this.schema[key].source === 'func') {
|
||||
const mutFn = this.providerFunctions[key];
|
||||
if (mutFn == null) {
|
||||
throw new Error(`Could not find provider function for config ${key}!`);
|
||||
throw new Error(
|
||||
`Could not find provider function for config ${key}!`,
|
||||
);
|
||||
}
|
||||
if (mutFn.remove == null) {
|
||||
throw new Error(`Could not find removal provider function for config ${key}`);
|
||||
throw new Error(
|
||||
`Could not find removal provider function for config ${key}`,
|
||||
);
|
||||
}
|
||||
return mutFn.remove();
|
||||
} else {
|
||||
throw new Error(`Unknown or unsupported config backend: ${this.schema[key].source}`);
|
||||
throw new Error(
|
||||
`Unknown or unsupported config backend: ${this.schema[key].source}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -241,15 +285,13 @@ class Config extends EventEmitter {
|
||||
'deviceApiKey',
|
||||
'apiSecret',
|
||||
'offlineMode',
|
||||
])
|
||||
.then(({ uuid, deviceApiKey, apiSecret, offlineMode }) => {
|
||||
]).then(({ uuid, deviceApiKey, apiSecret, offlineMode }) => {
|
||||
// These fields need to be set regardless
|
||||
if (uuid == null || apiSecret == null) {
|
||||
uuid = uuid || this.newUniqueKey();
|
||||
apiSecret = apiSecret || this.newUniqueKey();
|
||||
}
|
||||
return this.set({ uuid, apiSecret })
|
||||
.then(() => {
|
||||
return this.set({ uuid, apiSecret }).then(() => {
|
||||
if (offlineMode) {
|
||||
return;
|
||||
}
|
||||
|
@ -26,7 +26,13 @@ const bootMountPoint = `${constants.rootMountPoint}${constants.bootMountPoint}`;
|
||||
function remountAndWriteAtomic(file: string, data: string): Promise<void> {
|
||||
// TODO: Find out why the below Promise.resolve() is required
|
||||
|
||||
return Promise.resolve(childProcess.execAsync(`mount -t vfat -o remount,rw ${constants.bootBlockDevice} ${bootMountPoint}`))
|
||||
return Promise.resolve(
|
||||
childProcess.execAsync(
|
||||
`mount -t vfat -o remount,rw ${
|
||||
constants.bootBlockDevice
|
||||
} ${bootMountPoint}`,
|
||||
),
|
||||
)
|
||||
.then(() => {
|
||||
return fsUtils.writeFileAtomic(file, data);
|
||||
})
|
||||
@ -34,7 +40,6 @@ function remountAndWriteAtomic(file: string, data: string): Promise<void> {
|
||||
}
|
||||
|
||||
export abstract class DeviceConfigBackend {
|
||||
|
||||
// Does this config backend support the given device type?
|
||||
public abstract matches(deviceType: string): boolean;
|
||||
|
||||
@ -59,17 +64,24 @@ export abstract class DeviceConfigBackend {
|
||||
|
||||
// Process the value of the environment variable, ready to be written to
|
||||
// the backend
|
||||
public abstract processConfigVarValue(key: string, value: string): string | string[];
|
||||
public abstract processConfigVarValue(
|
||||
key: string,
|
||||
value: string,
|
||||
): string | string[];
|
||||
|
||||
// Return the env var name for this config option
|
||||
public abstract createConfigVarName(configName: string): string;
|
||||
}
|
||||
|
||||
export class RPiConfigBackend extends DeviceConfigBackend {
|
||||
private static bootConfigVarPrefix = `${constants.hostConfigVarPrefix}CONFIG_`;
|
||||
private static bootConfigVarPrefix = `${
|
||||
constants.hostConfigVarPrefix
|
||||
}CONFIG_`;
|
||||
private static bootConfigPath = `${bootMountPoint}/config.txt`;
|
||||
|
||||
public static bootConfigVarRegex = new RegExp('(' + _.escapeRegExp(RPiConfigBackend.bootConfigVarPrefix) + ')(.+)');
|
||||
public static bootConfigVarRegex = new RegExp(
|
||||
'(' + _.escapeRegExp(RPiConfigBackend.bootConfigVarPrefix) + ')(.+)',
|
||||
);
|
||||
|
||||
private static arrayConfigKeys = [
|
||||
'dtparam',
|
||||
@ -97,9 +109,9 @@ export class RPiConfigBackend extends DeviceConfigBackend {
|
||||
}
|
||||
|
||||
public getBootConfig(): Promise<ConfigOptions> {
|
||||
return Promise.resolve(fs.readFile(RPiConfigBackend.bootConfigPath, 'utf-8'))
|
||||
.then((confStr) => {
|
||||
|
||||
return Promise.resolve(
|
||||
fs.readFile(RPiConfigBackend.bootConfigPath, 'utf-8'),
|
||||
).then(confStr => {
|
||||
const conf: ConfigOptions = {};
|
||||
const configStatements = confStr.split(/\r?\n/);
|
||||
|
||||
@ -129,9 +141,10 @@ export class RPiConfigBackend extends DeviceConfigBackend {
|
||||
const [, key, value] = keyValue;
|
||||
conf[key] = value;
|
||||
} else {
|
||||
console.log(`Warning - Could not parse config.txt entry: ${configStr}. Ignoring.`);
|
||||
console.log(
|
||||
`Warning - Could not parse config.txt entry: ${configStr}. Ignoring.`,
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return conf;
|
||||
@ -145,7 +158,9 @@ export class RPiConfigBackend extends DeviceConfigBackend {
|
||||
if (key === 'initramfs') {
|
||||
confStatements.push(`${key} ${value}`);
|
||||
} else if (_.isArray(value)) {
|
||||
confStatements = confStatements.concat(_.map(value, (entry) => `${key}=${entry}`));
|
||||
confStatements = confStatements.concat(
|
||||
_.map(value, entry => `${key}=${entry}`),
|
||||
);
|
||||
} else {
|
||||
confStatements.push(`${key}=${value}`);
|
||||
}
|
||||
@ -185,22 +200,25 @@ export class RPiConfigBackend extends DeviceConfigBackend {
|
||||
}
|
||||
|
||||
export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
private static bootConfigVarPrefix = `${constants.hostConfigVarPrefix}EXTLINUX_`;
|
||||
private static bootConfigVarPrefix = `${
|
||||
constants.hostConfigVarPrefix
|
||||
}EXTLINUX_`;
|
||||
private static bootConfigPath = `${bootMountPoint}/extlinux/extlinux.conf`;
|
||||
|
||||
public static bootConfigVarRegex = new RegExp('(' + _.escapeRegExp(ExtlinuxConfigBackend.bootConfigVarPrefix) + ')(.+)');
|
||||
public static bootConfigVarRegex = new RegExp(
|
||||
'(' + _.escapeRegExp(ExtlinuxConfigBackend.bootConfigVarPrefix) + ')(.+)',
|
||||
);
|
||||
|
||||
private static suppportedConfigKeys = [
|
||||
'isolcpus',
|
||||
];
|
||||
private static suppportedConfigKeys = ['isolcpus'];
|
||||
|
||||
public matches(deviceType: string): boolean {
|
||||
return _.startsWith(deviceType, 'jetson-tx');
|
||||
}
|
||||
|
||||
public getBootConfig(): Promise<ConfigOptions> {
|
||||
return Promise.resolve(fs.readFile(ExtlinuxConfigBackend.bootConfigPath, 'utf-8'))
|
||||
.then((confStr) => {
|
||||
return Promise.resolve(
|
||||
fs.readFile(ExtlinuxConfigBackend.bootConfigPath, 'utf-8'),
|
||||
).then(confStr => {
|
||||
const parsedBootFile = ExtlinuxConfigBackend.parseExtlinuxFile(confStr);
|
||||
|
||||
// First find the default label name
|
||||
@ -218,14 +236,18 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
const labelEntry = parsedBootFile.labels[defaultLabel];
|
||||
|
||||
if (labelEntry == null) {
|
||||
throw new Error(`Cannot find default label entry (label: ${defaultLabel}) for extlinux.conf file`);
|
||||
throw new Error(
|
||||
`Cannot find default label entry (label: ${defaultLabel}) for extlinux.conf file`,
|
||||
);
|
||||
}
|
||||
|
||||
// All configuration options come from the `APPEND` directive in the default label entry
|
||||
const appendEntry = labelEntry.APPEND;
|
||||
|
||||
if (appendEntry == null) {
|
||||
throw new Error('Could not find APPEND directive in default extlinux.conf boot entry');
|
||||
throw new Error(
|
||||
'Could not find APPEND directive in default extlinux.conf boot entry',
|
||||
);
|
||||
}
|
||||
|
||||
const conf: ConfigOptions = {};
|
||||
@ -234,7 +256,9 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
const parts = value.split('=');
|
||||
if (this.isSupportedConfig(parts[0])) {
|
||||
if (parts.length !== 2) {
|
||||
throw new Error(`Could not parse extlinux configuration entry: ${values} [value with error: ${value}]`);
|
||||
throw new Error(
|
||||
`Could not parse extlinux configuration entry: ${values} [value with error: ${value}]`,
|
||||
);
|
||||
}
|
||||
conf[parts[0]] = parts[1];
|
||||
}
|
||||
@ -246,23 +270,32 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
|
||||
public setBootConfig(opts: ConfigOptions): Promise<void> {
|
||||
// First get a representation of the configuration file, with all balena-supported configuration removed
|
||||
return Promise.resolve(fs.readFile(ExtlinuxConfigBackend.bootConfigPath))
|
||||
.then((data) => {
|
||||
const extlinuxFile = ExtlinuxConfigBackend.parseExtlinuxFile(data.toString());
|
||||
return Promise.resolve(
|
||||
fs.readFile(ExtlinuxConfigBackend.bootConfigPath),
|
||||
).then(data => {
|
||||
const extlinuxFile = ExtlinuxConfigBackend.parseExtlinuxFile(
|
||||
data.toString(),
|
||||
);
|
||||
const defaultLabel = extlinuxFile.globals.DEFAULT;
|
||||
if (defaultLabel == null) {
|
||||
throw new Error('Could not find DEFAULT directive entry in extlinux.conf');
|
||||
throw new Error(
|
||||
'Could not find DEFAULT directive entry in extlinux.conf',
|
||||
);
|
||||
}
|
||||
const defaultEntry = extlinuxFile.labels[defaultLabel];
|
||||
if (defaultEntry == null) {
|
||||
throw new Error(`Could not find default extlinux.conf entry: ${defaultLabel}`);
|
||||
throw new Error(
|
||||
`Could not find default extlinux.conf entry: ${defaultLabel}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (defaultEntry.APPEND == null) {
|
||||
throw new Error(`extlinux.conf APPEND directive not found for default entry: ${defaultLabel}, not sure how to proceed!`);
|
||||
throw new Error(
|
||||
`extlinux.conf APPEND directive not found for default entry: ${defaultLabel}, not sure how to proceed!`,
|
||||
);
|
||||
}
|
||||
|
||||
const appendLine = _.filter(defaultEntry.APPEND.split(' '), (entry) => {
|
||||
const appendLine = _.filter(defaultEntry.APPEND.split(' '), entry => {
|
||||
const lhs = entry.split('=');
|
||||
return !this.isSupportedConfig(lhs[0]);
|
||||
});
|
||||
@ -274,9 +307,14 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
});
|
||||
|
||||
defaultEntry.APPEND = appendLine.join(' ');
|
||||
const extlinuxString = ExtlinuxConfigBackend.extlinuxFileToString(extlinuxFile);
|
||||
const extlinuxString = ExtlinuxConfigBackend.extlinuxFileToString(
|
||||
extlinuxFile,
|
||||
);
|
||||
|
||||
return remountAndWriteAtomic(ExtlinuxConfigBackend.bootConfigPath, extlinuxString);
|
||||
return remountAndWriteAtomic(
|
||||
ExtlinuxConfigBackend.bootConfigPath,
|
||||
extlinuxString,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@ -301,7 +339,6 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
}
|
||||
|
||||
private static parseExtlinuxFile(confStr: string): ExtlinuxFile {
|
||||
|
||||
const file: ExtlinuxFile = {
|
||||
globals: {},
|
||||
labels: {},
|
||||
@ -309,7 +346,7 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
|
||||
// Firstly split by line and filter any comments and empty lines
|
||||
let lines = confStr.split(/\r?\n/);
|
||||
lines = _.filter(lines, (l) => {
|
||||
lines = _.filter(lines, l => {
|
||||
const trimmed = _.trimStart(l);
|
||||
return trimmed !== '' && !_.startsWith(trimmed, '#');
|
||||
});
|
||||
@ -344,7 +381,6 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
lastLabel = value;
|
||||
file.labels[lastLabel] = {};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return file;
|
||||
@ -363,5 +399,4 @@ export class ExtlinuxConfigBackend extends DeviceConfigBackend {
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -13,7 +13,6 @@ import * as osRelease from '../lib/os-release';
|
||||
type LockCallback = (file: string) => Promise<() => void>;
|
||||
|
||||
export default class ConfigJsonConfigBackend {
|
||||
|
||||
private lock: Lock;
|
||||
private readLockConfigJson: () => Promise.Disposer<() => void>;
|
||||
private writeLockConfigJson: () => Promise.Disposer<() => void>;
|
||||
@ -24,20 +23,22 @@ export default class ConfigJsonConfigBackend {
|
||||
private schema: ConfigSchema;
|
||||
|
||||
public constructor(schema: ConfigSchema, configPath?: string) {
|
||||
|
||||
this.configPath = configPath;
|
||||
this.schema = schema;
|
||||
this.lock = new Lock();
|
||||
|
||||
const writeLock: LockCallback = Promise.promisify(this.lock.async.writeLock);
|
||||
const writeLock: LockCallback = Promise.promisify(
|
||||
this.lock.async.writeLock,
|
||||
);
|
||||
const readLock: LockCallback = Promise.promisify(this.lock.async.readLock);
|
||||
this.writeLockConfigJson = () => writeLock('config.json').disposer((release) => release());
|
||||
this.readLockConfigJson = () => readLock('config.json').disposer((release) => release());
|
||||
this.writeLockConfigJson = () =>
|
||||
writeLock('config.json').disposer(release => release());
|
||||
this.readLockConfigJson = () =>
|
||||
readLock('config.json').disposer(release => release());
|
||||
}
|
||||
|
||||
public init(): Promise<void> {
|
||||
return this.read()
|
||||
.then((configJson) => {
|
||||
return this.read().then(configJson => {
|
||||
_.assign(this.cache, configJson);
|
||||
});
|
||||
}
|
||||
@ -45,22 +46,23 @@ export default class ConfigJsonConfigBackend {
|
||||
public set(keyVals: { [key: string]: ConfigValue }): Promise<void> {
|
||||
let changed = false;
|
||||
return Promise.using(this.writeLockConfigJson(), () => {
|
||||
|
||||
return Promise.mapSeries(_.keys(keyVals), (key: string) => {
|
||||
|
||||
const value = keyVals[key];
|
||||
|
||||
if (this.cache[key] !== value) {
|
||||
this.cache[key] = value;
|
||||
|
||||
if (value == null && this.schema[key] != null && this.schema[key].removeIfNull) {
|
||||
if (
|
||||
value == null &&
|
||||
this.schema[key] != null &&
|
||||
this.schema[key].removeIfNull
|
||||
) {
|
||||
delete this.cache[key];
|
||||
}
|
||||
|
||||
changed = true;
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
}).then(() => {
|
||||
if (changed) {
|
||||
return this.write();
|
||||
}
|
||||
@ -91,8 +93,7 @@ export default class ConfigJsonConfigBackend {
|
||||
}
|
||||
|
||||
public path(): Promise<string> {
|
||||
return this.pathOnHost()
|
||||
.catch((err) => {
|
||||
return this.pathOnHost().catch(err => {
|
||||
console.error(err.message);
|
||||
return constants.configJsonNonAtomicPath;
|
||||
});
|
||||
@ -101,12 +102,12 @@ export default class ConfigJsonConfigBackend {
|
||||
private write(): Promise<void> {
|
||||
let atomicWritePossible = true;
|
||||
return this.pathOnHost()
|
||||
.catch((err) => {
|
||||
.catch(err => {
|
||||
console.error(err.message);
|
||||
atomicWritePossible = false;
|
||||
return constants.configJsonNonAtomicPath;
|
||||
})
|
||||
.then((configPath) => {
|
||||
.then(configPath => {
|
||||
if (atomicWritePossible) {
|
||||
return writeFileAtomic(configPath, JSON.stringify(this.cache));
|
||||
} else {
|
||||
@ -117,7 +118,7 @@ export default class ConfigJsonConfigBackend {
|
||||
|
||||
private read(): Promise<string> {
|
||||
return this.path()
|
||||
.then((filename) => {
|
||||
.then(filename => {
|
||||
return fs.readFile(filename, 'utf-8');
|
||||
})
|
||||
.then(JSON.parse);
|
||||
@ -130,9 +131,9 @@ export default class ConfigJsonConfigBackend {
|
||||
if (constants.configJsonPathOnHost != null) {
|
||||
return constants.configJsonPathOnHost;
|
||||
}
|
||||
return osRelease.getOSVersion(constants.hostOSVersionPath)
|
||||
.then((osVersion) => {
|
||||
|
||||
return osRelease
|
||||
.getOSVersion(constants.hostOSVersionPath)
|
||||
.then(osVersion => {
|
||||
if (osVersion == null) {
|
||||
throw new Error('Failed to detect OS version!');
|
||||
}
|
||||
@ -147,13 +148,13 @@ export default class ConfigJsonConfigBackend {
|
||||
// In non-resinOS hosts (or older than 1.0.0), if CONFIG_JSON_PATH wasn't passed
|
||||
// then we can't do atomic changes (only access to config.json we have is in /boot,
|
||||
// which is assumed to be a file bind mount where rename is impossible)
|
||||
throw new Error('Could not determine config.json path on host, atomic write will not be possible');
|
||||
throw new Error(
|
||||
'Could not determine config.json path on host, atomic write will not be possible',
|
||||
);
|
||||
}
|
||||
});
|
||||
})
|
||||
.then((file) => {
|
||||
}).then(file => {
|
||||
return path.join(constants.rootMountPoint, file);
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -11,7 +11,10 @@ import { ConfigValue } from '../lib/types';
|
||||
|
||||
// A provider for schema entries with source 'func'
|
||||
type ConfigProviderFunctionGetter = () => Bluebird<any>;
|
||||
type ConfigProviderFunctionSetter = (value: ConfigValue, tx?: Transaction) => Bluebird<void>;
|
||||
type ConfigProviderFunctionSetter = (
|
||||
value: ConfigValue,
|
||||
tx?: Transaction,
|
||||
) => Bluebird<void>;
|
||||
type ConfigProviderFunctionRemover = () => Bluebird<void>;
|
||||
|
||||
interface ConfigProviderFunction {
|
||||
@ -24,7 +27,9 @@ export interface ConfigProviderFunctions {
|
||||
[key: string]: ConfigProviderFunction;
|
||||
}
|
||||
|
||||
export function createProviderFunctions(config: Config): ConfigProviderFunctions {
|
||||
export function createProviderFunctions(
|
||||
config: Config,
|
||||
): ConfigProviderFunctions {
|
||||
return {
|
||||
version: {
|
||||
get: () => {
|
||||
@ -33,7 +38,8 @@ export function createProviderFunctions(config: Config): ConfigProviderFunctions
|
||||
},
|
||||
currentApiKey: {
|
||||
get: () => {
|
||||
return config.getMany([ 'apiKey', 'deviceApiKey' ])
|
||||
return config
|
||||
.getMany(['apiKey', 'deviceApiKey'])
|
||||
.then(({ apiKey, deviceApiKey }) => {
|
||||
return apiKey || deviceApiKey;
|
||||
});
|
||||
@ -41,7 +47,8 @@ export function createProviderFunctions(config: Config): ConfigProviderFunctions
|
||||
},
|
||||
offlineMode: {
|
||||
get: () => {
|
||||
return config.getMany([ 'apiEndpoint', 'supervisorOfflineMode' ])
|
||||
return config
|
||||
.getMany(['apiEndpoint', 'supervisorOfflineMode'])
|
||||
.then(({ apiEndpoint, supervisorOfflineMode }) => {
|
||||
return Boolean(supervisorOfflineMode) || !Boolean(apiEndpoint);
|
||||
});
|
||||
@ -49,13 +56,9 @@ export function createProviderFunctions(config: Config): ConfigProviderFunctions
|
||||
},
|
||||
provisioned: {
|
||||
get: () => {
|
||||
return config.getMany([
|
||||
'uuid',
|
||||
'apiEndpoint',
|
||||
'registered_at',
|
||||
'deviceId',
|
||||
])
|
||||
.then((requiredValues) => {
|
||||
return config
|
||||
.getMany(['uuid', 'apiEndpoint', 'registered_at', 'deviceId'])
|
||||
.then(requiredValues => {
|
||||
return _.every(_.values(requiredValues), Boolean);
|
||||
});
|
||||
},
|
||||
@ -72,7 +75,8 @@ export function createProviderFunctions(config: Config): ConfigProviderFunctions
|
||||
},
|
||||
provisioningOptions: {
|
||||
get: () => {
|
||||
return config.getMany([
|
||||
return config
|
||||
.getMany([
|
||||
'uuid',
|
||||
'userId',
|
||||
'applicationId',
|
||||
@ -83,7 +87,8 @@ export function createProviderFunctions(config: Config): ConfigProviderFunctions
|
||||
'apiTimeout',
|
||||
'registered_at',
|
||||
'deviceId',
|
||||
]).then((conf) => {
|
||||
])
|
||||
.then(conf => {
|
||||
return {
|
||||
uuid: conf.uuid,
|
||||
applicationId: conf.applicationId,
|
||||
@ -101,8 +106,7 @@ export function createProviderFunctions(config: Config): ConfigProviderFunctions
|
||||
},
|
||||
mixpanelHost: {
|
||||
get: () => {
|
||||
return config.get('apiEndpoint')
|
||||
.then((apiEndpoint) => {
|
||||
return config.get('apiEndpoint').then(apiEndpoint => {
|
||||
return `${apiEndpoint}/mixpanel`;
|
||||
});
|
||||
},
|
||||
|
@ -8,25 +8,22 @@ import {
|
||||
RPiConfigBackend,
|
||||
} from './backend';
|
||||
|
||||
|
||||
const configBackends = [
|
||||
new ExtlinuxConfigBackend(),
|
||||
new RPiConfigBackend(),
|
||||
];
|
||||
const configBackends = [new ExtlinuxConfigBackend(), new RPiConfigBackend()];
|
||||
|
||||
export function isConfigDeviceType(deviceType: string): boolean {
|
||||
return getConfigBackend(deviceType) != null;
|
||||
}
|
||||
|
||||
export function getConfigBackend(deviceType: string): DeviceConfigBackend | undefined {
|
||||
return _.find(configBackends, (backend) => backend.matches(deviceType));
|
||||
export function getConfigBackend(
|
||||
deviceType: string,
|
||||
): DeviceConfigBackend | undefined {
|
||||
return _.find(configBackends, backend => backend.matches(deviceType));
|
||||
}
|
||||
|
||||
export function envToBootConfig(
|
||||
configBackend: DeviceConfigBackend | null,
|
||||
env: EnvVarObject,
|
||||
): ConfigOptions {
|
||||
|
||||
if (configBackend == null) {
|
||||
return {};
|
||||
}
|
||||
@ -34,7 +31,9 @@ export function envToBootConfig(
|
||||
return _(env)
|
||||
.pickBy((_val, key) => configBackend.isBootConfigVar(key))
|
||||
.mapKeys((_val, key) => configBackend.processConfigVarName(key))
|
||||
.mapValues((val, key) => configBackend.processConfigVarValue(key, val || ''))
|
||||
.mapValues((val, key) =>
|
||||
configBackend.processConfigVarValue(key, val || ''),
|
||||
)
|
||||
.value();
|
||||
}
|
||||
|
||||
@ -42,10 +41,9 @@ export function bootConfigToEnv(
|
||||
configBackend: DeviceConfigBackend,
|
||||
config: ConfigOptions,
|
||||
): EnvVarObject {
|
||||
|
||||
return _(config)
|
||||
.mapKeys((_val, key) => configBackend.createConfigVarName(key))
|
||||
.mapValues((val) => {
|
||||
.mapValues(val => {
|
||||
if (_.isArray(val)) {
|
||||
return JSON.stringify(val).replace(/^\[(.*)\]$/, '$1');
|
||||
}
|
||||
@ -58,11 +56,14 @@ function filterNamespaceFromConfig(
|
||||
namespace: RegExp,
|
||||
conf: { [key: string]: any },
|
||||
): { [key: string]: any } {
|
||||
return _.mapKeys(_.pickBy(conf, (_v, k) => {
|
||||
return _.mapKeys(
|
||||
_.pickBy(conf, (_v, k) => {
|
||||
return namespace.test(k);
|
||||
}), (_v,k) => {
|
||||
}),
|
||||
(_v, k) => {
|
||||
return k.replace(namespace, '$1');
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
export function formatConfigKeys(
|
||||
@ -70,18 +71,27 @@ export function formatConfigKeys(
|
||||
allowedKeys: string[],
|
||||
conf: { [key: string]: any },
|
||||
): { [key: string]: any } {
|
||||
|
||||
const isConfigType = configBackend != null;
|
||||
const namespaceRegex = /^BALENA_(.*)/;
|
||||
const legacyNamespaceRegex = /^RESIN_(.*)/;
|
||||
const confFromNamespace = filterNamespaceFromConfig(namespaceRegex, conf);
|
||||
const confFromLegacyNamespace = filterNamespaceFromConfig(legacyNamespaceRegex, conf);
|
||||
const confFromLegacyNamespace = filterNamespaceFromConfig(
|
||||
legacyNamespaceRegex,
|
||||
conf,
|
||||
);
|
||||
const noNamespaceConf = _.pickBy(conf, (_v, k) => {
|
||||
return !_.startsWith(k, 'RESIN_') && !_.startsWith(k, 'BALENA_');
|
||||
});
|
||||
const confWithoutNamespace = _.defaults(confFromNamespace, confFromLegacyNamespace, noNamespaceConf);
|
||||
const confWithoutNamespace = _.defaults(
|
||||
confFromNamespace,
|
||||
confFromLegacyNamespace,
|
||||
noNamespaceConf,
|
||||
);
|
||||
|
||||
return _.pickBy(confWithoutNamespace, (_v, k) => {
|
||||
return _.includes(allowedKeys, k) || (isConfigType && configBackend!.isBootConfigVar(k));
|
||||
return (
|
||||
_.includes(allowedKeys, k) ||
|
||||
(isConfigType && configBackend!.isBootConfigVar(k))
|
||||
);
|
||||
});
|
||||
}
|
||||
|
14
src/db.ts
@ -23,12 +23,14 @@ class DB {
|
||||
},
|
||||
useNullAsDefault: true,
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
public init(): Bluebird<void> {
|
||||
return this.knex('knex_migrations_lock').update({ is_locked: 0})
|
||||
.catch(() => { return; })
|
||||
return this.knex('knex_migrations_lock')
|
||||
.update({ is_locked: 0 })
|
||||
.catch(() => {
|
||||
return;
|
||||
})
|
||||
.then(() => {
|
||||
return this.knex.migrate.latest({
|
||||
directory: path.join(__dirname, 'migrations'),
|
||||
@ -46,10 +48,11 @@ class DB {
|
||||
id: number | { [key: string]: string },
|
||||
trx?: Knex.Transaction,
|
||||
): Bluebird<any> {
|
||||
|
||||
const knex = trx || this.knex;
|
||||
|
||||
return knex(modelName).update(obj).where(id)
|
||||
return knex(modelName)
|
||||
.update(obj)
|
||||
.where(id)
|
||||
.then((n: number) => {
|
||||
if (n === 0) {
|
||||
return knex(modelName).insert(obj);
|
||||
@ -60,7 +63,6 @@ class DB {
|
||||
public transaction(cb: DBTransactionCallback): Bluebird<Knex.Transaction> {
|
||||
return this.knex.transaction(cb);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export = DB;
|
||||
|
12
src/device-api/common.d.ts
vendored
@ -9,9 +9,17 @@ export interface ServiceAction {
|
||||
options: any;
|
||||
}
|
||||
|
||||
declare function doRestart(applications: ApplicationManager, appId: number, force: boolean): Promise<void>;
|
||||
declare function doRestart(
|
||||
applications: ApplicationManager,
|
||||
appId: number,
|
||||
force: boolean,
|
||||
): Promise<void>;
|
||||
|
||||
declare function doPurge(applications: ApplicationManager, appId: number, force: boolean): Promise<void>;
|
||||
declare function doPurge(
|
||||
applications: ApplicationManager,
|
||||
appId: number,
|
||||
force: boolean,
|
||||
): Promise<void>;
|
||||
|
||||
declare function serviceAction(
|
||||
action: string,
|
||||
|
@ -12,7 +12,6 @@ import { doPurge, doRestart, serviceAction } from './common';
|
||||
import supervisorVersion = require('../lib/supervisor-version');
|
||||
|
||||
export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
|
||||
const { _lockingIfNecessary, deviceState } = applications;
|
||||
|
||||
const messageFromError = (err?: Error | string | null): string => {
|
||||
@ -36,8 +35,9 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
const { appId } = req.params;
|
||||
|
||||
return _lockingIfNecessary(appId, { force }, () => {
|
||||
return applications.getCurrentApp(appId)
|
||||
.then((app) => {
|
||||
return applications
|
||||
.getCurrentApp(appId)
|
||||
.then(app => {
|
||||
if (app == null) {
|
||||
res.status(404).send(appNotFoundMessage);
|
||||
return;
|
||||
@ -47,31 +47,29 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
res.status(404).send(serviceNotFoundMessage);
|
||||
return;
|
||||
}
|
||||
applications.setTargetVolatileForService(
|
||||
service.imageId!,
|
||||
{ running: action !== 'stop' },
|
||||
);
|
||||
return applications.executeStepAction(
|
||||
serviceAction(
|
||||
action,
|
||||
service.serviceId!,
|
||||
service,
|
||||
service,
|
||||
{ wait: true },
|
||||
),
|
||||
applications.setTargetVolatileForService(service.imageId!, {
|
||||
running: action !== 'stop',
|
||||
});
|
||||
return applications
|
||||
.executeStepAction(
|
||||
serviceAction(action, service.serviceId!, service, service, {
|
||||
wait: true,
|
||||
}),
|
||||
{ skipLock: true },
|
||||
)
|
||||
.then(() => {
|
||||
res.status(200).send('OK');
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
.catch(err => {
|
||||
res.status(503).send(messageFromError(err));
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
router.post('/v2/applications/:appId/purge', (req: Request, res: Response) => {
|
||||
router.post(
|
||||
'/v2/applications/:appId/purge',
|
||||
(req: Request, res: Response) => {
|
||||
const { force } = req.body;
|
||||
const { appId } = req.params;
|
||||
|
||||
@ -79,7 +77,7 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
.then(() => {
|
||||
res.status(200).send('OK');
|
||||
})
|
||||
.catch((err) => {
|
||||
.catch(err => {
|
||||
let message;
|
||||
if (err != null) {
|
||||
message = err.message;
|
||||
@ -91,21 +89,33 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
}
|
||||
res.status(503).send(message);
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
router.post('/v2/applications/:appId/restart-service', (req: Request, res: Response) => {
|
||||
router.post(
|
||||
'/v2/applications/:appId/restart-service',
|
||||
(req: Request, res: Response) => {
|
||||
return handleServiceAction(req, res, 'restart');
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
router.post('/v2/applications/:appId/stop-service', (req: Request, res: Response) => {
|
||||
router.post(
|
||||
'/v2/applications/:appId/stop-service',
|
||||
(req: Request, res: Response) => {
|
||||
return handleServiceAction(req, res, 'stop');
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
router.post('/v2/applications/:appId/start-service', (req: Request, res: Response) => {
|
||||
router.post(
|
||||
'/v2/applications/:appId/start-service',
|
||||
(req: Request, res: Response) => {
|
||||
return handleServiceAction(req, res, 'start');
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
router.post('/v2/applications/:appId/restart', (req: Request, res: Response) => {
|
||||
router.post(
|
||||
'/v2/applications/:appId/restart',
|
||||
(req: Request, res: Response) => {
|
||||
const { force } = req.body;
|
||||
const { appId } = req.params;
|
||||
|
||||
@ -113,14 +123,14 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
.then(() => {
|
||||
res.status(200).send('OK');
|
||||
})
|
||||
.catch((err) => {
|
||||
.catch(err => {
|
||||
res.status(503).send(messageFromError(err));
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
// TODO: Support dependent applications when this feature is complete
|
||||
router.get('/v2/applications/state', (_req: Request, res: Response) => {
|
||||
|
||||
// It's kinda hacky to access the services and db via the application manager
|
||||
// maybe refactor this code
|
||||
Bluebird.join(
|
||||
@ -130,7 +140,7 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
(
|
||||
services,
|
||||
images,
|
||||
apps: Array<{ appId: string, commit: string, name: string }>,
|
||||
apps: Array<{ appId: string; commit: string; name: string }>,
|
||||
) => {
|
||||
// Create an object which is keyed by application name
|
||||
const response: {
|
||||
@ -142,14 +152,14 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
status: string;
|
||||
releaseId: number;
|
||||
downloadProgress: number | null;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
};
|
||||
} = {};
|
||||
|
||||
const appNameById: { [id: number]: string } = {};
|
||||
|
||||
apps.forEach((app) => {
|
||||
apps.forEach(app => {
|
||||
const appId = parseInt(app.appId, 10);
|
||||
response[app.name] = {
|
||||
appId,
|
||||
@ -160,7 +170,7 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
appNameById[appId] = app.name;
|
||||
});
|
||||
|
||||
images.forEach((img) => {
|
||||
images.forEach(img => {
|
||||
const appName = appNameById[img.appId];
|
||||
if (appName == null) {
|
||||
console.log('Image found for unknown application!');
|
||||
@ -186,16 +196,19 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
});
|
||||
|
||||
res.status(200).json(response);
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
router.get('/v2/applications/:appId/state', (_req: Request, res: Response) => {
|
||||
router.get(
|
||||
'/v2/applications/:appId/state',
|
||||
(_req: Request, res: Response) => {
|
||||
// Get all services and their statuses, and return it
|
||||
applications.getStatus()
|
||||
.then((apps) => {
|
||||
applications.getStatus().then(apps => {
|
||||
res.status(200).json(apps);
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
router.get('/v2/local/target-state', async (_req, res) => {
|
||||
try {
|
||||
@ -249,7 +262,6 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
message: e.message,
|
||||
});
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
const message = 'Could not apply target state: ';
|
||||
res.status(503).json({
|
||||
@ -263,13 +275,14 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
// Return the device type and slug so that local mode builds can use this to
|
||||
// resolve builds
|
||||
try {
|
||||
|
||||
// FIXME: We should be mounting the following file into the supervisor from the
|
||||
// start-resin-supervisor script, changed in meta-resin - but until then, hardcode it
|
||||
const data = await fs.readFile('/mnt/root/resin-boot/device-type.json', 'utf8');
|
||||
const data = await fs.readFile(
|
||||
'/mnt/root/resin-boot/device-type.json',
|
||||
'utf8',
|
||||
);
|
||||
const deviceInfo = JSON.parse(data);
|
||||
|
||||
|
||||
return res.status(200).json({
|
||||
status: 'sucess',
|
||||
info: {
|
||||
@ -277,7 +290,6 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
deviceType: deviceInfo.slug,
|
||||
},
|
||||
});
|
||||
|
||||
} catch (e) {
|
||||
const message = 'Could not fetch device information: ';
|
||||
res.status(503).json({
|
||||
@ -289,7 +301,9 @@ export function createV2Api(router: Router, applications: ApplicationManager) {
|
||||
|
||||
router.get('/v2/local/logs', async (_req, res) => {
|
||||
const backend = applications.logger.getLocalBackend();
|
||||
backend.assignServiceNameResolver(applications.serviceNameFromId.bind(applications));
|
||||
backend.assignServiceNameResolver(
|
||||
applications.serviceNameFromId.bind(applications),
|
||||
);
|
||||
|
||||
// Get the stream, and stream it into res
|
||||
const listenStream = backend.attachListener();
|
||||
|
@ -258,7 +258,6 @@ module.exports = class DeviceState extends EventEmitter
|
||||
@config.get('apiEndpoint'),
|
||||
validateState(target),
|
||||
(apiEndpoint) =>
|
||||
source = apiEndpoint
|
||||
@usingWriteLockTarget =>
|
||||
# Apps, deviceConfig, dependent
|
||||
@db.transaction (trx) =>
|
||||
@ -318,9 +317,9 @@ module.exports = class DeviceState extends EventEmitter
|
||||
_.assign(@_currentVolatile, newState)
|
||||
@emitAsync('change')
|
||||
|
||||
_convertLegacyAppsJson: (appsArray) =>
|
||||
Promise.try =>
|
||||
deviceConf = _.reduce(appsArray, (conf, app) =>
|
||||
_convertLegacyAppsJson: (appsArray) ->
|
||||
Promise.try ->
|
||||
deviceConf = _.reduce(appsArray, (conf, app) ->
|
||||
return _.merge({}, conf, app.config)
|
||||
, {})
|
||||
apps = _.keyBy(_.map(appsArray, singleToMulticontainerApp), 'appId')
|
||||
|
@ -32,7 +32,6 @@ const mixpanelMask = [
|
||||
].join(',');
|
||||
|
||||
export class EventTracker {
|
||||
|
||||
private defaultProperties: EventTrackProperties | null;
|
||||
private client: any;
|
||||
|
||||
@ -60,11 +59,7 @@ export class EventTracker {
|
||||
});
|
||||
}
|
||||
|
||||
public track(
|
||||
event: string,
|
||||
properties: EventTrackProperties | Error = { },
|
||||
) {
|
||||
|
||||
public track(event: string, properties: EventTrackProperties | Error = {}) {
|
||||
if (properties instanceof Error) {
|
||||
properties = { error: properties };
|
||||
}
|
||||
@ -89,12 +84,19 @@ export class EventTracker {
|
||||
this.throttleddLogger(event)(properties);
|
||||
}
|
||||
|
||||
private throttleddLogger = memoizee((event: string) => {
|
||||
private throttleddLogger = memoizee(
|
||||
(event: string) => {
|
||||
// Call this function at maximum once every minute
|
||||
return _.throttle((properties) => {
|
||||
return _.throttle(
|
||||
properties => {
|
||||
this.client.track(event, properties);
|
||||
}, eventDebounceTime, { leading: true });
|
||||
}, { primitive: true });
|
||||
},
|
||||
eventDebounceTime,
|
||||
{ leading: true },
|
||||
);
|
||||
},
|
||||
{ primitive: true },
|
||||
);
|
||||
|
||||
private logEvent(...args: string[]) {
|
||||
console.log(...args);
|
||||
|
@ -7,14 +7,19 @@ const supervisorNetworkInterface = 'supervisor0';
|
||||
|
||||
const constants = {
|
||||
rootMountPoint,
|
||||
databasePath: checkString(process.env.DATABASE_PATH) || '/data/database.sqlite',
|
||||
databasePath:
|
||||
checkString(process.env.DATABASE_PATH) || '/data/database.sqlite',
|
||||
dockerSocket: process.env.DOCKER_SOCKET || '/var/run/docker.sock',
|
||||
supervisorImage: checkString(process.env.SUPERVISOR_IMAGE) || 'resin/rpi-supervisor',
|
||||
ledFile: checkString(process.env.LED_FILE) || '/sys/class/leds/led0/brightness',
|
||||
supervisorImage:
|
||||
checkString(process.env.SUPERVISOR_IMAGE) || 'resin/rpi-supervisor',
|
||||
ledFile:
|
||||
checkString(process.env.LED_FILE) || '/sys/class/leds/led0/brightness',
|
||||
vpnStatusPath:
|
||||
checkString(process.env.VPN_STATUS_PATH) || `${rootMountPoint}/run/openvpn/vpn_status`,
|
||||
checkString(process.env.VPN_STATUS_PATH) ||
|
||||
`${rootMountPoint}/run/openvpn/vpn_status`,
|
||||
hostOSVersionPath:
|
||||
checkString(process.env.HOST_OS_VERSION_PATH) || `${rootMountPoint}/etc/os-release`,
|
||||
checkString(process.env.HOST_OS_VERSION_PATH) ||
|
||||
`${rootMountPoint}/etc/os-release`,
|
||||
privateAppEnvVars: [
|
||||
'RESIN_SUPERVISOR_API_KEY',
|
||||
'RESIN_API_KEY',
|
||||
@ -28,7 +33,13 @@ const constants = {
|
||||
configJsonNonAtomicPath: '/boot/config.json',
|
||||
defaultMixpanelToken: process.env.DEFAULT_MIXPANEL_TOKEN,
|
||||
supervisorNetworkInterface: supervisorNetworkInterface,
|
||||
allowedInterfaces: [ 'resin-vpn', 'tun0', 'docker0', 'lo', supervisorNetworkInterface ],
|
||||
allowedInterfaces: [
|
||||
'resin-vpn',
|
||||
'tun0',
|
||||
'docker0',
|
||||
'lo',
|
||||
supervisorNetworkInterface,
|
||||
],
|
||||
appsJsonPath: process.env.APPS_JSON_PATH || '/boot/apps.json',
|
||||
ipAddressUpdateInterval: 30 * 1000,
|
||||
imageCleanupErrorIgnoreTimeout: 3600 * 1000,
|
||||
|
@ -6,8 +6,10 @@ export function envArrayToObject(env: string[]): EnvVarObject {
|
||||
const toPair = (keyVal: string) => {
|
||||
const m = keyVal.match(/^([^=]+)=\s*(.*)\s*$/);
|
||||
if (m == null) {
|
||||
console.log(`WARNING: Could not correctly parse env var ${keyVal}. ` +
|
||||
'Please fix this var and recreate the container.');
|
||||
console.log(
|
||||
`WARNING: Could not correctly parse env var ${keyVal}. ` +
|
||||
'Please fix this var and recreate the container.',
|
||||
);
|
||||
return [null, null];
|
||||
}
|
||||
return m.slice(1);
|
||||
|
@ -78,7 +78,7 @@ module.exports = class DockerUtils extends DockerToolbelt
|
||||
# Since the supervisor never calls this function without a source anymore,
|
||||
# this should never happen, but we handle it anyways.
|
||||
if !deltaSource?
|
||||
log("Falling back to regular pull due to lack of a delta source")
|
||||
log('Falling back to regular pull due to lack of a delta source')
|
||||
return @fetchImageWithProgress(imgDest, fullDeltaOpts, onProgress)
|
||||
|
||||
docker = this
|
||||
|
9
src/lib/docker-utils.d.ts
vendored
@ -19,17 +19,20 @@ declare class DockerUtils extends DockerToolbelt {
|
||||
|
||||
getRepoAndTag(image: string): Bluebird<TaggedRepoImage>;
|
||||
|
||||
fetchDeltaWithProgress(imgDest: string, fullDeltaOpts: any, onProgress: (args: any) => void): Bluebird<void>;
|
||||
fetchDeltaWithProgress(
|
||||
imgDest: string,
|
||||
fullDeltaOpts: any,
|
||||
onProgress: (args: any) => void,
|
||||
): Bluebird<void>;
|
||||
|
||||
fetchImageWithProgress(
|
||||
image: string,
|
||||
config: { uuid: string, currentApiKey: string },
|
||||
config: { uuid: string; currentApiKey: string },
|
||||
onProgress: (args: any) => void,
|
||||
): Bluebird<void>;
|
||||
|
||||
getImageEnv(id: string): Bluebird<EnvVarObject>;
|
||||
getNetworkGateway(netName: string): Bluebird<string>;
|
||||
|
||||
}
|
||||
|
||||
export = DockerUtils;
|
||||
|
@ -3,8 +3,7 @@ import { fs } from 'mz';
|
||||
import * as path from 'path';
|
||||
|
||||
export function writeAndSyncFile(path: string, data: string): Bluebird<void> {
|
||||
return Bluebird.resolve(fs.open(path, 'w'))
|
||||
.then((fd) => {
|
||||
return Bluebird.resolve(fs.open(path, 'w')).then(fd => {
|
||||
fs.write(fd, data, 0, 'utf8')
|
||||
.then(() => fs.fsync(fd))
|
||||
.then(() => fs.close(fd));
|
||||
@ -12,8 +11,9 @@ export function writeAndSyncFile(path: string, data: string): Bluebird<void> {
|
||||
}
|
||||
|
||||
export function writeFileAtomic(path: string, data: string): Bluebird<void> {
|
||||
return Bluebird.resolve(writeAndSyncFile(`${path}.new`, data))
|
||||
.then(() => fs.rename(`${path}.new`, path));
|
||||
return Bluebird.resolve(writeAndSyncFile(`${path}.new`, data)).then(() =>
|
||||
fs.rename(`${path}.new`, path),
|
||||
);
|
||||
}
|
||||
|
||||
export function safeRename(src: string, dest: string): Bluebird<void> {
|
||||
|
@ -28,11 +28,21 @@ export function rejectOnAllInterfacesExcept(
|
||||
): Promise<void> {
|
||||
// We delete each rule and create it again to ensure ordering (all ACCEPTs before the REJECT/DROP).
|
||||
// This is especially important after a supervisor update.
|
||||
return Promise.each(allowedInterfaces, (iface) => clearAndInsertIptablesRule(`INPUT -p tcp --dport ${port} -i ${iface} -j ACCEPT`))
|
||||
.then(() => clearAndAppendIptablesRule(`INPUT -p tcp --dport ${port} -j REJECT`))
|
||||
return (
|
||||
Promise.each(allowedInterfaces, iface =>
|
||||
clearAndInsertIptablesRule(
|
||||
`INPUT -p tcp --dport ${port} -i ${iface} -j ACCEPT`,
|
||||
),
|
||||
)
|
||||
.then(() =>
|
||||
clearAndAppendIptablesRule(`INPUT -p tcp --dport ${port} -j REJECT`),
|
||||
)
|
||||
// On systems without REJECT support, fall back to DROP
|
||||
.catch(() => clearAndAppendIptablesRule(`INPUT -p tcp --dport ${port} -j DROP`))
|
||||
.return();
|
||||
.catch(() =>
|
||||
clearAndAppendIptablesRule(`INPUT -p tcp --dport ${port} -j DROP`),
|
||||
)
|
||||
.return()
|
||||
);
|
||||
}
|
||||
|
||||
export function removeRejections(port: number): Promise<void> {
|
||||
|
@ -2,4 +2,5 @@ export const appNotFoundMessage = `App not found: an app needs to be installed f
|
||||
If you've recently moved this device from another app,
|
||||
please push an app and wait for it to be installed first.`;
|
||||
|
||||
export const serviceNotFoundMessage = 'Service not found, a container must exist for this endpoint to work';
|
||||
export const serviceNotFoundMessage =
|
||||
'Service not found, a container must exist for this endpoint to work';
|
||||
|
@ -16,9 +16,9 @@ export interface ConfigMap {
|
||||
|
||||
export interface ConfigSchema {
|
||||
[key: string]: {
|
||||
source: string,
|
||||
default?: any,
|
||||
mutable?: boolean,
|
||||
removeIfNull?: boolean,
|
||||
source: string;
|
||||
default?: any;
|
||||
mutable?: boolean;
|
||||
removeIfNull?: boolean;
|
||||
};
|
||||
}
|
||||
|
3
src/lib/update-lock.d.ts
vendored
@ -4,8 +4,7 @@ export interface LockCallback {
|
||||
(appId: number, opts: { force: boolean }, fn: () => void): Promise<void>;
|
||||
}
|
||||
|
||||
export class UpdatesLockedError extends TypedError {
|
||||
}
|
||||
export class UpdatesLockedError extends TypedError {}
|
||||
|
||||
export function lock(): LockCallback;
|
||||
export function lockPath(appId: number, serviceName: string): string;
|
||||
|
@ -19,7 +19,10 @@ type NullableLiteral = number | NullableString;
|
||||
* Check an input string as a number, optionally specifying a requirement
|
||||
* to be positive
|
||||
*/
|
||||
export function checkInt(s: NullableLiteral, options: CheckIntOptions = {}): number | void {
|
||||
export function checkInt(
|
||||
s: NullableLiteral,
|
||||
options: CheckIntOptions = {},
|
||||
): number | void {
|
||||
if (s == null) {
|
||||
return;
|
||||
}
|
||||
@ -102,7 +105,9 @@ export function isValidEnv(obj: EnvVarObject): boolean {
|
||||
|
||||
return _.every(obj, (val, key) => {
|
||||
if (!isValidShortText(key)) {
|
||||
console.log('debug: Non-valid short text env var key passed to validation.isValidEnv');
|
||||
console.log(
|
||||
'debug: Non-valid short text env var key passed to validation.isValidEnv',
|
||||
);
|
||||
console.log(`\tKey: ${inspect(key)}`);
|
||||
return false;
|
||||
}
|
||||
@ -136,19 +141,25 @@ export function isValidLabelsObject(obj: LabelObject): boolean {
|
||||
|
||||
return _.every(obj, (val, key) => {
|
||||
if (!isValidShortText(key)) {
|
||||
console.log('debug: Non-valid short text label key passed to validation.isValidLabelsObject');
|
||||
console.log(
|
||||
'debug: Non-valid short text label key passed to validation.isValidLabelsObject',
|
||||
);
|
||||
console.log(`\tkey: ${inspect(key)}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!LABEL_NAME_REGEX.test(key)) {
|
||||
console.log('debug: Invalid label name passed to validation.isValidLabelsObject');
|
||||
console.log(
|
||||
'debug: Invalid label name passed to validation.isValidLabelsObject',
|
||||
);
|
||||
console.log(`\tkey: ${inspect(key)}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!_.isString(val)) {
|
||||
console.log('debug: Non-string value passed to validation.isValidLabelsObject');
|
||||
console.log(
|
||||
'debug: Non-string value passed to validation.isValidLabelsObject',
|
||||
);
|
||||
console.log(`\tval: ${inspect(val)}`);
|
||||
return false;
|
||||
}
|
||||
@ -161,7 +172,9 @@ export function isValidDeviceName(name: string): boolean {
|
||||
// currently the only disallowed value in a device name is a newline
|
||||
const newline = name.indexOf('\n') !== -1;
|
||||
if (newline) {
|
||||
console.log('debug: newline found in device name. This is invalid and should be removed');
|
||||
console.log(
|
||||
'debug: newline found in device name. This is invalid and should be removed',
|
||||
);
|
||||
}
|
||||
return !newline;
|
||||
}
|
||||
@ -179,7 +192,9 @@ function undefinedOrValidEnv(val: EnvVarObject): boolean {
|
||||
*/
|
||||
export function isValidDependentAppsObject(apps: any): boolean {
|
||||
if (!_.isObject(apps)) {
|
||||
console.log('debug: non-object passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: non-object passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\tapps: ${inspect(apps)}`);
|
||||
return false;
|
||||
}
|
||||
@ -193,7 +208,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
});
|
||||
|
||||
if (!isValidShortText(appId) || !checkInt(appId)) {
|
||||
console.log('debug: Invalid appId passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid appId passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\tappId: ${inspect(appId)}`);
|
||||
return false;
|
||||
}
|
||||
@ -201,7 +218,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
return _.conformsTo(val, {
|
||||
name: (n: any) => {
|
||||
if (!isValidShortText(n)) {
|
||||
console.log('debug: Invalid name passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid name passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\tname: ${inspect(n)}`);
|
||||
return false;
|
||||
}
|
||||
@ -209,7 +228,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
},
|
||||
image: (i: any) => {
|
||||
if (val.commit != null && !isValidShortText(i)) {
|
||||
console.log('debug: non valid image passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: non valid image passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\timage: ${inspect(i)}`);
|
||||
return false;
|
||||
}
|
||||
@ -217,7 +238,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
},
|
||||
commit: (c: any) => {
|
||||
if (c != null && !isValidShortText(c)) {
|
||||
console.log('debug: invalid commit passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: invalid commit passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\tcommit: ${inspect(c)}`);
|
||||
return false;
|
||||
}
|
||||
@ -225,7 +248,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
},
|
||||
config: (c: any) => {
|
||||
if (!undefinedOrValidEnv(c)) {
|
||||
console.log('debug: Invalid config passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid config passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\tconfig: ${inspect(c)}`);
|
||||
return false;
|
||||
}
|
||||
@ -233,7 +258,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
},
|
||||
environment: (e: any) => {
|
||||
if (!undefinedOrValidEnv(e)) {
|
||||
console.log('debug: Invalid environment passed to validation.isValidDependentAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid environment passed to validation.isValidDependentAppsObject',
|
||||
);
|
||||
console.log(`\tenvironment: ${inspect(e)}`);
|
||||
return false;
|
||||
}
|
||||
@ -245,7 +272,9 @@ export function isValidDependentAppsObject(apps: any): boolean {
|
||||
|
||||
function isValidService(service: any, serviceId: string): boolean {
|
||||
if (!isValidShortText(serviceId) || !checkInt(serviceId)) {
|
||||
console.log('debug: Invalid service id passed to validation.isValidService');
|
||||
console.log(
|
||||
'debug: Invalid service id passed to validation.isValidService',
|
||||
);
|
||||
console.log(`\tserviceId: ${inspect(serviceId)}`);
|
||||
return false;
|
||||
}
|
||||
@ -253,7 +282,9 @@ function isValidService(service: any, serviceId: string): boolean {
|
||||
return _.conformsTo(service, {
|
||||
serviceName: (n: any) => {
|
||||
if (!isValidShortText(n)) {
|
||||
console.log('debug: Invalid service name passed to validation.isValidService');
|
||||
console.log(
|
||||
'debug: Invalid service name passed to validation.isValidService',
|
||||
);
|
||||
console.log(`\tserviceName: ${inspect(n)}`);
|
||||
return false;
|
||||
}
|
||||
@ -277,7 +308,9 @@ function isValidService(service: any, serviceId: string): boolean {
|
||||
},
|
||||
imageId: (i: any) => {
|
||||
if (checkInt(i) == null) {
|
||||
console.log('debug: Invalid image id passed to validation.isValidService');
|
||||
console.log(
|
||||
'debug: Invalid image id passed to validation.isValidService',
|
||||
);
|
||||
console.log(`\timageId: ${inspect(i)}`);
|
||||
return false;
|
||||
}
|
||||
@ -285,7 +318,9 @@ function isValidService(service: any, serviceId: string): boolean {
|
||||
},
|
||||
labels: (l: any) => {
|
||||
if (!isValidLabelsObject(l)) {
|
||||
console.log('debug: Invalid labels object passed to validation.isValidService');
|
||||
console.log(
|
||||
'debug: Invalid labels object passed to validation.isValidService',
|
||||
);
|
||||
console.log(`\tlabels: ${inspect(l)}`);
|
||||
return false;
|
||||
}
|
||||
@ -311,7 +346,9 @@ export function isValidAppsObject(obj: any): boolean {
|
||||
|
||||
return _.every(obj, (val, appId) => {
|
||||
if (!isValidShortText(appId) || !checkInt(appId)) {
|
||||
console.log('debug: Invalid appId passed to validation.isValidAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid appId passed to validation.isValidAppsObject',
|
||||
);
|
||||
console.log(`\tappId: ${inspect(appId)}`);
|
||||
return false;
|
||||
}
|
||||
@ -319,7 +356,9 @@ export function isValidAppsObject(obj: any): boolean {
|
||||
return _.conformsTo(_.defaults(_.clone(val), { releaseId: undefined }), {
|
||||
name: (n: any) => {
|
||||
if (!isValidShortText(n)) {
|
||||
console.log('debug: Invalid service name passed to validation.isValidAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid service name passed to validation.isValidAppsObject',
|
||||
);
|
||||
console.log(`\tname: ${inspect(n)}`);
|
||||
return false;
|
||||
}
|
||||
@ -327,7 +366,9 @@ export function isValidAppsObject(obj: any): boolean {
|
||||
},
|
||||
releaseId: (r: any) => {
|
||||
if (r != null && checkInt(r) == null) {
|
||||
console.log('debug: Invalid releaseId passed to validation.isValidAppsObject');
|
||||
console.log(
|
||||
'debug: Invalid releaseId passed to validation.isValidAppsObject',
|
||||
);
|
||||
console.log(`\treleaseId: ${inspect(r)}`);
|
||||
return false;
|
||||
}
|
||||
@ -335,14 +376,18 @@ export function isValidAppsObject(obj: any): boolean {
|
||||
},
|
||||
services: (s: any) => {
|
||||
if (!_.isObject(s)) {
|
||||
console.log('debug: Non-object service passed to validation.isValidAppsObject');
|
||||
console.log(
|
||||
'debug: Non-object service passed to validation.isValidAppsObject',
|
||||
);
|
||||
console.log(`\tservices: ${inspect(s)}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return _.every(s, (svc, svcId) => {
if (!isValidService(svc, svcId)) {
console.log('debug: Invalid service object passed to validation.isValidAppsObject');
console.log(
'debug: Invalid service object passed to validation.isValidAppsObject',
);
console.log(`\tsvc: ${inspect(svc)}`);
return false;
}
@ -359,17 +404,19 @@ export function isValidAppsObject(obj: any): boolean {
 * Validate a dependent devices object from the state endpoint.
 */
export function isValidDependentDevicesObject(devices: any): boolean {

if (!_.isObject(devices)) {
console.log('debug: Non-object passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Non-object passed to validation.isValidDependentDevicesObject',
);
console.log(`\tdevices: ${inspect(devices)}`);
return false;
}

return _.every(devices, (val, uuid) => {

if (!isValidShortText(uuid)) {
console.log('debug: Invalid uuid passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Invalid uuid passed to validation.isValidDependentDevicesObject',
);
console.log(`\tuuid: ${inspect(uuid)}`);
return false;
}
@ -377,7 +424,9 @@ export function isValidDependentDevicesObject(devices: any): boolean {
return _.conformsTo(val, {
name: (n: any) => {
if (!isValidShortText(n)) {
console.log('debug: Invalid device name passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Invalid device name passed to validation.isValidDependentDevicesObject',
);
console.log(`\tname: ${inspect(n)}`);
return false;
}
@ -385,22 +434,31 @@ export function isValidDependentDevicesObject(devices: any): boolean {
},
apps: (a: any) => {
if (!_.isObject(a)) {
console.log('debug: Invalid apps object passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Invalid apps object passed to validation.isValidDependentDevicesObject',
);
console.log(`\tapps: ${inspect(a)}`);
return false;
}

if (_.isEmpty(a)) {
console.log('debug: Empty object passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Empty object passed to validation.isValidDependentDevicesObject',
);
return false;
}

return _.every(a, (app) => {
app = _.defaults(_.clone(app), { config: undefined, environment: undefined });
return _.every(a, app => {
app = _.defaults(_.clone(app), {
config: undefined,
environment: undefined,
});
return _.conformsTo(app, {
config: (c: any) => {
if (!undefinedOrValidEnv(c)) {
console.log('debug: Invalid config passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Invalid config passed to validation.isValidDependentDevicesObject',
);
console.log(`\tconfig: ${inspect(c)}`);
return false;
}
@ -408,7 +466,9 @@ export function isValidDependentDevicesObject(devices: any): boolean {
},
environment: (e: any) => {
if (!undefinedOrValidEnv(e)) {
console.log('debug: Invalid environment passed to validation.isValidDependentDevicesObject');
console.log(
'debug: Invalid environment passed to validation.isValidDependentDevicesObject',
);
console.log(`\tconfig: ${inspect(e)}`);
return false;
}
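Editor's note: the hunks above only reformat the debug logging inside the `_.conformsTo`-based validators. For readers unfamiliar with that lodash helper, here is a minimal, self-contained sketch of the same validation pattern; `isValidShortText` and `isValidDevice` are illustrative names, not the supervisor's actual exports.

import * as _ from 'lodash';

const isValidShortText = (t: any): boolean =>
  typeof t === 'string' && t.length <= 255;

function isValidDevice(device: any): boolean {
  return (
    _.isObject(device) &&
    _.conformsTo(device, {
      // Each predicate receives the value stored under that key
      name: (n: any) => isValidShortText(n),
      apps: (a: any) => _.isObject(a) && !_.isEmpty(a),
    })
  );
}

console.log(isValidDevice({ name: 'dev1', apps: { '1234': {} } })); // true
console.log(isValidDevice({ name: 'dev1', apps: {} })); // false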
@ -25,7 +25,7 @@ export class LocalModeManager {

public async init() {
// Setup a listener to catch state changes relating to local mode
this.config.on('change', (changed) => {
this.config.on('change', changed => {
if (changed.localMode != null) {
const localMode = checkTruthy(changed.localMode) || false;

@ -40,7 +40,9 @@ export class LocalModeManager {
}
});

const localMode = checkTruthy(await this.config.get('localMode') || false);
const localMode = checkTruthy(
(await this.config.get('localMode')) || false,
);
if (!localMode) {
// Remove any leftovers if necessary
await this.removeLocalModeArtifacts();
@ -52,35 +54,43 @@ export class LocalModeManager {
const images = await this.getLocalModeImages();
const containers = await this.getLocalModeContainers(images);

await Bluebird.map(containers, (containerId) => {
await Bluebird.map(containers, containerId => {
console.log('Removing local mode container: ', containerId);
return this.docker.getContainer(containerId).remove({ force: true });
});
await Bluebird.map(images, (imageId) => {
await Bluebird.map(images, imageId => {
console.log('Removing local mode image: ', imageId);
return this.docker.getImage(imageId).remove({ force: true });
});

// Remove any local mode state added to the database
await this.db.models('app').del().where({ source: 'local' });
await this.db
.models('app')
.del()
.where({ source: 'local' });
} catch (e) {
console.log('There was an error clearing local mode artifacts: ', e);
}

}

private async getLocalModeImages(): Promise<string[]> {
// Return all local mode images present on the local docker daemon
return _.map(await this.docker.listImages({ filters: { label: [ 'io.resin.local.image=1' ] } }), 'Id');
return _.map(
await this.docker.listImages({
filters: { label: ['io.resin.local.image=1'] },
}),
'Id',
);
}

private async getLocalModeContainers(localModeImageIds: string[]): Promise<string[]> {
private async getLocalModeContainers(
localModeImageIds: string[],
): Promise<string[]> {
return _(await this.docker.listContainers())
.filter(({ Image }) => _.includes(localModeImageIds, Image))
.map('Id')
.value();
}

}

export default LocalModeManager;
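Editor's note: the cleanup flow above can be read in isolation: list images carrying the local-mode label, find containers created from them, and force-remove both. The following is a rough standalone sketch of that pattern with dockerode and Bluebird; the label value matches the diff, but the function is illustrative rather than the supervisor's actual module.

import * as Bluebird from 'bluebird';
import * as Docker from 'dockerode';
import * as _ from 'lodash';

async function removeLocalModeArtifacts(docker: Docker): Promise<void> {
  // Images tagged by the builder as local-mode artifacts
  const images = _.map(
    await docker.listImages({
      filters: { label: ['io.resin.local.image=1'] },
    }),
    'Id',
  );
  // Containers created from any of those images
  const containers = _(await docker.listContainers())
    .filter(({ Image }) => _.includes(images, Image))
    .map('Id')
    .value();

  await Bluebird.map(containers, containerId =>
    docker.getContainer(containerId).remove({ force: true }),
  );
  await Bluebird.map(images, imageId =>
    docker.getImage(imageId).remove({ force: true }),
  );
}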
@ -62,8 +62,7 @@ export class Logger {
offlineMode,
enableLogs,
localMode,
}: LoggerSetupOptions,
) {
}: LoggerSetupOptions) {
this.balenaBackend = new BalenaLogBackend(apiEndpoint, uuid, deviceApiKey);
this.localBackend = new LocalLogBackend();

@ -131,8 +130,7 @@ export class Logger {
}

public lock(containerId: string): Bluebird.Disposer<() => void> {
return this.writeLock(containerId)
.disposer((release) => {
return this.writeLock(containerId).disposer(release => {
release();
});
}
@ -140,12 +138,21 @@ export class Logger {
public attach(
docker: Docker,
containerId: string,
serviceInfo: { serviceId: string, imageId: string },
serviceInfo: { serviceId: string; imageId: string },
): Bluebird<void> {
return Bluebird.using(this.lock(containerId), () => {
return this.attachStream(docker, OutputStream.Stdout, containerId, serviceInfo)
.then(() => {
return this.attachStream(docker, OutputStream.Stderr, containerId, serviceInfo);
return this.attachStream(
docker,
OutputStream.Stdout,
containerId,
serviceInfo,
).then(() => {
return this.attachStream(
docker,
OutputStream.Stderr,
containerId,
serviceInfo,
);
});
});
}
@ -169,7 +176,7 @@ export class Logger {

public logConfigChange(
config: { [configName: string]: string },
{ success = false, err = null }: { success?: boolean, err?: Error } = { },
{ success = false, err = null }: { success?: boolean; err?: Error } = {},
) {
const obj: LogEventObject = { config };
let message: string;
@ -196,9 +203,8 @@ export class Logger {
docker: Docker,
streamType: OutputStream,
containerId: string,
{ serviceId, imageId }: { serviceId: string, imageId: string },
{ serviceId, imageId }: { serviceId: string; imageId: string },
): Bluebird<void> {

return Bluebird.try(() => {
if (this.attached[streamType][containerId]) {
return;
@ -212,12 +218,14 @@ export class Logger {
since: Math.floor(Date.now() / 1000),
};

return docker.getContainer(containerId).logs(logsOpts)
.then((stream) => {
return docker
.getContainer(containerId)
.logs(logsOpts)
.then(stream => {
this.attached[streamType][containerId] = true;

stream
.on('error', (err) => {
.on('error', err => {
console.error('Error on container logs', err);
this.attached[streamType][containerId] = false;
})
@ -240,7 +248,7 @@ export class Logger {
this.log(message);
}
})
.on('error', (err) => {
.on('error', err => {
console.error('Error on container logs', err);
this.attached[streamType][containerId] = false;
})
@ -248,16 +256,15 @@ export class Logger {
this.attached[streamType][containerId] = false;
});
});

});

}

private objectNameForLogs(eventObj: LogEventObject): string | null {
if (eventObj == null) {
return null;
}
if (eventObj.service != null &&
if (
eventObj.service != null &&
eventObj.service.serviceName != null &&
eventObj.service.config != null &&
eventObj.service.config.image != null
@ -286,20 +293,17 @@ export class Logger {

private static extractContainerMessage(
msgBuf: Buffer,
): { message: string, timestamp: number } | null {
): { message: string; timestamp: number } | null {
// Non-tty message format from:
// https://docs.docker.com/engine/api/v1.30/#operation/ContainerAttach
if (
msgBuf[0] in [0, 1, 2] &&
_.every(msgBuf.slice(1, 7), (c) => c === 0)
) {
if (msgBuf[0] in [0, 1, 2] && _.every(msgBuf.slice(1, 7), c => c === 0)) {
// Take the header from this message, and parse it as normal
msgBuf = msgBuf.slice(8);
}
const logLine = msgBuf.toString();
const space = logLine.indexOf(' ');
if (space > 0) {
let timestamp = (new Date(logLine.substr(0, space))).getTime();
let timestamp = new Date(logLine.substr(0, space)).getTime();
if (_.isNaN(timestamp)) {
timestamp = Date.now();
}
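Editor's note: `extractContainerMessage` above deals with Docker's multiplexed (non-tty) log stream, where each frame carries an 8-byte header (stream type plus payload length) before the payload, and `timestamps: true` prefixes each line with an ISO timestamp. A simplified sketch of that parsing, with illustrative names and a stricter header check than the original, might look roughly like this:

import * as _ from 'lodash';

function extractMessage(
  msgBuf: Buffer,
): { message: string; timestamp: number } | null {
  // Frame header: byte 0 is 0/1/2 (stdin/stdout/stderr), bytes 1-3 are zero,
  // bytes 4-7 encode the payload length. Strip it when present.
  if (
    [0, 1, 2].includes(msgBuf[0]) &&
    _.every(msgBuf.slice(1, 4), c => c === 0)
  ) {
    msgBuf = msgBuf.slice(8);
  }
  const logLine = msgBuf.toString();
  const space = logLine.indexOf(' ');
  if (space > 0) {
    // The line starts with the timestamp added by `timestamps: true`
    let timestamp = new Date(logLine.substr(0, space)).getTime();
    if (_.isNaN(timestamp)) {
      timestamp = Date.now();
    }
    return { message: logLine.substr(space + 1), timestamp };
  }
  return null;
}

// Example line as it would arrive from `docker logs` with timestamps enabled
console.log(extractMessage(Buffer.from('2018-11-16T12:00:00.000Z hello')));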
@ -21,7 +21,6 @@ interface Options extends url.UrlWithParsedQuery {
}

export class BalenaLogBackend extends LogBackend {

private req: ClientRequest | null = null;
private dropCount: number = 0;
private writable: boolean = true;
@ -30,11 +29,7 @@ export class BalenaLogBackend extends LogBackend {
private stream: stream.PassThrough;
timeout: NodeJS.Timer;

public constructor(
apiEndpoint: string,
uuid: string,
deviceApiKey: string,
) {
public constructor(apiEndpoint: string, uuid: string, deviceApiKey: string) {
super();

this.opts = url.parse(`${apiEndpoint}/device/v2/${uuid}/log-stream`) as any;
@ -62,7 +57,9 @@ export class BalenaLogBackend extends LogBackend {
this.flush();
if (this.dropCount > 0) {
this.write({
message: `Warning: Suppressed ${this.dropCount} message(s) due to high load`,
message: `Warning: Suppressed ${
this.dropCount
} message(s) due to high load`,
timestamp: Date.now(),
isSystem: true,
isStdErr: true,
@ -81,10 +78,13 @@ export class BalenaLogBackend extends LogBackend {
return;
}

message = _.assign({
message = _.assign(
{
timestamp: Date.now(),
message: '',
}, message);
},
message,
);

if (!message.isSystem && message.serviceId == null) {
return;
@ -104,14 +104,17 @@ export class BalenaLogBackend extends LogBackend {
// Since we haven't sent the request body yet, and never will,the
// only reason for the server to prematurely respond is to
// communicate an error. So teardown the connection immediately
this.req.on('response', (res) => {
console.log('LogBackend: server responded with status code:', res.statusCode);
this.req.on('response', res => {
console.log(
'LogBackend: server responded with status code:',
res.statusCode,
);
this.teardown();
});

this.req.on('timeout', () => this.teardown());
this.req.on('close', () => this.teardown());
this.req.on('error', (err) => {
this.req.on('error', err => {
console.log('LogBackend: unexpected error:', err);
this.teardown();
});
@ -120,7 +123,6 @@ export class BalenaLogBackend extends LogBackend {
// respond with potential errors such as 401 authentication error
this.req.flushHeaders();

// We want a very low writable high watermark to prevent having many
// chunks stored in the writable queue of @_gzip and have them in
// @_stream instead. This is desirable because once @_gzip.flush() is
@ -142,7 +144,6 @@ export class BalenaLogBackend extends LogBackend {
this.flush();
}
}, RESPONSE_GRACE_PERIOD);

}, COOLDOWN_PERIOD);

private snooze = _.debounce(this.teardown, KEEPALIVE_TIMEOUT);
@ -150,11 +151,15 @@ export class BalenaLogBackend extends LogBackend {
// Flushing every ZLIB_TIMEOUT hits a balance between compression and
// latency. When ZLIB_TIMEOUT is 0 the compression ratio is around 5x
// whereas when ZLIB_TIMEOUT is infinity the compession ratio is around 10x.
private flush = _.throttle(() => {
private flush = _.throttle(
() => {
if (this.gzip != null) {
this.gzip.flush(zlib.Z_SYNC_FLUSH);
}
}, ZLIB_TIMEOUT, { leading: false });
},
ZLIB_TIMEOUT,
{ leading: false },
);

private teardown() {
if (this.req != null) {
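Editor's note: the backend above batches log writes through gzip into a long-lived request, throttling Z_SYNC_FLUSH calls to trade latency for compression ratio and debouncing a teardown for idle streams. A minimal sketch of just that batching pattern follows; the constants and class name are illustrative, and the compressed output goes to stdout instead of a pending HTTPS request.

import * as zlib from 'zlib';
import * as _ from 'lodash';

const ZLIB_TIMEOUT = 100; // ms between forced zlib flushes
const KEEPALIVE_TIMEOUT = 60000; // ms of silence before the stream is torn down

class BatchingSink {
  // In the real backend this gzip stream is piped into a pending HTTPS request
  private gzip = zlib.createGzip();

  constructor(private onIdle: () => void) {
    this.gzip.on('data', chunk => {
      process.stdout.write(`[compressed chunk: ${chunk.length} bytes]\n`);
    });
  }

  public write(message: object) {
    this.snooze();
    this.gzip.write(JSON.stringify(message) + '\n');
    this.flush();
  }

  // Trailing-edge throttle: at most one Z_SYNC_FLUSH per ZLIB_TIMEOUT window
  private flush = _.throttle(
    () => {
      this.gzip.flush(zlib.Z_SYNC_FLUSH);
    },
    ZLIB_TIMEOUT,
    { leading: false },
  );

  // Debounce resets on every write and only fires after a quiet period
  private snooze = _.debounce(() => this.onIdle(), KEEPALIVE_TIMEOUT);
}

const sink = new BatchingSink(() => console.log('idle: tearing down'));
sink.write({ message: 'hello', timestamp: Date.now() });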
@ -2,9 +2,4 @@ import { LocalLogBackend } from './local-backend';
import { LogBackend, LogMessage } from './log-backend';
import { BalenaLogBackend } from './balena-backend';

export {
LocalLogBackend,
LogBackend,
LogMessage,
BalenaLogBackend,
};
export { LocalLogBackend, LogBackend, LogMessage, BalenaLogBackend };
@ -6,14 +6,12 @@ import { checkInt } from '../lib/validation';
import { LogBackend, LogMessage } from './log-backend';

export class LocalLogBackend extends LogBackend {

private globalListeners: Readable[] = [];

private serviceNameResolver: (serviceId: number) => Bluebird<string>;

public log(message: LogMessage): void {
if (this.publishEnabled) {

Bluebird.try(() => {
if (!message.isSystem) {
if (this.serviceNameResolver == null) {
@ -23,13 +21,15 @@ export class LocalLogBackend extends LogBackend {
}
const svcId = checkInt(message.serviceId);
if (svcId == null) {
console.log('Warning: Non-integer service id found in local logs: ');
console.log(
'Warning: Non-integer service id found in local logs: ',
);
console.log(` ${JSON.stringify(message)}`);
return null;
}
// TODO: Can we cache this value? The service ids are reused, so
// we would need a way of invalidating the cache
return this.serviceNameResolver(svcId).then((serviceName) => {
return this.serviceNameResolver(svcId).then(serviceName => {
return _.assign({}, { serviceName }, message);
});
} else {
@ -38,12 +38,12 @@ export class LocalLogBackend extends LogBackend {
})
.then((message: LogMessage | null) => {
if (message != null) {
_.each(this.globalListeners, (listener) => {
_.each(this.globalListeners, listener => {
listener.push(`${JSON.stringify(message)}\n`);
});
}
})
.catch((e) => {
.catch(e => {
console.log('Error streaming local log output: ', e);
});
}
@ -62,10 +62,11 @@ export class LocalLogBackend extends LogBackend {
return stream;
}

public assignServiceNameResolver(resolver: (serviceId: number) => Bluebird<string>) {
public assignServiceNameResolver(
resolver: (serviceId: number) => Bluebird<string>,
) {
this.serviceNameResolver = resolver;
}

}

export default LocalLogBackend;
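Editor's note: the local backend above fans each published message out to every attached Readable as a JSON line. A compact illustrative sketch of that fan-out (class and method names are not the supervisor's):

import * as _ from 'lodash';
import { Readable } from 'stream';

class LocalLogFanout {
  private listeners: Readable[] = [];

  public attachListener(): Readable {
    // Consumers pull; data is only pushed when a message is published
    const stream = new Readable({ read: () => undefined });
    this.listeners.push(stream);
    return stream;
  }

  public log(message: { [key: string]: any }): void {
    _.each(this.listeners, listener => {
      listener.push(`${JSON.stringify(message)}\n`);
    });
  }
}

// Pipe an attached listener to stdout and publish one message
const fanout = new LocalLogFanout();
fanout.attachListener().pipe(process.stdout);
fanout.log({ serviceName: 'main', message: 'hello from local mode' });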
@ -1,4 +1,3 @@

export type LogMessage = Dictionary<any>;

export abstract class LogBackend {
@ -7,7 +7,7 @@ APIBinder = require './api-binder'
DeviceState = require './device-state'
SupervisorAPI = require './supervisor-api'
{ Logger } = require './logger'
{ checkTruthy } = require './lib/validation';
{ checkTruthy } = require './lib/validation'

constants = require './lib/constants'
@ -7,14 +7,14 @@ _ = require 'lodash'

configs = {
simple: {
compose: require('./data/docker-states/simple/compose.json');
imageInfo: require('./data/docker-states/simple/imageInfo.json');
inspect: require('./data/docker-states/simple/inspect.json');
compose: require('./data/docker-states/simple/compose.json')
imageInfo: require('./data/docker-states/simple/imageInfo.json')
inspect: require('./data/docker-states/simple/inspect.json')
}
entrypoint: {
compose: require('./data/docker-states/entrypoint/compose.json');
imageInfo: require('./data/docker-states/entrypoint/imageInfo.json');
inspect: require('./data/docker-states/entrypoint/inspect.json');
compose: require('./data/docker-states/entrypoint/compose.json')
imageInfo: require('./data/docker-states/entrypoint/imageInfo.json')
inspect: require('./data/docker-states/entrypoint/inspect.json')
}
}

@ -199,8 +199,8 @@ describe 'compose/service', ->
serviceId: 123456,
serviceName: 'test',
ports: [
"80:80"
"100:100"
'80:80'
'100:100'
]
}, { appName: 'test' })

@ -302,14 +302,14 @@ describe 'compose/service', ->
}, { appName: 'test' })

expect(makeComposeServiceWithNetwork({
"balena": {
"ipv4Address": "1.2.3.4"
'balena': {
'ipv4Address': '1.2.3.4'
}
}).toDockerContainer({ deviceName: 'foo' }).NetworkingConfig).to.deep.equal({
EndpointsConfig: {
"123456_balena": {
'123456_balena': {
IPAMConfig: {
IPV4Address: "1.2.3.4"
IPV4Address: '1.2.3.4'
},
Aliases: []
}
@ -325,7 +325,7 @@ describe 'compose/service', ->
}
}).toDockerContainer({ deviceName: 'foo' }).NetworkingConfig).to.deep.equal({
EndpointsConfig: {
"123456_balena": {
'123456_balena': {
IPAMConfig: {
IPV4Address: '1.2.3.4'
IPV6Address: '5.6.7.8'
@ -337,23 +337,23 @@ describe 'compose/service', ->
})

it 'should correctly convert Docker format to service format', ->
dockerCfg = require('./data/docker-states/simple/inspect.json');
dockerCfg = require('./data/docker-states/simple/inspect.json')
makeServiceFromDockerWithNetwork = (networks) ->
Service.fromDockerContainer(
newConfig = _.cloneDeep(dockerCfg);
newConfig = _.cloneDeep(dockerCfg)
newConfig.NetworkSettings = { Networks: networks }
)

expect(makeServiceFromDockerWithNetwork({
'123456_balena': {
IPAMConfig: {
IPv4Address: "1.2.3.4"
IPv4Address: '1.2.3.4'
},
Aliases: []
}
}).config.networks).to.deep.equal({
'123456_balena': {
"ipv4Address": "1.2.3.4"
'ipv4Address': '1.2.3.4'
}
})
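Editor's note: the expectations above exercise the mapping from compose-style network settings to Docker's NetworkingConfig, with each endpoint name prefixed by the app id. A simplified sketch of that mapping (the shapes and the function name are illustrative, not the supervisor's API):

interface ComposeNetwork {
  ipv4Address?: string;
  ipv6Address?: string;
  aliases?: string[];
}

function toNetworkingConfig(
  appId: number,
  networks: { [name: string]: ComposeNetwork },
) {
  const endpoints: { [name: string]: any } = {};
  for (const name of Object.keys(networks)) {
    const net = networks[name];
    // Network names are scoped per app, hence the `${appId}_` prefix
    endpoints[`${appId}_${name}`] = {
      IPAMConfig: {
        ...(net.ipv4Address != null ? { IPV4Address: net.ipv4Address } : {}),
        ...(net.ipv6Address != null ? { IPV6Address: net.ipv6Address } : {}),
      },
      Aliases: net.aliases || [],
    };
  }
  return { EndpointsConfig: endpoints };
}

// Mirrors the first expectation above
console.log(
  JSON.stringify(
    toNetworkingConfig(123456, { balena: { ipv4Address: '1.2.3.4' } }),
  ),
);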
@ -225,7 +225,7 @@ describe 'deviceState', ->
@deviceState.loadTargetFromFile(process.env.ROOT_MOUNTPOINT + '/apps.json')
.then =>
@deviceState.getTarget()
.then (targetState) =>
.then (targetState) ->
testTarget = _.cloneDeep(testTarget1)
testTarget.local.apps['1234'].services = _.map testTarget.local.apps['1234'].services, (s) ->
s.imageName = s.image
@ -95,33 +95,33 @@ describe 'EventTracker', ->
it 'should rate limit events of the same type', ->
@eventTracker.client.track.reset()

@eventTracker.track('test', { });
@eventTracker.track('test', { });
@eventTracker.track('test', { });
@eventTracker.track('test', { });
@eventTracker.track('test', { });
@eventTracker.track('test', {})
@eventTracker.track('test', {})
@eventTracker.track('test', {})
@eventTracker.track('test', {})
@eventTracker.track('test', {})

expect(@eventTracker.client.track).to.have.callCount(1)

it 'should rate limit events of the same type with different arguments', ->
@eventTracker.client.track.reset()

@eventTracker.track('test2', { a: 1 });
@eventTracker.track('test2', { b: 2 });
@eventTracker.track('test2', { c: 3 });
@eventTracker.track('test2', { d: 4 });
@eventTracker.track('test2', { e: 5 });
@eventTracker.track('test2', { a: 1 })
@eventTracker.track('test2', { b: 2 })
@eventTracker.track('test2', { c: 3 })
@eventTracker.track('test2', { d: 4 })
@eventTracker.track('test2', { e: 5 })

expect(@eventTracker.client.track).to.have.callCount(1)

it 'should not rate limit events of different types', ->
@eventTracker.client.track.reset()

@eventTracker.track('test3', { a: 1 });
@eventTracker.track('test4', { b: 2 });
@eventTracker.track('test5', { c: 3 });
@eventTracker.track('test6', { d: 4 });
@eventTracker.track('test7', { e: 5 });
@eventTracker.track('test3', { a: 1 })
@eventTracker.track('test4', { b: 2 })
@eventTracker.track('test5', { c: 3 })
@eventTracker.track('test6', { d: 4 })
@eventTracker.track('test7', { e: 5 })

expect(@eventTracker.client.track).to.have.callCount(5)
@ -163,7 +163,7 @@ describe 'DeviceConfig', ->
HOST_CONFIG_other: 'val',
HOST_CONFIG_baz: 'bad',
SUPERVISOR_POLL_INTERVAL: '100',
});
})

describe 'Extlinux files', ->
@ -1,8 +1,8 @@
require('mocha');
require('mocha')

{ expect } = require('chai');
{ expect } = require('chai')

ComposeUtils = require('../src/compose/utils');
ComposeUtils = require('../src/compose/utils')

describe 'Composition utilities', ->
3
typings/blinking.d.ts
vendored
3
typings/blinking.d.ts
vendored
@ -1,5 +1,4 @@
declare module 'blinking' {

interface Pattern {
blinks?: number;
onDuration?: number;
@ -8,7 +7,7 @@ declare module 'blinking' {
}

interface Blink {
start: (pattern: Pattern) => void
start: (pattern: Pattern) => void;
stop: () => void;
}

2
typings/dockerode-ext.d.ts
vendored
2
typings/dockerode-ext.d.ts
vendored
@ -1,7 +1,6 @@
import { ContainerInspectInfo } from 'dockerode';

declare module 'dockerode' {

// Extend the HostConfig interface with the missing fields.
// TODO: Add these upstream to DefinitelyTyped
interface HostConfig {
@ -22,5 +21,4 @@ declare module 'dockerode' {
Healthcheck?: DockerHealthcheck;
StopTimeout?: number;
}

}
3
typings/duration-js.d.ts
vendored
3
typings/duration-js.d.ts
vendored
@ -1,11 +1,10 @@
// From: https://github.com/icholy/Duration.js/pull/15
// Once the above is merged, use the inbuilt module types
declare module "duration-js" {
declare module 'duration-js' {
type DurationLike = Duration | string | number;
type DateLike = Date | number;

class Duration {

private _milliseconds: number;

constructor(value?: DurationLike);

1
typings/json-mask.d.ts
vendored
1
typings/json-mask.d.ts
vendored
@ -1,5 +1,4 @@
declare module 'json-mask' {

function mask(obj: Dictionary<any>, mask: string): Dictionary<any>;

// These types are not strictly correct, but they don't need to be for our usage

2
typings/typings.d.ts
vendored
2
typings/typings.d.ts
vendored
@ -1,5 +1,5 @@
// Allow importing of json files with typescript
declare module "*.json" {
declare module '*.json' {
const value: { [key: string]: any };
export default value;
}