Merge pull request #733 from resin-io/ts-changes

Further typescript conversions
This commit is contained in:
CameronDiver 2018-09-04 11:43:07 -07:00 committed by GitHub
commit 8aaa50ba4e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 617 additions and 371 deletions

View File

@ -28,6 +28,7 @@
},
"devDependencies": {
"@types/bluebird": "^3.5.20",
"@types/event-stream": "^3.3.34",
"@types/express": "^4.11.1",
"@types/knex": "^0.14.14",
"@types/lodash": "^4.14.109",

View File

@ -15,7 +15,7 @@ export interface PortBindings {
}
export interface DockerPortOptions {
exposedPorts: Map<string, {}>;
exposedPorts: Dictionary<{}>;
portBindings: PortBindings;
}

View File

@ -158,7 +158,7 @@ module.exports = class DeviceState extends EventEmitter
init: ->
@config.on 'change', (changedConfig) =>
if changedConfig.loggingEnabled?
@logger.enable(changedConfig.loggingEnabled)
@logger.enable(validation.checkTruthy(changedConfig.loggingEnabled))
if changedConfig.apiSecret?
@reportCurrentState(api_secret: changedConfig.apiSecret)

View File

@ -1,54 +0,0 @@
Promise = require 'bluebird'
_ = require 'lodash'
mixpanel = require 'mixpanel'
mask = require 'json-mask'
mixpanelMask = [
'appId'
'delay'
'error'
'interval'
'image'
'app(appId,name)'
'service(appId,serviceId,serviceName,commit,releaseId,image,labels)'
'stateDiff/local(os_version,superisor_version,ip_address,apps/*/services)'
].join(',')
module.exports = class EventTracker
constructor: ->
@_client = null
@_properties = null
_logEvent: ->
console.log(arguments...)
track: (ev, properties = {}) =>
# Allow passing in an error directly and having it assigned to the error property.
if properties instanceof Error
properties = error: properties
properties = _.cloneDeep(properties)
# If the properties has an error argument that is an Error object then it treats it nicely,
# rather than letting it become `{}`
if properties.error instanceof Error
properties.error =
message: properties.error.message
stack: properties.error.stack
# Don't log private env vars (e.g. api keys) or other secrets - use a whitelist to mask what we send
properties = mask(properties, mixpanelMask)
@_logEvent('Event:', ev, JSON.stringify(properties))
if !@_client?
return
# Mutation is bad, and it should feel bad
properties = _.assign(properties, @_properties)
@_client.track(ev, properties)
init: ({ offlineMode, mixpanelToken, uuid, mixpanelHost }) ->
Promise.try =>
@_properties =
distinct_id: uuid
uuid: uuid
if offlineMode
return
@_client = mixpanel.init(mixpanelToken, { host: mixpanelHost })

93
src/event-tracker.ts Normal file
View File

@ -0,0 +1,93 @@
import * as Bluebird from 'bluebird';
import mask = require('json-mask');
import * as _ from 'lodash';
import Mixpanel = require('mixpanel');
// Arbitrary bag of properties attached to a tracked event.
export type EventTrackProperties = Dictionary<any>;

// Configuration for EventTracker.init().
interface InitArgs {
	uuid: string;
	offlineMode: boolean;
	mixpanelHost: string;
	mixpanelToken: string;
}
// json-mask whitelist applied to event properties before they are sent,
// so private env vars, api keys and other secrets are never reported.
// NOTE(review): 'superisor_version' looks like a typo for
// 'supervisor_version', but it must match the key used in the reported
// state diff -- confirm both sides before renaming either one.
const mixpanelMask = [
	'appId',
	'delay',
	'error',
	'interval',
	'image',
	'app(appId,name)',
	'service(appId,serviceId,serviceName,commit,releaseId,image,labels)',
	'stateDiff/local(os_version,superisor_version,ip_address,apps/*/services)',
].join(',');
/**
 * Tracks supervisor events, logging them locally and (unless in offline
 * mode) forwarding them to mixpanel with secrets masked out.
 */
export class EventTracker {
	// Properties merged into every tracked event (distinct_id / uuid).
	private defaultProperties: EventTrackProperties | null;
	// Mixpanel client; `any` because the mixpanel package ships no types.
	// Stays null in offline mode, in which case events are only logged.
	private client: any;

	public constructor() {
		this.client = null;
		this.defaultProperties = null;
	}

	/**
	 * Initialise the tracker. In offline mode no mixpanel client is
	 * created and track() only logs locally.
	 */
	public init({
		offlineMode,
		mixpanelHost,
		mixpanelToken,
		uuid,
	}: InitArgs): Bluebird<void> {
		return Bluebird.try(() => {
			this.defaultProperties = {
				distinct_id: uuid,
				uuid,
			};
			if (offlineMode) {
				return;
			}
			this.client = Mixpanel.init(mixpanelToken, { host: mixpanelHost });
		});
	}

	/**
	 * Track an event. `properties` may be an Error, which is reported as
	 * `{ error }`. Defaults to an empty object so callers may omit it
	 * (restores the previous CoffeeScript API, where `properties = {}`).
	 */
	public track(
		event: string,
		properties: EventTrackProperties | Error = {},
	) {
		if (properties instanceof Error) {
			properties = { error: properties };
		}

		properties = _.cloneDeep(properties);
		if (properties.error instanceof Error) {
			// Format the error for printing, to avoid display as { }
			properties.error = {
				message: properties.error.message,
				stack: properties.error.stack,
			};
		}

		// Don't send potentially sensitive information, by using a whitelist
		properties = mask(properties, mixpanelMask);
		this.logEvent('Event:', event, JSON.stringify(properties));
		if (this.client == null) {
			return;
		}

		properties = this.assignDefaultProperties(properties);
		this.client.track(event, properties);
	}

	// Separate method so tests can stub local event logging.
	private logEvent(...args: string[]) {
		console.log(...args);
	}

	private assignDefaultProperties(
		properties: EventTrackProperties,
	): EventTrackProperties {
		return _.merge({ }, properties, this.defaultProperties);
	}
}

View File

@ -1,169 +1,174 @@
export const stopService= {
export interface LogType {
eventName: string;
humanName: string;
}
export const stopService: LogType = {
eventName: 'Service kill',
humanName: 'Killing service',
};
export const stopServiceSuccess= {
export const stopServiceSuccess: LogType = {
eventName: 'Service stop',
humanName: 'Killed service',
};
export const stopServiceNoop = {
export const stopServiceNoop: LogType = {
eventName: 'Service already stopped',
humanName: 'Service is already stopped, removing container',
};
export const stopRemoveServiceNoop = {
export const stopRemoveServiceNoop: LogType = {
eventName: 'Service already stopped and container removed',
humanName: 'Service is already stopped and the container removed',
};
export const stopServiceError = {
export const stopServiceError: LogType = {
eventName: 'Service stop error',
humanName: 'Failed to kill service',
};
export const removeDeadService = {
export const removeDeadService: LogType = {
eventName: 'Remove dead container',
humanName: 'Removing dead container',
};
export const removeDeadServiceError = {
export const removeDeadServiceError: LogType = {
eventName: 'Remove dead container error',
humanName: 'Error removing dead container',
};
export const downloadImage = {
export const downloadImage: LogType = {
eventName: 'Docker image download',
humanName: 'Downloading image',
};
export const downloadImageDelta = {
export const downloadImageDelta: LogType = {
eventName: 'Delta image download',
humanName: 'Downloading delta for image',
};
export const downloadImageSuccess = {
export const downloadImageSuccess: LogType = {
eventName: 'Image downloaded',
humanName: 'Downloaded image',
};
export const downloadImageError = {
export const downloadImageError: LogType = {
eventName: 'Image download error',
humanName: 'Failed to download image',
};
export const installService = {
export const installService: LogType = {
eventName: 'Service install',
humanName: 'Installing service',
};
export const installServiceSuccess = {
export const installServiceSuccess: LogType = {
eventName: 'Service installed',
humanName: 'Installed service',
};
export const installServiceError = {
export const installServiceError: LogType = {
eventName: 'Service install error',
humanName: 'Failed to install service',
};
export const deleteImage = {
export const deleteImage: LogType = {
eventName: 'Image removal',
humanName: 'Deleting image',
};
export const deleteImageSuccess = {
export const deleteImageSuccess: LogType = {
eventName: 'Image removed',
humanName: 'Deleted image',
};
export const deleteImageError = {
export const deleteImageError: LogType = {
eventName: 'Image removal error',
humanName: 'Failed to delete image',
};
export const imageAlreadyDeleted = {
export const imageAlreadyDeleted: LogType = {
eventName: 'Image already deleted',
humanName: 'Image already deleted',
};
export const deltaStillProcessingError = {
export const deltaStillProcessingError: LogType = {
eventName: 'Delta still processing remotely.',
humanName: 'Delta still processing remotely. Will retry...',
};
export const startService = {
export const startService: LogType = {
eventName: 'Service start',
humanName: 'Starting service',
};
export const startServiceSuccess = {
export const startServiceSuccess: LogType = {
eventName: 'Service started',
humanName: 'Started service',
};
export const startServiceNoop = {
export const startServiceNoop: LogType = {
eventName: 'Service already running',
humanName: 'Service is already running',
};
export const startServiceError = {
export const startServiceError: LogType = {
eventName: 'Service start error',
humanName: 'Failed to start service',
};
export const updateService = {
export const updateService: LogType = {
eventName: 'Service update',
humanName: 'Updating service',
};
export const updateServiceError = {
export const updateServiceError: LogType = {
eventName: 'Service update error',
humanName: 'Failed to update service',
};
export const serviceExit = {
export const serviceExit: LogType = {
eventName: 'Service exit',
humanName: 'Service exited',
};
export const serviceRestart = {
export const serviceRestart: LogType = {
eventName: 'Service restart',
humanName: 'Restarting service',
};
export const updateServiceConfig = {
export const updateServiceConfig: LogType = {
eventName: 'Service config update',
humanName: 'Updating config for service',
};
export const updateServiceConfigSuccess = {
export const updateServiceConfigSuccess: LogType = {
eventName: 'Service config updated',
humanName: 'Updated config for service',
};
export const updateServiceConfigError = {
export const updateServiceConfigError: LogType = {
eventName: 'Service config update error',
humanName: 'Failed to update config for service',
};
export const createVolume = {
export const createVolume: LogType = {
eventName: 'Volume creation',
humanName: 'Creating volume',
};
export const createVolumeError = {
export const createVolumeError: LogType = {
eventName: 'Volume creation error',
humanName: 'Error creating volume',
};
export const removeVolume = {
export const removeVolume: LogType = {
eventName: 'Volume removal',
humanName: 'Removing volume',
};
export const removeVolumeError = {
export const removeVolumeError: LogType = {
eventName: 'Volume removal error',
humanName: 'Error removing volume',
};
export const createNetwork = {
export const createNetwork: LogType = {
eventName: 'Network creation',
humanName: 'Creating network',
};
export const createNetworkError = {
export const createNetworkError: LogType = {
eventName: 'Network creation error',
humanName: 'Error creating network',
};
export const removeNetwork = {
export const removeNetwork: LogType = {
eventName: 'Network removal',
humanName: 'Removing network',
};
export const removeNetworkError = {
export const removeNetworkError: LogType = {
eventName: 'Network removal error',
humanName: 'Error removing network',
};

View File

@ -1,257 +0,0 @@
url = require 'url'
https = require 'https'
stream = require 'stream'
zlib = require 'zlib'
_ = require 'lodash'
Promise = require 'bluebird'
es = require 'event-stream'
Lock = require 'rwlock'
{ checkTruthy } = require './lib/validation'
ZLIB_TIMEOUT = 100
COOLDOWN_PERIOD = 5 * 1000
KEEPALIVE_TIMEOUT = 60 * 1000
RESPONSE_GRACE_PERIOD = 5 * 1000
MAX_LOG_LENGTH = 10 * 1000
MAX_PENDING_BYTES = 256 * 1024
class LogBackend
constructor: (apiEndpoint, uuid, deviceApiKey) ->
@publishEnabled = true
@offlineMode = false
@_req = null
@_dropCount = 0
@_writable = true
@_gzip = null
@_opts = url.parse("#{apiEndpoint}/device/v2/#{uuid}/log-stream")
@_opts.method = 'POST'
@_opts.headers = {
'Authorization': "Bearer #{deviceApiKey}"
'Content-Type': 'application/x-ndjson'
'Content-Encoding': 'gzip'
}
# This stream serves serves as a message buffer during reconnections
# while we unpipe the old, malfunctioning connection and then repipe a
# new one.
@_stream = new stream.PassThrough({
allowHalfOpen: true
# We halve the high watermark because a passthrough stream has two
# buffers, one for the writable and one for the readable side. The
# write() call only returns false when both buffers are full.
highWaterMark: MAX_PENDING_BYTES / 2
})
@_stream.on 'drain', =>
@_writable = true
@_flush()
if @_dropCount > 0
@_write({
message: "Warning: Suppressed #{@_dropCount} message(s) due to high load"
timestamp: Date.now()
isSystem: true
isStdErr: true
})
@_dropCount = 0
@_setup = _.throttle(@_setup, COOLDOWN_PERIOD)
@_snooze = _.debounce(@_teardown, KEEPALIVE_TIMEOUT)
# Flushing every ZLIB_TIMEOUT hits a balance between compression and
# latency. When ZLIB_TIMEOUT is 0 the compression ratio is around 5x
# whereas when ZLIB_TIMEOUT is infinity the compession ratio is around 10x.
@_flush = _.throttle(@_flush, ZLIB_TIMEOUT, leading: false)
_setup: ->
@_req = https.request(@_opts)
# Since we haven't sent the request body yet, and never will,the
# only reason for the server to prematurely respond is to
# communicate an error. So teardown the connection immediately
@_req.on 'response', (res) =>
console.log('LogBackend: server responded with status code:', res.statusCode)
@_teardown()
@_req.on('timeout', => @_teardown())
@_req.on('close', => @_teardown())
@_req.on 'error', (err) =>
console.log('LogBackend: unexpected error:', err)
@_teardown()
# Immediately flush the headers. This gives a chance to the server to
# respond with potential errors such as 401 authentication error
@_req.flushHeaders()
# We want a very low writable high watermark to prevent having many
# chunks stored in the writable queue of @_gzip and have them in
# @_stream instead. This is desirable because once @_gzip.flush() is
# called it will do all pending writes with that flush flag. This is
# not what we want though. If there are 100 items in the queue we want
# to write all of them with Z_NO_FLUSH and only afterwards do a
# Z_SYNC_FLUSH to maximize compression
@_gzip = zlib.createGzip(writableHighWaterMark: 1024)
@_gzip.on('error', => @_teardown())
@_gzip.pipe(@_req)
# Only start piping if there has been no error after the header flush.
# Doing it immediately would potentialy lose logs if it turned out that
# the server is unavailalbe because @_req stream would consume our
# passthrough buffer
@_timeout = setTimeout(=>
@_stream.pipe(@_gzip)
@_flush()
, RESPONSE_GRACE_PERIOD)
_teardown: ->
if @_req isnt null
clearTimeout(@_timeout)
@_req.removeAllListeners()
@_req.on('error', _.noop)
# no-op if pipe hasn't happened yet
@_stream.unpipe(@_gzip)
@_gzip.end()
@_req = null
_flush: ->
@_gzip.flush(zlib.Z_SYNC_FLUSH)
_write: (msg) ->
@_snooze()
if @_req is null
@_setup()
if @_writable
@_writable = @_stream.write(JSON.stringify(msg) + '\n')
@_flush()
else
@_dropCount += 1
log: (msg) ->
if @offlineMode or !@publishEnabled
return
if !_.isObject(msg)
return
msg = _.assign({
timestamp: Date.now()
message: ''
}, msg)
if !(msg.isSystem or msg.serviceId?)
return
msg.message = _.truncate(msg.message, { length: MAX_LOG_LENGTH, omission: '[...]' })
@_write(msg)
module.exports = class Logger
constructor: ({ @eventTracker }) ->
_lock = new Lock()
@_writeLock = Promise.promisify(_lock.async.writeLock)
@attached = { stdout: {}, stderr: {} }
@backend = null
init: (opts) =>
@backend = new LogBackend(opts.apiEndpoint, opts.uuid, opts.deviceApiKey)
@backend.offlineMode = checkTruthy(opts.offlineMode)
enable: (val) =>
@backend.publishEnabled = checkTruthy(val) ? true
logDependent: (msg, device) =>
msg.uuid = device.uuid
@backend.log(msg)
log: (msg) =>
@backend.log(msg)
logSystemMessage: (msg, obj, eventName) =>
messageObj = { message: msg, isSystem: true }
if obj?.error?
messageObj.isStdErr = true
@log(messageObj)
@eventTracker.track(eventName ? msg, obj)
lock: (containerId) =>
@_writeLock(containerId)
.disposer (release) ->
release()
_attachStream: (docker, stdoutOrStderr, containerId, { serviceId, imageId }) =>
Promise.try =>
if stdoutOrStderr not in [ 'stdout', 'stderr' ]
throw new Error("Invalid log selection #{stdoutOrStderr}")
if !@attached[stdoutOrStderr][containerId]
logsOpts = { follow: true, stdout: stdoutOrStderr == 'stdout', stderr: stdoutOrStderr == 'stderr', timestamps: true, since: Math.floor(Date.now() / 1000) }
docker.getContainer(containerId)
.logs(logsOpts)
.then (stream) =>
@attached[stdoutOrStderr][containerId] = true
stream
.on 'error', (err) =>
console.error('Error on container logs', err, err.stack)
@attached[stdoutOrStderr][containerId] = false
.pipe(es.split())
.on 'data', (logLine) =>
space = logLine.indexOf(' ')
if space > 0
msg = { timestamp: (new Date(logLine.substr(0, space))).getTime(), message: logLine.substr(space + 1), serviceId, imageId }
if stdoutOrStderr == 'stderr'
msg.isStdErr = true
@log(msg)
.on 'error', (err) =>
console.error('Error on container logs', err, err.stack)
@attached[stdoutOrStderr][containerId] = false
.on 'end', =>
@attached[stdoutOrStderr][containerId] = false
attach: (docker, containerId, serviceInfo) =>
Promise.using @lock(containerId), =>
@_attachStream(docker, 'stdout', containerId, serviceInfo)
.then =>
@_attachStream(docker, 'stderr', containerId, serviceInfo)
objectNameForLogs: (obj = {}) ->
if obj.service?.serviceName? and obj.service?.image?
return "#{obj.service.serviceName} #{obj.service.image}"
else if obj.image?
return obj.image.name
else if obj.network?.name?
return obj.network.name
else if obj.volume?.name?
return obj.volume.name
else
return null
logSystemEvent: (logType, obj = {}) ->
message = "#{logType.humanName}"
objName = @objectNameForLogs(obj)
message += " '#{objName}'" if objName?
if obj.error?
# Report the message from the original cause to the user.
errMessage = obj.error.message
if _.isEmpty(errMessage)
errMessage = 'Unknown cause'
console.log('Warning: invalid error message', obj.error)
message += " due to '#{errMessage}'"
@logSystemMessage(message, obj, logType.eventName)
return
logConfigChange: (config, { success = false, err = null } = {}) ->
obj = { config }
if success
msg = "Applied configuration change #{JSON.stringify(config)}"
eventName = 'Apply config change success'
else if err?
msg = "Error applying configuration change: #{err}"
eventName = 'Apply config change error'
obj.error = err
else
msg = "Applying configuration change #{JSON.stringify(config)}"
eventName = 'Apply config change in progress'
@logSystemMessage(msg, obj, eventName)

432
src/logger.ts Normal file
View File

@ -0,0 +1,432 @@
import * as Bluebird from 'bluebird';
import * as es from 'event-stream';
import { ClientRequest } from 'http';
import * as https from 'https';
import * as _ from 'lodash';
import * as Lock from 'rwlock';
import * as stream from 'stream';
import * as url from 'url';
import * as zlib from 'zlib';
import { EventTracker } from './event-tracker';
import Docker = require('./lib/docker-utils');
import { LogType } from './lib/log-types';
// Gzip flush cadence in ms; see the comment on `flush` below for the
// compression/latency trade-off.
const ZLIB_TIMEOUT = 100;
// Minimum interval in ms between connection (re)setup attempts.
const COOLDOWN_PERIOD = 5 * 1000;
// Tear the connection down after this long (ms) without a write.
const KEEPALIVE_TIMEOUT = 60 * 1000;
// Delay (ms) after flushing headers before piping the body, giving the
// server a chance to reject first (e.g. 401).
const RESPONSE_GRACE_PERIOD = 5 * 1000;
// Individual messages are truncated to this many characters.
const MAX_LOG_LENGTH = 10 * 1000;
// Upper bound on bytes buffered while the connection is down.
const MAX_PENDING_BYTES = 256 * 1024;

// url.parse() result plus the extra request fields we set on it.
interface Options extends url.UrlWithParsedQuery {
	method: string;
	headers: Dictionary<string>;
}

// Log messages are free-form dictionaries (timestamp, message, etc.).
type LogMessage = Dictionary<any>;
// Common interface for log sinks. Concrete backends implement log();
// Logger.init() configures offlineMode and publishEnabled on the instance.
abstract class LogBackend {
	// When truthy, log() is a no-op (set by Logger.init()).
	public offlineMode: boolean;
	public publishEnabled: boolean = true;

	public abstract log(message: LogMessage): void;
}
/**
 * Log backend that streams ndjson log messages, gzip-compressed, over a
 * long-lived HTTPS POST to the resin API's log-stream endpoint. Handles
 * reconnection (throttled), keepalive teardown, and backpressure by
 * dropping messages when the buffer is full.
 */
class ResinLogBackend extends LogBackend {
	private req: ClientRequest | null = null;
	// Messages dropped while the buffer was saturated; reported to the
	// user once the stream drains again.
	private dropCount: number = 0;
	// False while we are waiting for a 'drain' event on the stream.
	private writable: boolean = true;
	private gzip: zlib.Gzip | null = null;
	private opts: Options;
	// Buffers messages across reconnections of the gzip/https pipeline.
	private stream: stream.PassThrough;
	// Grace-period timer armed in setup(); cleared on teardown.
	// (Marked private for consistency with the other fields.)
	private timeout: NodeJS.Timer;

	public constructor(
		apiEndpoint: string,
		uuid: string,
		deviceApiKey: string,
	) {
		super();
		this.opts = url.parse(`${apiEndpoint}/device/v2/${uuid}/log-stream`) as any;
		this.opts.method = 'POST';
		this.opts.headers = {
			Authorization: `Bearer ${deviceApiKey}`,
			'Content-Type': 'application/x-ndjson',
			'Content-Encoding': 'gzip',
		};

		// This stream serves as a message buffer during reconnections while
		// we unpipe the old, malfunctioning connection and then repipe a
		// new one.
		this.stream = new stream.PassThrough({
			allowHalfOpen: true,
			// We halve the high watermark because a passthrough stream has two
			// buffers, one for the writable and one for the readable side. The
			// write() call only returns false when both buffers are full.
			highWaterMark: MAX_PENDING_BYTES / 2,
		});

		this.stream.on('drain', () => {
			this.writable = true;
			this.flush();
			if (this.dropCount > 0) {
				this.write({
					message: `Warning: Suppressed ${this.dropCount} message(s) due to high load`,
					timestamp: Date.now(),
					isSystem: true,
					isStdErr: true,
				});
				this.dropCount = 0;
			}
		});
	}

	/**
	 * Queue a message for publishing. No-ops when offline or publishing
	 * is disabled; drops non-object messages and messages that are
	 * neither system messages nor tied to a service.
	 */
	public log(message: LogMessage) {
		if (this.offlineMode || !this.publishEnabled) {
			return;
		}

		if (!_.isObject(message)) {
			return;
		}

		message = _.assign({
			timestamp: Date.now(),
			message: '',
		}, message);

		if (!message.isSystem && message.serviceId == null) {
			return;
		}

		message.message = _.truncate(message.message, {
			length: MAX_LOG_LENGTH,
			omission: '[...]',
		});

		this.write(message);
	}

	// Throttled so repeated failures retry at most once per COOLDOWN_PERIOD.
	private setup = _.throttle(() => {
		this.req = https.request(this.opts);

		// Since we haven't sent the request body yet, and never will, the
		// only reason for the server to prematurely respond is to
		// communicate an error. So teardown the connection immediately
		this.req.on('response', (res) => {
			console.log('LogBackend: server responded with status code:', res.statusCode);
			this.teardown();
		});

		this.req.on('timeout', () => this.teardown());
		this.req.on('close', () => this.teardown());
		this.req.on('error', (err) => {
			console.log('LogBackend: unexpected error:', err);
			this.teardown();
		});

		// Immediately flush the headers. This gives a chance to the server to
		// respond with potential errors such as 401 authentication error
		this.req.flushHeaders();

		// We want a very low writable high watermark to prevent having many
		// chunks stored in the writable queue of this.gzip and have them in
		// this.stream instead. This is desirable because once this.gzip.flush()
		// is called it will do all pending writes with that flush flag. This is
		// not what we want though. If there are 100 items in the queue we want
		// to write all of them with Z_NO_FLUSH and only afterwards do a
		// Z_SYNC_FLUSH to maximize compression
		this.gzip = zlib.createGzip({ writableHighWaterMark: 1024 });
		this.gzip.on('error', () => this.teardown());
		this.gzip.pipe(this.req);

		// Only start piping if there has been no error after the header flush.
		// Doing it immediately would potentially lose logs if it turned out
		// that the server is unavailable because the request stream would
		// consume our passthrough buffer
		this.timeout = setTimeout(() => {
			if (this.gzip != null) {
				this.stream.pipe(this.gzip);
				this.flush();
			}
		}, RESPONSE_GRACE_PERIOD);
	}, COOLDOWN_PERIOD);

	// Tear the connection down after KEEPALIVE_TIMEOUT without a write.
	private snooze = _.debounce(this.teardown, KEEPALIVE_TIMEOUT);

	// Flushing every ZLIB_TIMEOUT hits a balance between compression and
	// latency. When ZLIB_TIMEOUT is 0 the compression ratio is around 5x
	// whereas when ZLIB_TIMEOUT is infinity the compression ratio is
	// around 10x.
	private flush = _.throttle(() => {
		if (this.gzip != null) {
			this.gzip.flush(zlib.Z_SYNC_FLUSH);
		}
	}, ZLIB_TIMEOUT, { leading: false });

	private teardown() {
		if (this.req != null) {
			clearTimeout(this.timeout);
			this.req.removeAllListeners();
			// Swallow any late errors on the abandoned request.
			this.req.on('error', _.noop);
			if (this.gzip != null) {
				// no-op if the pipe hasn't happened yet
				this.stream.unpipe(this.gzip);
				this.gzip.end();
			}
			this.req = null;
		}
	}

	private write(message: LogMessage) {
		this.snooze();
		if (this.req == null) {
			this.setup();
		}

		if (this.writable) {
			this.writable = this.stream.write(JSON.stringify(message) + '\n');
			this.flush();
		} else {
			this.dropCount += 1;
		}
	}
}
// Constructor dependencies for Logger.
interface LoggerConstructOptions {
	eventTracker: EventTracker;
}

// Runtime configuration passed to Logger.init().
interface LoggerSetupOptions {
	apiEndpoint: string;
	uuid: string;
	deviceApiKey: string;
	offlineMode: boolean;
	enableLogs: boolean;
}

// Extra properties attached to a tracked event, or null for none.
type LogEventObject = Dictionary<any> | null;

// Which container output stream a log line came from.
enum OutputStream {
	Stdout,
	Stderr,
}
/**
 * High-level logging facade: forwards messages to the configured
 * LogBackend, tracks system events via the EventTracker, and attaches to
 * container stdout/stderr streams via the docker daemon.
 */
export class Logger {
	// Per-container lock serialising attach() so the same container's
	// streams are never attached twice concurrently.
	private writeLock: (key: string) => Bluebird<() => void> = Bluebird.promisify(
		new Lock().async.writeLock,
	);

	private backend: LogBackend | null = null;
	private eventTracker: EventTracker;
	// Which containers we are already streaming logs from, per output
	// stream, so attachStream() is idempotent.
	private attached: {
		[key in OutputStream]: { [containerId: string]: boolean }
	} = {
		[OutputStream.Stderr]: { },
		[OutputStream.Stdout]: { },
	};

	public constructor({ eventTracker }: LoggerConstructOptions) {
		this.backend = null;
		this.eventTracker = eventTracker;
	}

	/** Create and configure the log backend; call before logging. */
	public init({
		apiEndpoint,
		uuid,
		deviceApiKey,
		offlineMode,
		enableLogs,
	}: LoggerSetupOptions,
	) {
		this.backend = new ResinLogBackend(apiEndpoint, uuid, deviceApiKey);
		this.backend.offlineMode = offlineMode;
		this.backend.publishEnabled = enableLogs;
	}

	public enable(value: boolean = true) {
		if (this.backend != null) {
			this.backend.publishEnabled = value;
		}
	}

	public logDependent(message: LogMessage, device: { uuid: string }) {
		if (this.backend != null) {
			message.uuid = device.uuid;
			this.backend.log(message);
		}
	}

	public log(message: LogMessage) {
		if (this.backend != null) {
			this.backend.log(message);
		}
	}

	/**
	 * Log a system message and track it as an event. The tracked event
	 * name defaults to the message itself.
	 */
	public logSystemMessage(
		message: string,
		eventObj?: LogEventObject,
		eventName?: string,
	) {
		const msgObj: LogMessage = { message, isSystem: true };
		if (eventObj != null && eventObj.error != null) {
			msgObj.isStdErr = true;
		}
		this.log(msgObj);
		this.eventTracker.track(
			eventName != null ? eventName : message,
			eventObj != null ? eventObj : { },
		);
	}

	public lock(containerId: string): Bluebird.Disposer<() => void> {
		return this.writeLock(containerId)
			.disposer((release) => {
				release();
			});
	}

	/** Attach to both stdout and stderr of a container, under the lock. */
	public attach(
		docker: Docker,
		containerId: string,
		serviceInfo: { serviceId: string, imageId: string },
	): Bluebird<void> {
		return Bluebird.using(this.lock(containerId), () => {
			return this.attachStream(docker, OutputStream.Stdout, containerId, serviceInfo)
				.then(() => {
					return this.attachStream(docker, OutputStream.Stderr, containerId, serviceInfo);
				});
		});
	}

	public logSystemEvent(logType: LogType, obj: LogEventObject): void {
		let message = logType.humanName;
		const objectName = this.objectNameForLogs(obj);
		if (objectName != null) {
			message += ` '${objectName}'`;
		}
		if (obj && obj.error != null) {
			// Report the message from the original cause to the user.
			let errorMessage = obj.error.message;
			if (_.isEmpty(errorMessage)) {
				errorMessage = 'Unknown cause';
				console.error('Warning: invalid error message', obj.error);
			}
			message += ` due to '${errorMessage}'`;
		}
		this.logSystemMessage(message, obj, logType.eventName);
	}

	public logConfigChange(
		config: { [configName: string]: string },
		{ success = false, err = null }: { success?: boolean, err?: Error } = { },
	) {
		const obj: LogEventObject = { config };
		let message: string;
		let eventName: string;
		if (success) {
			message = `Applied configuration change ${JSON.stringify(config)}`;
			eventName = 'Apply config change success';
		} else if (err != null) {
			message = `Error applying configuration change: ${err}`;
			eventName = 'Apply config change error';
			obj.error = err;
		} else {
			// Fixed: this previously used CoffeeScript interpolation
			// (`#{...}`) inside a template literal, so the literal text was
			// logged instead of the config.
			message = `Applying configuration change ${JSON.stringify(config)}`;
			eventName = 'Apply config change in progress';
		}
		this.logSystemMessage(message, obj, eventName);
	}

	// TODO: This function interacts with the docker daemon directly,
	// using the container id, but it would be better if this was provided
	// by the Compose/Service-Manager module, as an accessor
	private attachStream(
		docker: Docker,
		streamType: OutputStream,
		containerId: string,
		{ serviceId, imageId }: { serviceId: string, imageId: string},
	): Bluebird<void> {
		return Bluebird.try(() => {
			if (this.attached[streamType][containerId]) {
				return;
			}
			const logsOpts = {
				follow: true,
				stdout: streamType === OutputStream.Stdout,
				stderr: streamType === OutputStream.Stderr,
				timestamps: true,
				since: Math.floor(Date.now() / 1000),
			};
			return docker.getContainer(containerId).logs(logsOpts)
				.then((stream) => {
					this.attached[streamType][containerId] = true;
					stream
						.on('error', (err) => {
							console.error('Error on container logs', err);
							this.attached[streamType][containerId] = false;
						})
						.pipe(es.split())
						.on('data', (logBuf: Buffer) => {
							// Docker log lines are "<timestamp> <message>".
							const logLine = logBuf.toString();
							const space = logLine.indexOf(' ');
							if (space > 0) {
								const message: LogMessage = {
									timestamp: (new Date(logLine.substr(0, space))).getTime(),
									message: logLine.substr(space + 1),
									serviceId,
									imageId,
								};
								if (streamType === OutputStream.Stderr) {
									message.isStdErr = true;
								}
								this.log(message);
							}
						})
						.on('error', (err) => {
							console.error('Error on container logs', err);
							this.attached[streamType][containerId] = false;
						})
						.on('end', () => {
							this.attached[streamType][containerId] = false;
						});
				});
		});
	}

	// Derive a human-readable name for the object a log event refers to,
	// or null if none of the known shapes match.
	private objectNameForLogs(eventObj: LogEventObject): string | null {
		if (eventObj == null) {
			return null;
		}
		if (eventObj.service != null &&
			eventObj.service.serviceName != null &&
			eventObj.service.image != null
		) {
			return `${eventObj.service.serviceName} ${eventObj.service.image}`;
		}
		if (eventObj.image != null) {
			return eventObj.image.name;
		}
		if (eventObj.network != null && eventObj.network.name != null) {
			return eventObj.network.name;
		}
		if (eventObj.volume != null && eventObj.volume.name != null) {
			return eventObj.volume.name;
		}
		return null;
	}
}

View File

@ -1,12 +1,13 @@
EventEmitter = require 'events'
EventTracker = require './event-tracker'
{ EventTracker } = require './event-tracker'
DB = require './db'
Config = require './config'
APIBinder = require './api-binder'
DeviceState = require './device-state'
SupervisorAPI = require './supervisor-api'
Logger = require './logger'
{ checkTruthy } = require './lib/validation';
constants = require './lib/constants'
@ -51,11 +52,11 @@ module.exports = class Supervisor extends EventEmitter
@apiBinder.initClient()
.then =>
@logger.init({
apiEndpoint: conf.apiEndpoint
uuid: conf.uuid
deviceApiKey: conf.deviceApiKey
offlineMode: conf.offlineMode
enable: conf.loggingEnabled
apiEndpoint: conf.apiEndpoint,
uuid: conf.uuid,
deviceApiKey: conf.deviceApiKey,
offlineMode: checkTruthy(conf.offlineMode),
enableLogs: checkTruthy(conf.loggingEnabled),
})
.then =>
@deviceState.init()

View File

@ -4,7 +4,7 @@ m = require 'mochainon'
{ expect } = m.chai
{ stub } = m.sinon
EventTracker = require '../src/event-tracker'
{ EventTracker } = require '../src/event-tracker'
describe 'EventTracker', ->
before ->
stub(mixpanel, 'init').callsFake (token) ->
@ -15,10 +15,10 @@ describe 'EventTracker', ->
@eventTrackerOffline = new EventTracker()
@eventTracker = new EventTracker()
stub(EventTracker.prototype, '_logEvent')
stub(EventTracker.prototype, 'logEvent')
after ->
EventTracker.prototype._logEvent.restore()
EventTracker.prototype.logEvent.restore()
mixpanel.init.restore()
it 'initializes in offline mode', ->
@ -28,11 +28,11 @@ describe 'EventTracker', ->
})
expect(promise).to.be.fulfilled
.then =>
expect(@eventTrackerOffline._client).to.be.null
expect(@eventTrackerOffline.client).to.be.null
it 'logs events in offline mode, with the correct properties', ->
@eventTrackerOffline.track('Test event', { appId: 'someValue' })
expect(@eventTrackerOffline._logEvent).to.be.calledWith('Event:', 'Test event', JSON.stringify({ appId: 'someValue' }))
expect(@eventTrackerOffline.logEvent).to.be.calledWith('Event:', 'Test event', JSON.stringify({ appId: 'someValue' }))
it 'initializes a mixpanel client when not in offline mode', ->
promise = @eventTracker.init({
@ -42,18 +42,18 @@ describe 'EventTracker', ->
expect(promise).to.be.fulfilled
.then =>
expect(mixpanel.init).to.have.been.calledWith('someToken')
expect(@eventTracker._client.token).to.equal('someToken')
expect(@eventTracker._client.track).to.be.a('function')
expect(@eventTracker.client.token).to.equal('someToken')
expect(@eventTracker.client.track).to.be.a('function')
it 'calls the mixpanel client track function with the event, properties and uuid as distinct_id', ->
@eventTracker.track('Test event 2', { appId: 'someOtherValue' })
expect(@eventTracker._logEvent).to.be.calledWith('Event:', 'Test event 2', JSON.stringify({ appId: 'someOtherValue' }))
expect(@eventTracker._client.track).to.be.calledWith('Test event 2', { appId: 'someOtherValue', uuid: 'barbaz', distinct_id: 'barbaz' })
expect(@eventTracker.logEvent).to.be.calledWith('Event:', 'Test event 2', JSON.stringify({ appId: 'someOtherValue' }))
expect(@eventTracker.client.track).to.be.calledWith('Test event 2', { appId: 'someOtherValue', uuid: 'barbaz', distinct_id: 'barbaz' })
it 'can be passed an Error and it is added to the event properties', ->
theError = new Error('something went wrong')
@eventTracker.track('Error event', theError)
expect(@eventTracker._client.track).to.be.calledWith('Error event', {
expect(@eventTracker.client.track).to.be.calledWith('Error event', {
error:
message: theError.message
stack: theError.stack
@ -72,7 +72,7 @@ describe 'EventTracker', ->
}
}
@eventTracker.track('Some app event', props)
expect(@eventTracker._client.track).to.be.calledWith('Some app event', {
expect(@eventTracker.client.track).to.be.calledWith('Some app event', {
service: { appId: '1' }
uuid: 'barbaz'
distinct_id: 'barbaz'

View File

@ -7,7 +7,7 @@ m = require 'mochainon'
{ expect } = m.chai
{ stub } = m.sinon
Logger = require '../src/logger'
{ Logger } = require '../src/logger'
describe 'Logger', ->
beforeEach ->
@_req = new stream.PassThrough()
@ -32,6 +32,7 @@ describe 'Logger', ->
uuid: 'deadbeef'
deviceApiKey: 'secretkey'
offlineMode: false
enableLogs: true
})
afterEach ->

View File

@ -1,7 +1,7 @@
m = require 'mochainon'
{ expect } = m.chai
{ PortMap } = require '../src/compose/ports.ts'
{ PortMap } = require '../src/compose/ports'
describe 'Ports', ->

View File

@ -8,7 +8,10 @@
"removeComments": true,
"sourceMap": true,
"strictNullChecks": true,
"outDir": "./build/src/"
"outDir": "./build/src/",
"lib": [
"es2015",
]
},
"include": [
"src/**/*.ts",

3
typings/global.d.ts vendored Normal file
View File

@ -0,0 +1,3 @@
// Generic string-keyed map used throughout the codebase in place of
// ad-hoc `{ [key: string]: T }` annotations. Declared in the global
// scope so it needs no import.
interface Dictionary<T> {
	[key: string]: T;
}

12
typings/json-mask.d.ts vendored Normal file
View File

@ -0,0 +1,12 @@
// Minimal typings for the 'json-mask' package: mask(obj, fields) returns
// a copy of obj containing only the fields selected by the mask string.
declare module 'json-mask' {
	function mask(obj: Dictionary<any>, mask: string): Dictionary<any>;

	// These types are not strictly correct, but they don't need to be for our usage
	namespace mask {
		export const compile: (mask: string) => any;
		export const filter: (obj: Dictionary<any>, compiledMask: any) => any;
	}

	export = mask;
}

1
typings/mixpanel.d.ts vendored Normal file
View File

@ -0,0 +1 @@
// The 'mixpanel' package ships no types; treat the whole module as `any`.
declare module 'mixpanel';

5
typings/zlib.d.ts vendored Normal file
View File

@ -0,0 +1,5 @@
// Augments Node's 'zlib' typings: merges `writableHighWaterMark` (used
// in logger.ts) into ZlibOptions, which the bundled @types/node version
// does not declare. NOTE(review): relies on declaration merging with the
// existing module declaration -- confirm it augments rather than replaces.
declare module 'zlib' {
	export interface ZlibOptions {
		writableHighWaterMark: number;
	}
}