Mirror of https://github.com/balena-os/balena-supervisor.git (synced 2025-02-01 00:45:23 +00:00)
Merge pull request #979 from balena-io/debug-build
Update to node10 and add debug builds
Commit 825f6e9db9
Dockerfile (62 lines changed)
@@ -2,27 +2,36 @@ ARG ARCH=amd64
# The node version here should match the version of the runtime image which is
# specified in the base-image subdirectory in the project
FROM balenalib/rpi-node:6.16.0 as rpi-node-base
FROM balenalib/armv7hf-node:6.16.0 as armv7hf-node-base
FROM balenalib/aarch64-node:6.16.0 as aarch64-node-base
FROM balenalib/raspberry-pi-node:10-run as rpi-node-base
FROM balenalib/armv7hf-node:10-run as armv7hf-node-base
FROM balenalib/aarch64-node:10-run as aarch64-node-base
RUN [ "cross-build-start" ]
RUN sed -i '/security.debian.org jessie/d' /etc/apt/sources.list
RUN [ "cross-build-end" ]

FROM balenalib/amd64-node:6.16.0 as amd64-node-base
FROM balenalib/amd64-node:10-run as amd64-node-base
RUN echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-start && chmod +x /usr/bin/cross-build-start \
&& echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-end && chmod +x /usr/bin/cross-build-end

FROM balenalib/i386-node:6.16.0 as i386-node-base
FROM balenalib/i386-node:10-run as i386-node-base
RUN echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-start && chmod +x /usr/bin/cross-build-start \
&& echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-end && chmod +x /usr/bin/cross-build-end

FROM i386-node-base as i386-nlp-node-base
FROM balenalib/i386-nlp-node:6-run as i386-nlp-node-base
RUN echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-start && chmod +x /usr/bin/cross-build-start \
&& echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-end && chmod +x /usr/bin/cross-build-end

# Setup webpack building base images
# We always do the webpack build on amd64, cause it's way faster
FROM amd64-node-base as rpi-node-build
FROM amd64-node-base as amd64-node-build
FROM amd64-node-base as armv7hf-node-build
FROM amd64-node-base as aarch64-node-build
FROM amd64-node-base as i386-node-build
FROM balenalib/amd64-node:6-build as i386-nlp-node-build
##############################################################################

# We always do the webpack build on amd64, cause it's way faster
FROM amd64-node-base as node-build
FROM $ARCH-node-build as node-build

WORKDIR /usr/src/app
@@ -61,13 +70,13 @@ WORKDIR /usr/src/app

RUN apt-get update \
&& apt-get install -y \
g++ \
git \
libsqlite3-dev \
make \
python \
rsync \
wget \
g++ \
git \
libsqlite3-dev \
make \
python \
rsync \
wget \
&& rm -rf /var/lib/apt/lists/

RUN mkdir -p rootfs-overlay && \
@@ -76,20 +85,21 @@ RUN mkdir -p rootfs-overlay && \

COPY package.json package-lock.json /usr/src/app/

# Install only the production modules that have C extensions
# First try to install with npm ci, then fallback to npm install
RUN (JOBS=MAX npm ci --production --no-optional --unsafe-perm || \
JOBS=MAX npm install --production --no-optional --unsafe-perm) \
&& npm dedupe
RUN (if [ $ARCH = "i386-nlp" ]; then \
JOBS=MAX npm install --no-optional --unsafe-perm; \
else \
JOBS=MAX npm ci --no-optional --unsafe-perm; \
fi) && npm dedupe

# Remove various uneeded filetypes in order to reduce space
# We also remove the spurious node.dtps, see https://github.com/mapbox/node-sqlite3/issues/861
RUN find . -path '*/coverage/*' -o -path '*/test/*' -o -path '*/.nyc_output/*' \
-o -name '*.tar.*' -o -name '*.in' -o -name '*.cc' \
-o -name '*.c' -o -name '*.coffee' -o -name '*.eslintrc' \
-o -name '*.h' -o -name '*.html' -o -name '*.markdown' \
-o -name '*.md' -o -name '*.patch' -o -name '*.png' \
-o -name '*.yml' -o -name "*.ts" \
-delete \
-o -name '*.tar.*' -o -name '*.in' -o -name '*.cc' \
-o -name '*.c' -o -name '*.coffee' -o -name '*.eslintrc' \
-o -name '*.h' -o -name '*.html' -o -name '*.markdown' \
-o -name '*.md' -o -name '*.patch' -o -name '*.png' \
-o -name '*.yml' -o -name "*.ts" \
-delete \
&& find . -type f -path '*/node_modules/sqlite3/deps*' -delete \
&& find . -type f -path '*/node_modules/knex/build*' -delete \
&& rm -rf node_modules/sqlite3/node.dtps
@@ -103,7 +113,7 @@ RUN [ "cross-build-end" ]

##############################################################################

# Minimal runtime image
FROM resin/$ARCH-supervisor-base:v1.3.0
FROM balena/$ARCH-supervisor-base:v1.4.6
ARG ARCH
ARG VERSION=master
ARG DEFAULT_MIXPANEL_TOKEN=bananasbananas
Dockerfile.debug (new file, 75 lines)
@@ -0,0 +1,75 @@
ARG ARCH=amd64

# The node version here should match the version of the runtime image which is
# specified in the base-image subdirectory in the project
FROM balenalib/raspberry-pi-node:10-run as rpi-base
FROM balenalib/armv7hf-node:10-run as armv7hf-base
FROM balenalib/aarch64-node:10-run as aarch64-base
RUN [ "cross-build-start" ]
RUN sed -i '/security.debian.org jessie/d' /etc/apt/sources.list
RUN [ "cross-build-end" ]

FROM balenalib/amd64-node:10-run as amd64-base
RUN echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-start && chmod +x /usr/bin/cross-build-start \
&& echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-end && chmod +x /usr/bin/cross-build-end

FROM balenalib/i386-node:10-run as i386-base
RUN echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-start && chmod +x /usr/bin/cross-build-start \
&& echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-end && chmod +x /usr/bin/cross-build-end

FROM resin/i386-node:6.13.1-slim as i386-nlp-base
RUN echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-start && chmod +x /usr/bin/cross-build-start \
&& echo '#!/bin/sh\nexit 0' > /usr/bin/cross-build-end && chmod +x /usr/bin/cross-build-end \
# TODO: Move this to a balenalib image so this isn't necessary
&& sed -i '/jessie-updates/{s/^/#/}' /etc/apt/sources.list

# A little hack to make this work with the makefile
FROM $ARCH-base AS node-build
FROM $ARCH-base AS node-deps

ARG ARCH
ARG VERSION=master
ARG DEFAULT_MIXPANEL_TOKEN=bananasbananas

RUN [ "cross-build-start" ]

WORKDIR /usr/src/app

RUN apt-get update && apt-get install ca-certificates \
iptables libnss-mdns nodejs rsync git python make wget g++ \
kmod vim

COPY package*.json ./

# i386-nlp doesn't have an npm version which supports ci
RUN if [ $ARCH = "i386-nlp" ]; then \
JOBS=MAX npm install --no-optional --unsafe-perm; \
else \
JOBS=MAX npm ci --no-optional --unsafe-perm; \
fi

COPY src src/
COPY typings typings/
COPY tsconfig.json hardcode-migrations.js fix-jsonstream.js ./

RUN npm run build:debug
RUN mkdir -p dist && echo "require('../build/app.js')" > dist/app.js

COPY entry.sh .

RUN mkdir -p rootfs-overlay && ([ ! -e rootfs-overlay/lib64 ] && ln -s /lib rootfs-overlay/lib64)

ENV CONFIG_MOUNT_POINT=/boot/config.json \
LED_FILE=/dev/null \
SUPERVISOR_IMAGE=resin/$ARCH-supervisor \
VERSION=$VERSION \
DEFAULT_MIXPANEL_TOKEN=$DEFAULT_MIXPANEL_TOKEN
COPY avahi-daemon.conf /etc/avahi/avahi-daemon.conf

VOLUME /data
HEALTHCHECK --interval=5m --start-period=1m --timeout=30s --retries=3 \
CMD wget -qO- http://127.0.0.1:${LISTEN_PORT:-48484}/v1/healthy || exit 1

RUN [ "cross-build-end" ]

CMD DEBUG=1 ./entry.sh
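As an aside, the /v1/healthy endpoint used by the HEALTHCHECK above can also be probed by hand. A minimal sketch, assuming the default LISTEN_PORT of 48484 shown above:

# query the supervisor health endpoint from inside the container
wget -qO- http://127.0.0.1:48484/v1/healthy && echo "supervisor healthy"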
Makefile (1 line changed)
@@ -154,6 +154,7 @@ endif
--build-arg ARCH=$(ARCH) \
--build-arg VERSION=$(shell jq -r .version package.json) \
--build-arg DEFAULT_MIXPANEL_TOKEN=$(MIXPANEL_TOKEN) \
`if [ -z "$$DEBUG" ]; then echo ''; else echo '-f Dockerfile.debug'; fi` \
-t $(IMAGE) .

supervisor:
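A usage sketch for the DEBUG switch added above, assuming the Makefile's existing supervisor target and ARCH variable (the exact invocation may differ):

# regular image, built from the default Dockerfile
make supervisor ARCH=amd64
# debug image, built from Dockerfile.debug because DEBUG is non-empty
make supervisor ARCH=amd64 DEBUG=1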
@@ -43,19 +43,19 @@ function tryRemove() {
}

# This is the supervisor image we will produce
TARGET_IMAGE=balena/$ARCH-supervisor:$TAG
TARGET_IMAGE=balena/$ARCH-supervisor:$TAG$DEBUG

# Intermediate images and cache
NODE_IMAGE=balena/$ARCH-supervisor-node:$TAG
NODE_BUILD_IMAGE=balena/$ARCH-supervisor-node:$TAG-build
NODE_IMAGE=balena/$ARCH-supervisor-node:$TAG$DEBUG
NODE_BUILD_IMAGE=balena/$ARCH-supervisor-node:$TAG-build$DEBUG

TARGET_CACHE=$TARGET_IMAGE
NODE_CACHE=$NODE_IMAGE
NODE_BUILD_CACHE=$NODE_BUILD_IMAGE

TARGET_CACHE_MASTER=balena/$ARCH-supervisor:master
NODE_CACHE_MASTER=balena/$ARCH-supervisor-node:master
NODE_BUILD_CACHE_MASTER=balena/$ARCH-supervisor-node:master-build
TARGET_CACHE_MASTER=balena/$ARCH-supervisor:master$DEBUG
NODE_CACHE_MASTER=balena/$ARCH-supervisor-node:master$DEBUG
NODE_BUILD_CACHE_MASTER=balena/$ARCH-supervisor-node:master-build$DEBUG

CACHE_FROM=""
function tryPullForCache() {
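A sketch of how the $DEBUG suffix above plays out, using illustrative values (ARCH=amd64, TAG=master, and DEBUG=-debug as set by the CI config further down):

# TARGET_IMAGE      -> balena/amd64-supervisor:master-debug
# NODE_IMAGE        -> balena/amd64-supervisor-node:master-debug
# NODE_BUILD_IMAGE  -> balena/amd64-supervisor-node:master-build-debug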
circle.yml (133 lines changed)
@@ -25,40 +25,41 @@ defaults: &defaults
name: Install npm dependencies
working_directory: /tmp/build/automation
command: |
JOBS=max npm install \
&& npm cache clean
JOBS=max npm install \
&& npm cache clean
- run:
name: Initialize the submodules (yocto layers)
command: |
git submodule update --init --recursive
git clean -fxd base-image
git submodule foreach --recursive git clean -fxd
git submodule update --init --recursive
git clean -fxd base-image
git submodule foreach --recursive git clean -fxd
- run:
name: Build $ARCH-supervisor
no_output_timeout: 10800
command: |
VERSION_TAG=v$(jq --raw-output .version package.json)
GIT_TAG=$(git describe --tags | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || echo "")
if [ "${VERSION_TAG}" = "${GIT_TAG}" ]; then
export EXTRA_TAG=$VERSION_TAG
fi
echo "Starting build.sh"
if [ "$DOCKER_PASSWORD" != "" ]; then
docker login --username $DOCKER_USERNAME --password $DOCKER_PASSWORD
export PUSH_IMAGES=${PUSH_IMAGES}
else
export PUSH_IMAGES=false
fi
# start the build for this architecture
export TAG=$(echo ${CIRCLE_BRANCH} | sed 's/[^a-z0-9A-Z_.-]/-/g')
export ARCH=${ARCH}
bash automation/build.sh
if [ "${CIRCLE_BRANCH}" = "master" ] && [ "${DEPLOY_TO_BALENA}" = "true" ]; then
echo "Deploying to balena API (staging)"
ARCH=${ARCH} TAG=$VERSION_TAG API_KEY=$STAGING_API_KEY API_ENDPOINT=$STAGING_API_ENDPOINT node automation/deploy-to-balena-cloud.js
echo "Deploying to balena API (production)"
ARCH=${ARCH} TAG=$VERSION_TAG API_KEY=$PRODUCTION_API_KEY API_ENDPOINT=$PRODUCTION_API_ENDPOINT node automation/deploy-to-balena-cloud.js
fi
VERSION_TAG_NO_DEBUG=v$(jq --raw-output .version package.json)
GIT_TAG=$(git describe --tags | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || echo "")
if [ "${VERSION_TAG_NO_DEBUG}" = "${GIT_TAG}" ]; then
export EXTRA_TAG=$VERSION_TAG$DEBUG
fi
VERSION_TAG=${VERSION_TAG_NO_DEBUG}${DEBUG}
echo "Starting build.sh"
if [ "$DOCKER_PASSWORD" != "" ]; then
docker login --username $DOCKER_USERNAME --password $DOCKER_PASSWORD
export PUSH_IMAGES=${PUSH_IMAGES}
else
export PUSH_IMAGES=false
fi
# start the build for this architecture
export TAG=$(echo ${CIRCLE_BRANCH} | sed 's/[^a-z0-9A-Z_.-]/-/g')
export ARCH=${ARCH}
bash automation/build.sh
if [ "${CIRCLE_BRANCH}" = "master" ] && [ "${DEPLOY_TO_BALENA}" = "true" ]; then
echo "Deploying to balena API (staging)"
ARCH=${ARCH} TAG=$VERSION_TAG API_KEY=$STAGING_API_KEY API_ENDPOINT=$STAGING_API_ENDPOINT node automation/deploy-to-balena-cloud.js
echo "Deploying to balena API (production)"
ARCH=${ARCH} TAG=$VERSION_TAG API_KEY=$PRODUCTION_API_KEY API_ENDPOINT=$PRODUCTION_API_ENDPOINT node automation/deploy-to-balena-cloud.js
fi

version: 2
jobs:
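Worked through with an illustrative version number (9.1.0 is hypothetical; DEBUG='-debug' as in the debug jobs below), the tag composition above behaves roughly like this:

# VERSION_TAG_NO_DEBUG=v9.1.0
# VERSION_TAG=v9.1.0-debug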
@@ -67,49 +68,109 @@ jobs:
environment:
DOCKER_USERNAME: travisciresin
ARCH: amd64
PUSH_IMAGES: "true"
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: ''
i386:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: i386
PUSH_IMAGES: "true"
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: ''
i386-nlp:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: i386-nlp
PUSH_IMAGES: "true"
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: ''
armv7hf:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: armv7hf
PUSH_IMAGES: "true"
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: ''
aarch64:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: aarch64
PUSH_IMAGES: "true"
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: ''
rpi:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: rpi
PUSH_IMAGES: "true"
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: ''
amd64-debug:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: amd64
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: '-debug'
i386-debug:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: i386
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: '-debug'
i386-nlp-debug:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: i386-nlp
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: '-debug'
armv7hf-debug:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: armv7hf
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: '-debug'
aarch64-debug:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: aarch64
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: '-debug'
rpi-debug:
<<: *defaults
environment:
DOCKER_USERNAME: travisciresin
ARCH: rpi
PUSH_IMAGES: 'true'
STAGING_API_ENDPOINT: https://api.balena-staging.com
PRODUCTION_API_ENDPOINT: https://api.balena-cloud.com
DEBUG: '-debug'

workflows:
version: 2

@@ -121,3 +182,9 @@ workflows:
- armv7hf
- aarch64
- i386-nlp
- amd64-debug
- i386-debug
- rpi-debug
- armv7hf-debug
- aarch64-debug
- i386-nlp-debug
entry.sh (8 lines changed)
@@ -8,7 +8,13 @@ rm -f /etc/avahi/services/*
mkdir -p /var/run/dbus
rm -f /var/run/avahi-daemon/pid
rm -f /var/run/dbus/pid
/etc/init.d/dbus-1 start
if [ -x /etc/init.d/dbus-1 ]; then
/etc/init.d/dbus-1 start;
elif [ -x /etc/init.d/dbus ]; then
/etc/init.d/dbus start;
else
echo "Could not start container local dbus daemon. Avahi services may fail!";
fi;
/etc/init.d/avahi-daemon start

# If the legacy /tmp/resin-supervisor exists on the host, a container might
package-lock.json (generated, 2208 lines changed; diff suppressed because it is too large)
package.json
@@ -10,6 +10,7 @@
"scripts": {
"start": "./entry.sh",
"build": "webpack",
"build:debug": "tsc && cp -r build/src/* build && rm -rf build/src && coffee -m -c -o build src && cp -r src/migrations build/ && cp package.json build/",
"precommit": "lint-staged",
"prettify": "prettier --config ./node_modules/resin-lint/config/.prettierrc --write \"{src,test,typings}/**/*.ts\"",
"lint:coffee": "resin-lint src/ test/",
@@ -23,7 +24,7 @@
},
"private": "true",
"dependencies": {
"sqlite3": "^4.0.4"
"sqlite3": "^4.0.8"
},
"engines": {
"node": "^6.13.1"
@@ -51,13 +52,14 @@
"body-parser": "^1.12.0",
"buffer-equal-constant-time": "^1.0.1",
"chai-events": "0.0.1",
"chokidar": "^3.0.0",
"coffee-loader": "^0.9.0",
"coffeescript": "^1.12.7",
"common-tags": "^1.8.0",
"copy-webpack-plugin": "^4.6.0",
"dbus-native": "^0.2.5",
"deep-object-diff": "^1.1.0",
"docker-delta": "^2.2.4",
"docker-delta": "^2.2.9",
"docker-progress": "^3.0.3",
"docker-toolbelt": "^3.3.7",
"duration-js": "^4.0.0",
@@ -71,6 +73,7 @@
"json-mask": "^0.3.8",
"knex": "~0.15.2",
"lint-staged": "^8.1.0",
"livepush": "^1.2.1",
"lockfile": "^1.0.1",
"lodash": "^4.17.5",
"log-timestamp": "^0.1.2",
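The new script added above is invoked like any other npm script; a minimal sketch:

npm run build:debug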
@@ -45,6 +45,7 @@ fetchAction = (service) ->
action: 'fetch'
image: imageForService(service)
serviceId: service.serviceId
serviceName: service.serviceName
}

# TODO: implement additional v2 endpoints
@@ -138,6 +139,7 @@ module.exports = class ApplicationManager extends EventEmitter
# and it's relevant mostly for the legacy GET /v1/device endpoint
# that assumes a single-container app
@reportCurrentState(update_downloaded: true)
, step.serviceName
)
removeImage: (step) =>
@images.remove(step.image)
@@ -89,6 +89,7 @@ export class Images extends (EventEmitter as {
image: Image,
opts: FetchOptions,
onFinish = _.noop,
serviceName: string,
): Promise<null> {
if (this.imageFetchFailures[image.name] != null) {
// If we are retrying a pull within the backoff time of the last failure,
@@ -148,7 +149,7 @@ export class Images extends (EventEmitter as {
try {
let id;
if (opts.delta && (opts as DeltaFetchOptions).deltaSource != null) {
id = await this.fetchDelta(image, opts, onProgress);
id = await this.fetchDelta(image, opts, onProgress, serviceName);
} else {
id = await this.fetchImage(image, opts, onProgress);
}
@@ -598,6 +599,7 @@ export class Images extends (EventEmitter as {
image: Image,
opts: FetchOptions,
onProgress: (evt: FetchProgressEvent) => void,
serviceName: string,
): Promise<string> {
this.logger.logSystemEvent(LogTypes.downloadImageDelta, { image });

@@ -609,6 +611,7 @@ export class Images extends (EventEmitter as {
image.name,
deltaOpts,
onProgress,
serviceName,
);

if (!Images.hasDigest(image.name)) {
@@ -68,6 +68,7 @@ export class DockerUtils extends DockerToolbelt {
imgDest: string,
deltaOpts: DeltaFetchOptions,
onProgress: ProgressCallback,
serviceName: string,
): Promise<string> {
const deltaSourceId =
deltaOpts.deltaSourceId != null
@@ -77,7 +78,7 @@ export class DockerUtils extends DockerToolbelt {
const timeout = deltaOpts.deltaApplyTimeout;

const log = (str: string) =>
console.log(`delta(${deltaOpts.deltaSource}): ${str}`);
console.log(`delta([${serviceName}] ${deltaOpts.deltaSource}): ${str}`);

if (!_.includes([2, 3], deltaOpts.deltaVersion)) {
log(
sync-debug.js (new executable file, 95 lines)
@@ -0,0 +1,95 @@
#!/usr/bin/env node

if (!process.argv[2] || ['help', '-h', '--help'].includes(process.argv[2])) {
  console.log(`
Sync changes in the javascript code to a running local mode supervisor on a device on the local network

Usage:
  ./sync-debug.js <device IP>

Note that the device should be running a debug image.
`);
  process.exit(1);
}

const ip = process.argv[2];

const { Livepush } = require('livepush');
const { fs } = require('mz');
const dockerode = require('dockerode');
const chokidar = require('chokidar');
const _ = require('lodash');

const docker = new dockerode({
  host: ip,
  port: 2375,
});

function extractMessage(msgBuf) {
  // Non-tty message format from:
  // https://docs.docker.com/engine/api/v1.30/#operation/ContainerAttach
  if (
    _.includes([0, 1, 2], msgBuf[0]) &&
    _.every(msgBuf.slice(1, 7), c => c === 0)
  ) {
    // Take the header from this message, and parse it as normal
    msgBuf = msgBuf.slice(8);
  }
  const logLine = msgBuf.toString();
  const space = logLine.indexOf(' ');
  if (space > 0) {
    let timestamp = new Date(logLine.substr(0, space)).getTime();
    if (_.isNaN(timestamp)) {
      timestamp = Date.now();
    }
    return {
      timestamp,
      message: logLine.substr(space + 1),
    };
  }
  return;
}

(async () => {
  // Get the supervisor container id
  const container = await docker.getContainer('resin_supervisor').inspect();
  const containerId = container.Id;
  const image = container.Image;

  const livepush = await Livepush.init(
    await fs.readFile('Dockerfile.debug'),
    '.',
    containerId,
    // a bit of a hack, as the multistage images aren't
    // present, but it shouldn't make a difference as these
    // will never change
    _.times(7, () => image),
    docker,
  );

  // TODO: Debounce these calls
  chokidar
    .watch('.', {
      ignored: /((^|[\/\\])\..|node_modules.*)/,
      ignoreInitial: true,
    })
    .on('add', path => {
      livepush.performLivepush([path], []);
    })
    .on('change', path => {
      livepush.performLivepush([path], []);
    })
    .on('unlink', path => {
      livepush.performLivepush([], [path]);
    });

  livepush.on('commandExecute', ({ command }) => {
    console.log('SYNC: executing:', command);
  });
  livepush.on('commandOutput', ({ output }) => {
    console.log(`\t${output.data.toString()}`);
  });
  livepush.on('containerRestart', () => {
    console.log('SYNC: Restarting container...');
  });
})();
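A hypothetical invocation, following the usage text above (the IP is illustrative and the device must be running a debug image in local mode):

./sync-debug.js 192.168.1.42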
@@ -226,6 +226,7 @@ describe 'ApplicationManager', ->
image: @applications.imageForService(target.local.apps[0].services[1])
serviceId: 24
appId: 1234
serviceName: 'anotherService'
}])
)

@@ -264,7 +265,8 @@ describe 'ApplicationManager', ->
action: 'fetch'
image: @applications.imageForService(target.local.apps[0].services[0])
serviceId: 23
appId: 1234
appId: 1234,
serviceName: 'aservice'
}, { action: 'noop', appId: 1234 }])
)