Merge pull request #1998 from balena-os/test-refactor

Refactor (part of) the test suite into unit/integration
This commit is contained in:
bulldozer-balena[bot] 2022-09-28 15:36:24 +00:00 committed by GitHub
commit 7b1c1882c0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
33 changed files with 1835 additions and 1953 deletions

View File

@ -6,14 +6,15 @@ testfs:
# them in the local source. These can be overridden
# in the `testfs` configuration.
filesystem:
/mnt/boot/config.json:
from: test/data/testconfig.json
/mnt/boot/config.txt:
from: test/data/mnt/boot/config.txt
/mnt/boot/device-type.json:
from: test/data/mnt/boot/device-type.json
/etc/os-release:
from: test/data/etc/os-release
/mnt/root:
/mnt/boot/config.json:
from: test/data/testconfig.json
/mnt/boot/config.txt:
from: test/data/mnt/boot/config.txt
/mnt/boot/device-type.json:
from: test/data/mnt/boot/device-type.json
/etc/os-release:
from: test/data/etc/os-release
# The `keep` list defines files that already exist in the
# filesystem and need to be backed up before setting up the test environment
keep: []
@ -21,3 +22,4 @@ testfs:
# when restoring the filesystem
cleanup:
- /data/database.sqlite
- /mnt/root/tmp/balena-supervisor/**/*.lock

View File

@ -9,6 +9,10 @@ services:
context: ./
command: sleep infinity
dbus:
build:
context: ./test/lib/dbus/
docker:
image: docker:dind
privileged: true
@ -28,13 +32,19 @@ services:
depends_on:
- balena-supervisor
- docker
- dbus
# Set required supervisor configuration variables here
environment:
DOCKER_HOST: tcp://docker:2375
DBUS_SYSTEM_BUS_ADDRESS: tcp:host=dbus,port=6667,family=ipv4
# Required by migrations
CONFIG_MOUNT_POINT: /mnt/root/mnt/boot/config.json
# Read by constants to setup `bootMountpoint`
BOOT_MOUNTPOINT: /mnt/boot
# Set required mounts as tmpfs or volumes here
# if specific files need to be backed up between tests,
# make sure to add them to the `testfs` configuration under
# .mochapodrc.yml
tmpfs:
- /data
- /mnt/boot
- /mnt/root

397
package-lock.json generated
View File

@ -81,7 +81,7 @@
"lodash": "^4.17.21",
"memoizee": "^0.4.14",
"mocha": "^8.3.2",
"mocha-pod": "^0.6.0",
"mocha-pod": "^0.8.0",
"mock-fs": "^4.14.0",
"morgan": "^1.10.0",
"network-checker": "^0.1.1",
@ -747,6 +747,26 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true
},
"node_modules/@balena/lint/node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"dev": true,
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/@balena/lint/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
@ -756,6 +776,18 @@
"node": ">=8"
}
},
"node_modules/@balena/lint/node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/@balena/lint/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
@ -2557,6 +2589,12 @@
"tweetnacl": "^0.14.3"
}
},
"node_modules/better-lock": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/better-lock/-/better-lock-2.0.3.tgz",
"integrity": "sha512-3bCaToLrmEXZcIOOVWgi1STvp3/6EpoZAmlWBeuX2MvDB0Ql2ctl/vQ0CbhQIJYQiptdGypllP3ez+TeEmdnKQ==",
"dev": true
},
"node_modules/binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
@ -2974,9 +3012,9 @@
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001407",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001407.tgz",
"integrity": "sha512-4ydV+t4P7X3zH83fQWNDX/mQEzYomossfpViCOx9zHBSMV+rIe3LFqglHHtVyvNl1FhTNxPxs3jei82iqOW04w==",
"version": "1.0.30001409",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001409.tgz",
"integrity": "sha512-V0mnJ5dwarmhYv8/MzhJ//aW68UpvnQBXv8lJ2QUsvn2pHcmAuNtu8hQEDz37XnA1iE+lRR9CIfGWWpgJ5QedQ==",
"dev": true,
"funding": [
{
@ -3532,15 +3570,6 @@
"node": ">=10.13.0"
}
},
"node_modules/copy-webpack-plugin/node_modules/serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"dependencies": {
"randombytes": "^2.1.0"
}
},
"node_modules/core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
@ -4396,9 +4425,9 @@
"dev": true
},
"node_modules/electron-to-chromium": {
"version": "1.4.255",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.255.tgz",
"integrity": "sha512-H+mFNKow6gi2P5Gi2d1Fvd3TUEJlB9CF7zYaIV9T83BE3wP1xZ0mRPbNTm0KUjyd1QiVy7iKXuIcjlDtBQMiAQ==",
"version": "1.4.257",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.257.tgz",
"integrity": "sha512-C65sIwHqNnPC2ADMfse/jWTtmhZMII+x6ADI9gENzrOiI7BpxmfKFE84WkIEl5wEg+7+SfIkwChDlsd1Erju2A==",
"dev": true
},
"node_modules/emoji-regex": {
@ -5870,22 +5899,19 @@
}
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/glob-parent": {
@ -5906,17 +5932,6 @@
"integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
"dev": true
},
"node_modules/glob/node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/global-dirs": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-2.0.1.tgz",
@ -7862,6 +7877,26 @@
}
}
},
"node_modules/make-fetch-happen/node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"optional": true,
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/make-fetch-happen/node_modules/https-proxy-agent": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
@ -7875,6 +7910,18 @@
"node": ">= 6"
}
},
"node_modules/make-fetch-happen/node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"optional": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/make-fetch-happen/node_modules/minipass-pipeline": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz",
@ -8056,7 +8103,6 @@
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
@ -8219,13 +8265,14 @@
}
},
"node_modules/mocha-pod": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/mocha-pod/-/mocha-pod-0.6.0.tgz",
"integrity": "sha512-jx4/K9vSmrzpw5n6O47c5Hd6tkV14QI/5iesut2pvFRZnf42U0+smpzt2761LkNpUcvMAITU1qdkLqlCMPjKgg==",
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/mocha-pod/-/mocha-pod-0.8.0.tgz",
"integrity": "sha512-0jhPpQMWCduiEFFFPrWWdKonwmyC6TFwgZEo7G/JhpIsmmfQm2cZGpoJ2HfUCXT1bcOuinSUPI8cweG+1fbbhw==",
"dev": true,
"dependencies": {
"@balena/compose": "^2.1.0",
"@balena/dockerignore": "^1.0.2",
"better-lock": "^2.0.3",
"debug": "^4.3.4",
"dockerode": "^3.3.2",
"fast-glob": "^3.2.11",
@ -8619,6 +8666,15 @@
"node": ">=8"
}
},
"node_modules/mocha/node_modules/serialize-javascript": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz",
"integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==",
"dev": true,
"dependencies": {
"randombytes": "^2.1.0"
}
},
"node_modules/mocha/node_modules/string-width": {
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
@ -9048,6 +9104,26 @@
"node": "^12.13.0 || ^14.15.0 || >=16.0.0"
}
},
"node_modules/node-gyp/node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"optional": true,
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/node-gyp/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
@ -9057,6 +9133,18 @@
"node": ">=8"
}
},
"node_modules/node-gyp/node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"optional": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/node-gyp/node_modules/npmlog": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz",
@ -10573,9 +10661,9 @@
"dev": true
},
"node_modules/serialize-javascript": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz",
"integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==",
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"dependencies": {
"randombytes": "^2.1.0"
@ -11622,6 +11710,24 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/terser": {
"version": "5.15.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.15.0.tgz",
"integrity": "sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==",
"dev": true,
"dependencies": {
"@jridgewell/source-map": "^0.3.2",
"acorn": "^8.5.0",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
},
"bin": {
"terser": "bin/terser"
},
"engines": {
"node": ">=10"
}
},
"node_modules/terser-webpack-plugin": {
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz",
@ -11680,33 +11786,6 @@
"url": "https://opencollective.com/webpack"
}
},
"node_modules/terser-webpack-plugin/node_modules/serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"dependencies": {
"randombytes": "^2.1.0"
}
},
"node_modules/terser-webpack-plugin/node_modules/terser": {
"version": "5.15.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.15.0.tgz",
"integrity": "sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==",
"dev": true,
"dependencies": {
"@jridgewell/source-map": "^0.3.2",
"acorn": "^8.5.0",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
},
"bin": {
"terser": "bin/terser"
},
"engines": {
"node": ">=10"
}
},
"node_modules/text-hex": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
@ -11877,9 +11956,9 @@
"dev": true
},
"node_modules/ts-loader": {
"version": "9.4.0",
"resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.4.0.tgz",
"integrity": "sha512-0G3UMhk1bjgsgiwF4rnZRAeTi69j9XMDtmDDMghGSqlWESIAS3LFgJe//GYfE4vcjbyzuURLB9Us2RZIWp2clQ==",
"version": "9.4.1",
"resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.4.1.tgz",
"integrity": "sha512-384TYAqGs70rn9F0VBnh6BPTfhga7yFNdC5gXbQpDrBj9/KsT4iRkGqKXhziofHOlE2j6YEaiTYVGKKvPhGWvw==",
"dev": true,
"dependencies": {
"chalk": "^4.1.0",
@ -13830,12 +13909,35 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true
},
"glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true
},
"minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
@ -15418,6 +15520,12 @@
"tweetnacl": "^0.14.3"
}
},
"better-lock": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/better-lock/-/better-lock-2.0.3.tgz",
"integrity": "sha512-3bCaToLrmEXZcIOOVWgi1STvp3/6EpoZAmlWBeuX2MvDB0Ql2ctl/vQ0CbhQIJYQiptdGypllP3ez+TeEmdnKQ==",
"dev": true
},
"binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
@ -15757,9 +15865,9 @@
"dev": true
},
"caniuse-lite": {
"version": "1.0.30001407",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001407.tgz",
"integrity": "sha512-4ydV+t4P7X3zH83fQWNDX/mQEzYomossfpViCOx9zHBSMV+rIe3LFqglHHtVyvNl1FhTNxPxs3jei82iqOW04w==",
"version": "1.0.30001409",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001409.tgz",
"integrity": "sha512-V0mnJ5dwarmhYv8/MzhJ//aW68UpvnQBXv8lJ2QUsvn2pHcmAuNtu8hQEDz37XnA1iE+lRR9CIfGWWpgJ5QedQ==",
"dev": true
},
"caseless": {
@ -16191,15 +16299,6 @@
"requires": {
"is-glob": "^4.0.3"
}
},
"serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"requires": {
"randombytes": "^2.1.0"
}
}
}
},
@ -16900,9 +16999,9 @@
"dev": true
},
"electron-to-chromium": {
"version": "1.4.255",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.255.tgz",
"integrity": "sha512-H+mFNKow6gi2P5Gi2d1Fvd3TUEJlB9CF7zYaIV9T83BE3wP1xZ0mRPbNTm0KUjyd1QiVy7iKXuIcjlDtBQMiAQ==",
"version": "1.4.257",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.257.tgz",
"integrity": "sha512-C65sIwHqNnPC2ADMfse/jWTtmhZMII+x6ADI9gENzrOiI7BpxmfKFE84WkIEl5wEg+7+SfIkwChDlsd1Erju2A==",
"dev": true
},
"emoji-regex": {
@ -18057,26 +18156,16 @@
}
},
"glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"dependencies": {
"minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"requires": {
"brace-expansion": "^1.1.7"
}
}
}
},
"glob-parent": {
@ -19595,6 +19684,20 @@
"ms": "2.1.2"
}
},
"glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"optional": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"https-proxy-agent": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
@ -19605,6 +19708,15 @@
"debug": "4"
}
},
"minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"optional": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"minipass-pipeline": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz",
@ -19743,7 +19855,6 @@
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
@ -20034,6 +20145,15 @@
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"dev": true
},
"serialize-javascript": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz",
"integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==",
"dev": true,
"requires": {
"randombytes": "^2.1.0"
}
},
"string-width": {
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
@ -20119,13 +20239,14 @@
}
},
"mocha-pod": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/mocha-pod/-/mocha-pod-0.6.0.tgz",
"integrity": "sha512-jx4/K9vSmrzpw5n6O47c5Hd6tkV14QI/5iesut2pvFRZnf42U0+smpzt2761LkNpUcvMAITU1qdkLqlCMPjKgg==",
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/mocha-pod/-/mocha-pod-0.8.0.tgz",
"integrity": "sha512-0jhPpQMWCduiEFFFPrWWdKonwmyC6TFwgZEo7G/JhpIsmmfQm2cZGpoJ2HfUCXT1bcOuinSUPI8cweG+1fbbhw==",
"dev": true,
"requires": {
"@balena/compose": "^2.1.0",
"@balena/dockerignore": "^1.0.2",
"better-lock": "^2.0.3",
"debug": "^4.3.4",
"dockerode": "^3.3.2",
"fast-glob": "^3.2.11",
@ -20518,12 +20639,35 @@
"wide-align": "^1.1.5"
}
},
"glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"optional": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"optional": true
},
"minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"optional": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"npmlog": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz",
@ -21721,9 +21865,9 @@
}
},
"serialize-javascript": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz",
"integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==",
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"requires": {
"randombytes": "^2.1.0"
@ -22523,6 +22667,18 @@
"integrity": "sha512-a6sumDlzyHVJWb8+YofY4TW112G6p2FCPEAFk+59gIYHv3XHRhm9ltVQ9kli4hNWeQBwSpe8cRN25x0ROunMOw==",
"dev": true
},
"terser": {
"version": "5.15.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.15.0.tgz",
"integrity": "sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==",
"dev": true,
"requires": {
"@jridgewell/source-map": "^0.3.2",
"acorn": "^8.5.0",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
}
},
"terser-webpack-plugin": {
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz",
@ -22552,27 +22708,6 @@
"ajv": "^6.12.5",
"ajv-keywords": "^3.5.2"
}
},
"serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
"dev": true,
"requires": {
"randombytes": "^2.1.0"
}
},
"terser": {
"version": "5.15.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.15.0.tgz",
"integrity": "sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==",
"dev": true,
"requires": {
"@jridgewell/source-map": "^0.3.2",
"acorn": "^8.5.0",
"commander": "^2.20.0",
"source-map-support": "~0.5.20"
}
}
}
},
@ -22718,9 +22853,9 @@
"dev": true
},
"ts-loader": {
"version": "9.4.0",
"resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.4.0.tgz",
"integrity": "sha512-0G3UMhk1bjgsgiwF4rnZRAeTi69j9XMDtmDDMghGSqlWESIAS3LFgJe//GYfE4vcjbyzuURLB9Us2RZIWp2clQ==",
"version": "9.4.1",
"resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.4.1.tgz",
"integrity": "sha512-384TYAqGs70rn9F0VBnh6BPTfhga7yFNdC5gXbQpDrBj9/KsT4iRkGqKXhziofHOlE2j6YEaiTYVGKKvPhGWvw==",
"dev": true,
"requires": {
"chalk": "^4.1.0",

View File

@ -13,7 +13,8 @@
"lint": "balena-lint -e ts -e js src/ test/ typings/ build-utils/ webpack.config.js",
"test:build": "tsc --noEmit && tsc --noEmit --project tsconfig.js.json",
"test:unit": "mocha --config test/unit/.mocharc.js",
"test:integration": "mocha --config test/integration/.mocharc.js",
"test:integration": "find test/integration -name *.spec.ts | xargs -n 1 -I {} sh -c 'mocha --config test/integration/.mocharc.js {} || exit 255'",
"test:integration:single": "find test/integration -name *.spec.ts | xargs mocha --config test/integration/.mocharc.js",
"test:legacy": "mocha --config test/legacy/.mocharc.js",
"test:node": "npm run test:unit && npm run test:integration && npm run test:legacy",
"test:env": "docker-compose -f docker-compose.test.yml -f docker-compose.dev.yml up --build; npm run compose:down",
@ -106,7 +107,7 @@
"lodash": "^4.17.21",
"memoizee": "^0.4.14",
"mocha": "^8.3.2",
"mocha-pod": "^0.6.0",
"mocha-pod": "^0.8.0",
"mock-fs": "^4.14.0",
"morgan": "^1.10.0",
"network-checker": "^0.1.1",

View File

@ -117,7 +117,7 @@ export async function removeOrphanedVolumes(
// *all* containers. This means we don't remove
// something that's part of a sideloaded container
const [dockerContainers, dockerVolumes] = await Promise.all([
docker.listContainers(),
docker.listContainers({ all: true }),
docker.listVolumes(),
]);

View File

@ -135,7 +135,7 @@ export async function lock<T extends unknown>(
}
}
// Resolve the function passed
return fn();
return await fn();
} finally {
for (const [id, release] of releases.entries()) {
// Try to dispose all the locks

View File

@ -6,9 +6,9 @@ module.exports = {
// Files to execute before running suites
'ts-node/register/transpile-only',
'tsconfig-paths/register',
'test/lib/chai.ts',
'mocha-pod/skip-setup',
'test/lib/chai.ts',
'test/lib/mocha-hooks.ts',
],
spec: ['test/integration/**/*.spec.ts'],
timeout: '30000',
};

View File

@ -11,10 +11,8 @@ import * as networkManager from '~/src/compose/network-manager';
import Service from '~/src/compose/service';
import { ServiceComposeConfig } from '~/src/compose/types/service';
import Volume from '~/src/compose/volume';
import log from '~/lib/supervisor-console';
import { InstancedAppState } from '~/src/types/state';
import * as dbHelper from '~/test-lib/db-helper';
import * as config from '~/src/config';
const DEFAULT_NETWORK = Network.fromComposeObject('default', 1, 'appuuid', {});
@ -163,46 +161,46 @@ function createCurrentState({
};
}
// TODO: application manager inferNextSteps still queries some stuff from
// the engine instead of receiving that information as parameter. Refactoring
// the method to be more of a pure function would allow us to move a lot of these tests
// to unit tests, leaving the need of integration tests just for more complex stuff that
// the application-manager also does and that is not currently tested.
// TODO: also, there is some redundancy between what is tested here and what is tested in
// the app spec, remove that redundancy to simplify the tests
describe('compose/application-manager', () => {
let testDb: dbHelper.TestDatabase;
before(async () => {
testDb = await dbHelper.createDB();
// disable log output during testing
sinon.stub(log, 'debug');
sinon.stub(log, 'warn');
sinon.stub(log, 'info');
sinon.stub(log, 'event');
sinon.stub(log, 'success');
// Stub methods that depend on external dependencies
stub(imageManager, 'isCleanupNeeded');
stub(networkManager, 'supervisorNetworkReady');
// Service.fromComposeObject gets api keys from the database
// which also depend on the local mode. This ensures the database
// is initialized. This can be removed when ApplicationManager and Service
// are refactored to work as pure functions
await config.initialized();
});
beforeEach(() => {
// Do not check for cleanup images by default
(imageManager.isCleanupNeeded as sinon.SinonStub).resolves(false);
// Do not check for network
// TODO: supervisorNetworkReady not only checks for a docker network, it also checks for the
// network interface to be created. That makes it harder to integration test with an external
// docker socket
(networkManager.supervisorNetworkReady as sinon.SinonStub).resolves(true);
});
afterEach(async () => {
await testDb.reset();
after(() => {
// Restore stubs
(imageManager.isCleanupNeeded as sinon.SinonStub).restore();
(networkManager.supervisorNetworkReady as sinon.SinonStub).restore();
});
after(async () => {
try {
await testDb.destroy();
} catch {
/* noop */
}
// Restore stubbed methods
sinon.restore();
});
it('should init', async () => {
// TODO: we don't test application manager initialization as it sets up a bunch of timers
// and listeners that may affect other tests. This is a bad pattern and it needs to be purged
// from the codebase
it.skip('should init', async () => {
await applicationManager.initialized();
});

View File

@ -1,11 +1,11 @@
import { expect } from 'chai';
import * as imageManager from '~/src/compose/images';
import * as dbHelper from '~/test-lib/db-helper';
import { createImage, withMockerode } from '~/test-lib/mockerode';
import * as sinon from 'sinon';
import { createDockerImage } from '~/test-lib/docker-helper';
import log from '~/lib/supervisor-console';
import * as Docker from 'dockerode';
import * as db from '~/src/db';
// TODO: this code is duplicated in multiple tests
// create a test module with all helper functions like this
@ -28,31 +28,17 @@ function createDBImage(
}
describe('compose/images', () => {
let testDb: dbHelper.TestDatabase;
const docker = new Docker();
before(async () => {
testDb = await dbHelper.createDB();
// disable log output during testing
sinon.stub(log, 'debug');
sinon.stub(log, 'warn');
sinon.stub(log, 'info');
sinon.stub(log, 'event');
sinon.stub(log, 'success');
});
after(async () => {
try {
await testDb.destroy();
} catch (e) {
/* noop */
}
// Restore stubbed methods
sinon.restore();
await db.initialized();
});
afterEach(async () => {
await testDb.reset();
await db.models('image').del();
});
after(async () => {
await docker.pruneImages({ filters: { dangling: { false: true } } });
});
it('finds image by matching digest on the database', async () => {
@ -61,7 +47,7 @@ describe('compose/images', () => {
dockerImageId:
'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
});
await testDb.models('image').insert([dbImage]);
await db.models('image').insert([dbImage]);
const images = [
createImage(
@ -77,6 +63,8 @@ describe('compose/images', () => {
),
];
// INFO: We use mockerode here because cannot create images with a specific digest on the engine
// but we need to be able to test looking images by digest
await withMockerode(
async (mockerode) => {
// Looking by name should fail, if not, this is a mockerode issue
@ -103,79 +91,21 @@ describe('compose/images', () => {
});
it('finds image by tag on the engine', async () => {
const images = [
createImage(
{
Id: 'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
},
{
References: ['some-image:some-tag'],
},
),
];
await withMockerode(
async (mockerode) => {
expect(await imageManager.inspectByName('some-image:some-tag'))
.to.have.property('Id')
.that.equals(
'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
);
expect(mockerode.getImage).to.have.been.calledWith(
'some-image:some-tag',
);
// Check that non existing tags are not found
await expect(
imageManager.inspectByName('non-existing-image:non-existing-tag'),
).to.be.rejected;
},
{ images },
const dockerImageId = await createDockerImage(
'some-image:some-tag',
['io.balena.testing=1'],
docker,
);
});
it('finds image by tag on the database', async () => {
const dbImage = createDBImage({
name: 'some-image:some-tag',
dockerImageId:
'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
});
await testDb.models('image').insert([dbImage]);
expect(await imageManager.inspectByName('some-image:some-tag'))
.to.have.property('Id')
.that.equals(dockerImageId);
const images = [
createImage(
{
Id: 'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
},
{
References: [
// Reference is different but there is a matching name on the database
'registry2.balena-cloud.com/v2/bbbb@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
],
},
),
];
await expect(
imageManager.inspectByName('non-existing-image:non-existing-tag'),
).to.be.rejected;
await withMockerode(
async (mockerode) => {
expect(await imageManager.inspectByName(dbImage.name))
.to.have.property('Id')
.that.equals(
'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
);
expect(mockerode.getImage).to.have.been.calledWith(
'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
);
// Check that non existing tags are not found
await expect(
imageManager.inspectByName('non-existing-image:non-existing-tag'),
).to.be.rejected;
},
{ images },
);
await docker.getImage('some-image:some-tag').remove();
});
it('finds image by reference on the engine', async () => {
@ -194,6 +124,10 @@ describe('compose/images', () => {
),
];
// INFO: we cannot create specific references to test on the engine, we need to
// use mockerode instead.
// QUESTION: Maybe the image search is overspecified and we should find a
// common identifier for all image search (e.g. label?)
await withMockerode(
async (mockerode) => {
// This is really testing mockerode functionality
@ -245,7 +179,7 @@ describe('compose/images', () => {
});
it('returns all images in both the database and the engine', async () => {
await testDb.models('image').insert([
await db.models('image').insert([
createDBImage({
name: 'first-image-name:first-image-tag',
serviceName: 'app_1',
@ -311,65 +245,36 @@ describe('compose/images', () => {
});
it('removes a single legacy db images without dockerImageId', async () => {
await createDockerImage(
'image-name:image-tag',
['io.balena.testing=1'],
docker,
);
// Legacy images don't have a dockerImageId so they are queried by name
const imageToRemove = createDBImage({
name: 'image-name:image-tag',
});
await testDb.models('image').insert([imageToRemove]);
await db.models('image').insert([imageToRemove]);
// Engine image state
const images = [
createImage(
{
Id: 'deadbeef',
},
{
// Image references
References: [
'image-name:image-tag@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
],
},
),
createImage(
{
Id: 'deadca1f',
},
{
References: ['balena/aarch64-supervisor:11.11.11'],
},
),
];
// Check that our legacy image exists
// failsafe to check for mockerode problems
await expect(
docker.getImage(imageToRemove.name).inspect(),
'image exists on the engine before test',
).to.not.be.rejected;
// Perform the test with our specially crafted data
await withMockerode(
async (mockerode) => {
// Check that our legacy image exists
// failsafe to check for mockerode problems
await expect(
mockerode.getImage(imageToRemove.name).inspect(),
'image exists on the engine before test',
).to.not.be.rejected;
// Check that the image exists on the db
expect(
await db.models('image').select().where(imageToRemove),
).to.have.lengthOf(1);
// Check that the image exists on the db
expect(
await testDb.models('image').select().where(imageToRemove),
).to.have.lengthOf(1);
// Now remove this image...
await imageManager.remove(imageToRemove);
// Now remove this image...
await imageManager.remove(imageToRemove);
// This checks that the remove method was ultimately called
expect(mockerode.removeImage).to.have.been.calledOnceWith(
imageToRemove.name,
);
// Check that the image was removed from the db
expect(await testDb.models('image').select().where(imageToRemove)).to.be
.empty;
},
{ images },
);
// Check that the image was removed from the db
expect(await db.models('image').select().where(imageToRemove)).to.be.empty;
});
it('removes image from DB and engine when there is a single DB image with matching name', async () => {
@ -380,7 +285,7 @@ describe('compose/images', () => {
});
// Insert images into the db
await testDb.models('image').insert([
await db.models('image').insert([
imageToRemove,
createDBImage({
name: 'registry2.balena-cloud.com/v2/two@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
@ -436,7 +341,7 @@ describe('compose/images', () => {
// Check that only one image with this dockerImageId exists in the db
// in memory db is a bit flaky sometimes, this checks for issues
expect(
await testDb.models('image').where(imageToRemove).select(),
await db.models('image').where(imageToRemove).select(),
'image exists on db before the test',
).to.have.lengthOf(1);
@ -449,97 +354,62 @@ describe('compose/images', () => {
);
// Check that the database no longer has this image
expect(await testDb.models('image').select().where(imageToRemove)).to.be
expect(await db.models('image').select().where(imageToRemove)).to.be
.empty;
// Expect 1 entry left on the database
expect(await testDb.models('image').select()).to.have.lengthOf(1);
expect(await db.models('image').select()).to.have.lengthOf(1);
},
{ images },
);
});
it('removes the requested image even when there are multiple DB images with same docker ID', async () => {
const dockerImageId = await createDockerImage(
'registry2.balena-cloud.com/v2/one',
['io.balena.testing=1'],
docker,
);
const imageToRemove = createDBImage({
name: 'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
dockerImageId: 'sha256:image-id-one',
name: 'registry2.balena-cloud.com/v2/one',
dockerImageId,
});
const imageWithSameDockerImageId = createDBImage({
name: 'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
// Same imageId
dockerImageId: 'sha256:image-id-one',
dockerImageId,
});
// Insert images into the db
await testDb.models('image').insert([
await db.models('image').insert([
imageToRemove,
// Another image from the same app
imageWithSameDockerImageId,
]);
// Engine image state
const images = [
// The image to remove
createImage(
{
Id: imageToRemove.dockerImageId!,
},
{
References: [imageToRemove.name, imageWithSameDockerImageId.name],
},
),
// Other images to test
createImage(
{
Id: 'aaa',
},
{
References: ['balena/aarch64-supervisor:11.11.11'],
},
),
];
// Check that multiple images with the same dockerImageId are returned
expect(
await db
.models('image')
.where({ dockerImageId: imageToRemove.dockerImageId })
.select(),
).to.have.lengthOf(2);
// Perform the test with our specially crafted data
await withMockerode(
async (mockerode) => {
// Check that the image is on the engine
// really checking mockerode behavior
await expect(
mockerode.getImage(imageToRemove.dockerImageId!).inspect(),
'image exists on the engine before the test',
).to.not.be.rejected;
// Now remove these images
await imageManager.remove(imageToRemove);
// Check that multiple images with the same dockerImageId are returned
expect(
await testDb
.models('image')
.where({ dockerImageId: imageToRemove.dockerImageId })
.select(),
).to.have.lengthOf(2);
// Check that the database no longer has this image
expect(await db.models('image').select().where(imageToRemove)).to.be.empty;
// Now remove these images
await imageManager.remove(imageToRemove);
// Check that only the image with the right name was removed
expect(mockerode.removeImage).to.have.been.calledOnceWith(
imageToRemove.name,
);
// Check that the database no longer has this image
expect(await testDb.models('image').select().where(imageToRemove)).to.be
.empty;
// Check that the image with the same dockerImageId is still on the database
expect(
await testDb
.models('image')
.select()
.where({ dockerImageId: imageWithSameDockerImageId.dockerImageId }),
).to.have.lengthOf(1);
},
{ images },
);
// Check that the image with the same dockerImageId is still on the database
expect(
await db
.models('image')
.select()
.where({ dockerImageId: imageWithSameDockerImageId.dockerImageId }),
).to.have.lengthOf(1);
});
it('removes image from DB by tag when deltas are being used', async () => {
@ -555,7 +425,7 @@ describe('compose/images', () => {
});
// Insert images into the db
await testDb.models('image').insert([
await db.models('image').insert([
imageToRemove,
// Another image from the same app
imageWithSameDockerImageId,
@ -589,12 +459,12 @@ describe('compose/images', () => {
// Check that a single image is returned when given entire object
expect(
await testDb.models('image').select().where(imageToRemove),
await db.models('image').select().where(imageToRemove),
).to.have.lengthOf(1);
// Check that multiple images with the same dockerImageId are returned
expect(
await testDb
await db
.models('image')
.where({ dockerImageId: imageToRemove.dockerImageId })
.select(),
@ -609,15 +479,12 @@ describe('compose/images', () => {
);
// Check that the database no longer has this image
expect(await testDb.models('image').select().where(imageToRemove)).to.be
expect(await db.models('image').select().where(imageToRemove)).to.be
.empty;
// Check that the image with the same dockerImageId is still on the database
expect(
await testDb
.models('image')
.select()
.where(imageWithSameDockerImageId),
await db.models('image').select().where(imageWithSameDockerImageId),
).to.have.lengthOf(1);
},
{ images },

View File

@ -0,0 +1,188 @@
import { expect } from 'chai';
import { Network } from '~/src/compose/network';
import { createNetwork, withMockerode } from '~/test-lib/mockerode';
import * as Docker from 'dockerode';
// Integration tests for compose/network against a real docker engine,
// falling back to mockerode only where engine errors must be simulated.
describe('compose/network: integration tests', () => {
	const docker = new Docker();

	after(async () => {
		const allNetworks = await docker.listNetworks();

		// Delete any remaining networks
		await Promise.all(
			allNetworks
				.filter(({ Name }) => !['bridge', 'host', 'none'].includes(Name)) // exclude docker default networks from the cleanup
				.map(({ Name }) => docker.getNetwork(Name).remove()),
		);
	});

	describe('creating and removing networks', () => {
		// This tests the happy path on the engine, including create and remove
		it('creates a new network on the engine with the given data', async () => {
			const network = Network.fromComposeObject('default', 12345, 'deadbeef', {
				ipam: {
					driver: 'default',
					config: [
						{
							subnet: '172.20.0.0/16',
							ip_range: '172.20.10.0/24',
							gateway: '172.20.0.1',
						},
					],
					options: {},
				},
			});

			// Create the network
			await network.create();
			const dockerNetworkName = Network.generateDockerName(
				network.appUuid!,
				network.name,
			);
			// This should not throw
			const dockerNetwork = await docker
				.getNetwork(dockerNetworkName)
				.inspect();

			// Check that the network was created with the proper configuration
			expect(dockerNetwork).to.deep.include({
				Name: 'deadbeef_default',
				Driver: 'bridge',
				IPAM: {
					Driver: 'default',
					Config: [
						{
							Subnet: '172.20.0.0/16',
							IPRange: '172.20.10.0/24',
							Gateway: '172.20.0.1',
						},
					],
					Options: {},
				},
				EnableIPv6: false,
				Internal: false,
				Labels: {
					'io.balena.supervised': 'true',
					'io.balena.app-id': '12345',
				},
				Options: {},
			});

			// Test network removal
			await network.remove();

			// The network should no longer exist. Note: query by name — the
			// previous version passed the inspect result object here, which made
			// the rejection assertion pass for the wrong reason.
			await expect(docker.getNetwork(dockerNetworkName).inspect()).to.be
				.rejected;
		});

		it('throws the error if there is a problem while creating the network', async () => {
			// mockerode is used here because we need createNetwork to fail
			await withMockerode(async (mockerode) => {
				const network = Network.fromComposeObject(
					'default',
					12345,
					'deadbeef',
					{
						ipam: {
							driver: 'default',
							config: [
								{
									subnet: '172.20.0.0/16',
									ip_range: '172.20.10.0/24',
									gateway: '172.20.0.1',
								},
							],
							options: {},
						},
					},
				);

				// Re-define the dockerode.createNetwork to throw
				mockerode.createNetwork.rejects('Unknown engine error');

				// Creating the network should fail
				return expect(network.create()).to.be.rejected;
			});
		});
	});

	describe('removing a network', () => {
		it('removes a legacy network from the engine if it exists', async () => {
			// Creates a legacy network (named by appId instead of appUuid)
			await docker.createNetwork({ Name: '12345_default' });

			// Create a dummy network object
			const network = Network.fromComposeObject(
				'default',
				12345,
				'deadbeef',
				{},
			);

			// Perform the operation
			await network.remove();

			await expect(docker.getNetwork('12345_default').inspect()).to.be
				.rejected;
		});

		it('ignores the request if the given network does not exist on the engine', async () => {
			// Create an unrelated network to check that it is left alone
			await docker.createNetwork({
				Name: 'some_network',
			});

			// Create a dummy network object
			const network = Network.fromComposeObject(
				'default',
				12345,
				'deadbeef',
				{},
			);

			// This should not fail
			await expect(network.remove()).to.not.be.rejected;

			// We expect the network state to remain constant
			await expect(docker.getNetwork('some_network').inspect()).to.not.be
				.rejected;

			// Cleanup
			await docker.getNetwork('some_network').remove();
		});

		it('throws the error if there is a problem while removing the network', async () => {
			// Create a mock network to add to the mock engine
			const mockNetwork = createNetwork({
				Id: 'aaaaaaaa',
				Name: 'a173bdb734884b778f5cc3dffd18733e_default',
				Labels: {
					'io.balena.app-id': '12345',
				},
			});

			await withMockerode(
				async (mockerode) => {
					// We can change the return value of the mockerode removeNetwork
					// to have the remove operation fail
					mockerode.removeNetwork.throws({
						statusCode: 500,
						message: 'Failed to remove the network',
					});

					// Create a dummy network object
					const network = Network.fromComposeObject(
						'default',
						12345,
						'a173bdb734884b778f5cc3dffd18733e',
						{},
					);

					await expect(network.remove()).to.be.rejected;
				},
				{ networks: [mockNetwork] },
			);
		});
	});
});

View File

@ -0,0 +1,70 @@
import * as _ from 'lodash';
import { expect } from 'chai';
import Service from '~/src/compose/service';
import * as apiKeys from '~/lib/api-keys';
// Integration tests for the service feature labels that require database
// access (service-scoped api keys are generated/stored via the db).
describe('compose/service: integration tests', () => {
	describe('Feature labels', () => {
		// TODO: this is the only part of the service module that needs to be integration tested. This is because it
		// needs to access the database to get the service scoped api keys. If the keys were generated/queried in
		// App.fromTargetState and passed to the service as a parameter, it would push this module to the domain model
		// which is where it belongs
		describe('io.balena.supervisor-api', () => {
			it('sets BALENA_SUPERVISOR_HOST, BALENA_SUPERVISOR_PORT and BALENA_SUPERVISOR_ADDRESS env vars', async () => {
				const service = await Service.fromComposeObject(
					{
						appId: 123456,
						serviceId: 123456,
						serviceName: 'foobar',
						labels: {
							'io.balena.features.supervisor-api': '1',
						},
					},
					{
						appName: 'test',
						supervisorApiHost: 'supervisor',
						listenPort: 48484,
					} as any,
				);

				// The feature label should expose the supervisor API location
				// to the service environment
				expect(
					service.config.environment['BALENA_SUPERVISOR_HOST'],
				).to.be.equal('supervisor');

				expect(
					service.config.environment['BALENA_SUPERVISOR_PORT'],
				).to.be.equal('48484');

				expect(
					service.config.environment['BALENA_SUPERVISOR_ADDRESS'],
				).to.be.equal('http://supervisor:48484');
			});

			it('sets BALENA_API_KEY env var to the scoped API key value', async () => {
				// Key generation reads/writes the database, which is why this
				// is an integration test
				const mykey = await apiKeys.generateScopedKey(123456, 'foobar');

				const service = await Service.fromComposeObject(
					{
						appId: 123456,
						serviceId: 123456,
						serviceName: 'foobar',
						labels: {
							'io.balena.features.supervisor-api': '1',
						},
					},
					{
						appName: 'test',
						supervisorApiHost: 'supervisor',
						listenPort: 48484,
					} as any,
				);

				// The service env should carry the previously generated scoped key
				expect(
					service.config.environment['BALENA_SUPERVISOR_API_KEY'],
				).to.be.equal(mykey);
			});
		});
	});
});

View File

@ -0,0 +1,305 @@
import { expect } from 'chai';
import * as sinon from 'sinon';
import * as volumeManager from '~/src/compose/volume-manager';
import Volume from '~/src/compose/volume';
import { createDockerImage } from '~/test-lib/docker-helper';
import * as Docker from 'dockerode';
// Integration tests for the volume manager against a real docker engine.
// Each test creates its own volumes and cleans them up afterwards.
describe('compose/volume-manager', () => {
	const docker = new Docker();
	after(async () => {
		// Containers are pruned first since they may hold volume references
		await docker.pruneContainers();
		await docker.pruneVolumes();
		// NOTE: dangling=false prunes all unused images, not just dangling ones
		await docker.pruneImages({ filters: { dangling: { false: true } } });
	});

	describe('Retrieving volumes from the engine', () => {
		it('gets all supervised Volumes', async () => {
			// Setup volume data
			await Promise.all([
				docker.createVolume({
					Name: Volume.generateDockerName(1, 'redis'),
					// Recently created volumes contain io.balena.supervised label
					Labels: { 'io.balena.supervised': '1' },
				}),
				docker.createVolume({
					Name: Volume.generateDockerName(1, 'mysql'),
					// Recently created volumes contain io.balena.supervised label and app-uuid
					Labels: {
						'io.balena.supervised': '1',
						'io.balena.app-uuid': 'deadbeef',
					},
				}),
				docker.createVolume({
					Name: Volume.generateDockerName(2, 'backend'),
					// Old Volumes will not have labels
				}),
				// Volume not created by the Supervisor
				docker.createVolume({ Name: 'user_created_volume' }),
				docker.createVolume({
					Name: 'decoy',
					// Added decoy to really test the inference (should not return,
					// as the name is not in the supervisor `<appId>_<name>` format)
					Labels: { 'io.balena.supervised': '1' },
				}),
			]);

			// Perform test: only the three supervisor-named volumes are returned
			await expect(volumeManager.getAll()).to.eventually.have.deep.members([
				{
					appId: 1,
					appUuid: undefined,
					config: {
						driver: 'local',
						driverOpts: {},
						labels: {
							'io.balena.supervised': '1',
						},
					},
					name: 'redis',
				},
				{
					appId: 1,
					appUuid: 'deadbeef',
					config: {
						driver: 'local',
						driverOpts: {},
						labels: {
							'io.balena.supervised': '1',
							'io.balena.app-uuid': 'deadbeef',
						},
					},
					name: 'mysql',
				},
				{
					appId: 2,
					appUuid: undefined,
					config: {
						driver: 'local',
						driverOpts: {},
						labels: {},
					},
					name: 'backend',
				},
			]);

			// Cleanup volumes
			await Promise.all([
				docker.getVolume(Volume.generateDockerName(1, 'redis')).remove(),
				docker.getVolume(Volume.generateDockerName(1, 'mysql')).remove(),
				docker.getVolume(Volume.generateDockerName(2, 'backend')).remove(),
				docker.getVolume('user_created_volume').remove(),
				docker.getVolume('decoy').remove(),
			]);
		});

		it('can parse null Volumes', async () => {
			// Perform test with no volumes
			await expect(volumeManager.getAll()).to.eventually.deep.equal([]);
		});

		it('gets the volume for specific application', async () => {
			// Setup volume data for two different apps
			await Promise.all([
				docker.createVolume({
					Name: Volume.generateDockerName(111, 'app'),
					Labels: {
						'io.balena.supervised': '1',
					},
				}),
				docker.createVolume({
					Name: Volume.generateDockerName(222, 'otherApp'),
					Labels: {
						'io.balena.supervised': '1',
					},
				}),
			]);

			// Perform test: only app 111's volume is returned
			await expect(volumeManager.getAllByAppId(111)).to.eventually.deep.equal([
				{
					appId: 111,
					appUuid: undefined,
					config: {
						driver: 'local',
						driverOpts: {},
						labels: {
							'io.balena.supervised': '1',
						},
					},
					name: 'app',
				},
			]);

			// Cleanup volumes
			await Promise.all([
				docker.getVolume(Volume.generateDockerName(111, 'app')).remove(),
				docker.getVolume(Volume.generateDockerName(222, 'otherApp')).remove(),
			]);
		});
	});

	describe('Creating volumes', () => {
		it('creates a volume if it does not exist', async () => {
			// The volume does not exist on the engine before
			await expect(
				docker.getVolume(Volume.generateDockerName(111, 'main')).inspect(),
			).to.be.rejected;

			// Volume to create
			const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});

			// Create volume
			await volumeManager.create(volume);

			// Check the volume should have been created
			await expect(
				docker.getVolume(Volume.generateDockerName(111, 'main')).inspect(),
			).to.not.be.rejected;

			// Cleanup volumes
			await Promise.all([
				docker.getVolume(Volume.generateDockerName(111, 'main')).remove(),
			]);
		});

		it('does not try to create a volume that already exists', async () => {
			// Setup volume data
			await docker.createVolume({
				Name: Volume.generateDockerName(111, 'main'),
				Labels: {
					'io.balena.supervised': '1',
				},
			});

			// Create compose object for volume already set up in the engine
			const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});
			// Spy on the volume's own create to verify it is skipped
			sinon.spy(volume, 'create');

			// Create volume
			await volumeManager.create(volume);

			// Check volume was not created
			expect(volume.create).to.not.have.been.called;

			// Cleanup volumes
			await Promise.all([
				docker.getVolume(Volume.generateDockerName(111, 'main')).remove(),
			]);
		});
	});

	describe('Removing volumes', () => {
		it('removes a volume if it exists', async () => {
			// Setup volume data
			await Promise.all([
				docker.createVolume({
					Name: Volume.generateDockerName(111, 'main'),
					Labels: {
						'io.balena.supervised': '1',
					},
				}),
			]);

			// Volume to remove
			const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});

			// Remove volume
			await volumeManager.remove(volume);

			// Check volume was removed
			await expect(
				docker.getVolume(Volume.generateDockerName(111, 'main')).inspect(),
			).to.be.rejected;
		});

		it('does nothing on removal if the volume does not exist', async () => {
			// Setup unrelated volume data to confirm it is untouched
			await Promise.all([
				docker.createVolume({
					Name: 'decoy-volume',
				}),
			]);

			// Volume to remove (does not exist on the engine)
			const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});

			// Remove volume; this should be a no-op rather than an error
			await expect(volumeManager.remove(volume)).to.not.be.rejected;

			// Cleanup volumes
			await Promise.all([docker.getVolume('decoy-volume').remove()]);
		});
	});

	describe('Removing orphaned volumes', () => {
		it('removes any remaining unreferenced volumes after services have been deleted', async () => {
			// Setup volume data
			await Promise.all([
				docker.createVolume({
					Name: 'some-volume',
				}),
				// This volume is still referenced in the target state
				docker.createVolume({
					Name: Volume.generateDockerName(111, 'main'),
					Labels: {
						'io.balena.supervised': '1',
					},
				}),
				docker.createVolume({
					Name: Volume.generateDockerName(222, 'old'),
					Labels: {
						'io.balena.supervised': '1',
					},
				}),
				// This volume is referenced by a container
				docker.createVolume({
					Name: 'other-volume',
				}),
			]);

			// Create an empty image
			await createDockerImage('hello', ['io.balena.testing=1'], docker);

			// Create a container from the image, binding `other-volume` so it
			// counts as referenced and must survive the orphan cleanup
			const { id: containerId } = await docker.createContainer({
				Image: 'hello',
				Cmd: ['true'],
				HostConfig: {
					Binds: ['other-volume:/data'],
				},
			});

			await expect(
				volumeManager.removeOrphanedVolumes([
					// Keep any volumes in the target state
					Volume.generateDockerName(111, 'main'),
				]),
			).to.not.be.rejected;

			// Only the two referenced volumes (target-state + container-bound)
			// should remain; `some-volume` and the `222_old` volume are gone
			expect(await docker.listVolumes())
				.to.have.property('Volumes')
				.that.has.lengthOf(2);

			// Referenced volume should have been kept
			await expect(
				docker.getVolume(Volume.generateDockerName(111, 'main')).inspect(),
			).to.not.be.rejected;
			await expect(docker.getVolume('other-volume').inspect()).to.not.be
				.rejected;

			// Cleanup: container first, as it references `other-volume`
			await Promise.all([
				docker.getVolume(Volume.generateDockerName(111, 'main')).remove(),
				docker.getContainer(containerId).remove(),
			]);
			await Promise.all([
				docker.getImage('hello').remove(),
				docker.getVolume('other-volume').remove(),
			]);
		});
	});
});

View File

@ -0,0 +1,179 @@
import { expect } from 'chai';
import { SinonStub, stub } from 'sinon';
import Volume from '~/src/compose/volume';
import * as logTypes from '~/lib/log-types';
import * as logger from '~/src/logger';
import * as Docker from 'dockerode';
import { createVolume, withMockerode } from '~/test-lib/mockerode';
// Integration tests for the Volume model against a real docker engine.
// logger.logSystemEvent is stubbed for the whole suite so log assertions
// do not depend on an actual log backend.
describe('compose/volume: integration tests', () => {
	const docker = new Docker();

	describe('creating and removing docker volumes', () => {
		before(() => {
			// TODO: can we spy the actual log stream instead of stubbing and using
			// implementation details?
			stub(logger, 'logSystemEvent');
		});

		afterEach(() => {
			// Clear call history between tests so calledOnceWith stays accurate
			(logger.logSystemEvent as SinonStub).reset();
		});

		after(async () => {
			// Remove any volumes left behind by the tests
			const { Volumes: allVolumes } = await docker.listVolumes();
			await Promise.all(
				allVolumes.map(({ Name }) => docker.getVolume(Name).remove()),
			);
			(logger.logSystemEvent as SinonStub).restore();
		});

		it('should use defaults to create the volume when no options are given', async () => {
			const volume = Volume.fromComposeObject(
				'one_volume',
				1032480,
				'deadbeef',
			);

			// Create the volume
			await volume.create();
			const dockerVolumeName = Volume.generateDockerName(
				volume.appId,
				volume.name,
			);
			// This should not throw
			const dockerVolume = await docker.getVolume(dockerVolumeName).inspect();
			// Default driver plus the supervisor bookkeeping labels
			expect(dockerVolume).to.deep.include({
				Name: dockerVolumeName,
				Driver: 'local',
				Labels: {
					'io.balena.supervised': 'true',
					'io.balena.app-uuid': 'deadbeef',
				},
			});
			expect(logger.logSystemEvent).to.have.been.calledOnceWith(
				logTypes.createVolume,
			);

			// Test volume removal
			await volume.remove();

			// The volume should no longer exist
			await expect(docker.getVolume(dockerVolumeName).inspect()).to.be.rejected;

			// Check that log entry was generated
			expect(logger.logSystemEvent).to.have.been.calledWith(
				logTypes.removeVolume,
			);
		});

		it('should pass configuration options to the engine', async () => {
			const volume = Volume.fromComposeObject(
				'one_volume',
				1032480,
				'deadbeef',
				{
					driver: 'local',
					driver_opts: {
						type: 'tmpfs',
						device: 'tmpfs',
					},
					labels: {
						'my-label': 'test-label',
					},
				},
			);

			await volume.create();
			const dockerVolumeName = Volume.generateDockerName(
				volume.appId,
				volume.name,
			);
			// This should not throw
			const dockerVolume = await docker.getVolume(dockerVolumeName).inspect();
			// Custom labels are merged with the supervisor labels and
			// driver_opts show up as engine Options
			expect(dockerVolume).to.deep.include({
				Name: dockerVolumeName,
				Driver: 'local',
				Labels: {
					'my-label': 'test-label',
					'io.balena.supervised': 'true',
					'io.balena.app-uuid': 'deadbeef',
				},
				Options: {
					device: 'tmpfs',
					type: 'tmpfs',
				},
			});
			expect(logger.logSystemEvent).to.have.been.calledOnceWith(
				logTypes.createVolume,
			);

			// Test volume removal
			await volume.remove();

			// The volume should no longer exist
			await expect(docker.getVolume(dockerVolumeName).inspect()).to.be.rejected;

			// Check that log entry was generated
			expect(logger.logSystemEvent).to.have.been.calledWith(
				logTypes.removeVolume,
			);
		});

		it('should report an error if the volume does not exist', async () => {
			const volume = Volume.fromComposeObject('aaa', 1234, 'deadbeef');
			const dockerVolumeName = Volume.generateDockerName(
				volume.appId,
				volume.name,
			);
			// The volume should not exist before
			await expect(docker.getVolume(dockerVolumeName).inspect()).to.be.rejected;

			// Remove the volume, this should not throw
			await expect(volume.remove()).to.not.be.rejected;

			// Check that log entry was generated
			expect(logger.logSystemEvent).to.have.been.calledWith(
				logTypes.removeVolumeError,
			);
		});

		it('should report an error if a problem happens while removing the volume', async () => {
			const dockerVolume = createVolume({
				Name: '1234_aaa',
			});

			// We only use mockerode to simulate errors
			await withMockerode(
				async (mockerode) => {
					const volume = Volume.fromComposeObject('aaa', 1234, 'deadbeef');

					// Stub the mockerode method to fail
					mockerode.removeVolume.rejects('Something bad happened');

					// Check engine state before
					expect((await mockerode.listVolumes()).Volumes).to.have.lengthOf(1);

					// Remove the volume, this should not throw
					await expect(volume.remove()).to.not.be.rejected;

					// Check that log entry was generated
					expect(logger.logSystemEvent).to.have.been.calledWith(
						logTypes.removeVolumeError,
					);
				},
				{ volumes: [dockerVolume] },
			);
		});
	});
});

View File

@ -0,0 +1,243 @@
import * as _ from 'lodash';
import * as path from 'path';
import { promises as fs } from 'fs';
import { SinonSpy, spy, SinonStub, stub } from 'sinon';
import { expect } from 'chai';
import { testfs, TestFs } from 'mocha-pod';
import constants = require('~/lib/constants');
import { SchemaTypeKey } from '~/src/config/schema-type';
import { fnSchema } from '~/src/config/functions';
import * as conf from '~/src/config';
describe('config', () => {
const configJsonPath = path.join(
constants.rootMountPoint,
constants.bootMountPoint,
'config.json',
);
const deviceTypeJsonPath = path.join(
constants.rootMountPoint,
constants.bootMountPoint,
'device-type.json',
);
const readConfigJson = () =>
fs.readFile(configJsonPath, 'utf8').then((data) => JSON.parse(data));
const readDeviceTypeJson = () =>
fs.readFile(deviceTypeJsonPath, 'utf8').then((data) => JSON.parse(data));
let testFs: TestFs.Enabled;
before(async () => {
await conf.initialized();
});
beforeEach(async () => {
// This tells testfs to make a backup of config.json before each test
// as some of the tests modify the file. This prevents any leaking between
// tests
testFs = await testfs({}, { keep: [configJsonPath] }).enable();
});
afterEach(async () => {
await testFs.restore();
});
it('reads and exposes values from config.json', async () => {
const configJson = await readConfigJson();
const id = await conf.get('applicationId');
return expect(id).to.equal(configJson.applicationId);
});
it('allows reading several values in one getMany call', async () => {
const configJson = await readConfigJson();
return expect(
await conf.getMany(['applicationId', 'apiEndpoint']),
).to.deep.equal({
applicationId: configJson.applicationId,
apiEndpoint: configJson.apiEndpoint,
});
});
it('generates a uuid and stores it in config.json', async () => {
const configJson = await readConfigJson();
const uuid = await conf.get('uuid');
expect(uuid).to.be.a('string');
expect(uuid).to.have.lengthOf(32);
expect(uuid).to.equal(configJson.uuid);
});
it('does not allow setting an immutable field', async () => {
return expect(conf.set({ deviceType: 'a different device type' })).to.be
.rejected;
});
it('allows setting both config.json and database fields transparently', async () => {
await conf.set({ appUpdatePollInterval: 30000, name: 'a new device name' });
const config = await conf.getMany(['appUpdatePollInterval', 'name']);
return expect(config).to.deep.equal({
appUpdatePollInterval: 30000,
name: 'a new device name',
});
});
it('allows deleting a config.json key and returns a default value if none is set', async () => {
await conf.remove('appUpdatePollInterval');
const poll = await conf.get('appUpdatePollInterval');
return expect(poll).to.equal(900000);
});
it('allows deleting a config.json key if it is null', async () => {
await conf.set({ apiKey: null });
const key = await conf.get('apiKey');
expect(key).to.be.undefined;
// config.json should have been modified as well
const configJson = await readConfigJson();
expect(configJson.apiKey).to.be.undefined;
});
it('does not allow modifying or removing a function value', async () => {
// We have to cast to any below, as the type system will
// not allow removing a function value
await expect(conf.remove('version' as any)).to.be.rejected;
await expect(conf.set({ version: '2.0' })).to.be.rejected;
});
it('throws when asked for an unknown key', () => {
return expect(conf.get('unknownInvalidValue' as any)).to.be.rejected;
});
it('emits a change event when values change', (done) => {
const listener = (val: conf.ConfigChangeMap<SchemaTypeKey>) => {
try {
if ('name' in val) {
expect(val.name).to.equal('someValue');
done();
conf.removeListener('change', listener);
}
} catch (e) {
done(e);
}
};
conf.on('change', listener);
conf.set({ name: 'someValue' });
});
// FIXME: this test illustrates the issue with the singleton approach and the
// "load config as you go" approach.
// The `osVariant` comes from a function in `src/config/functions` and that function
// memoizes the contents of `/etc/os-variant`.
// Since previous invocations have already memoized that value, there is no good way
// to force the config module to reload the file.
// The config module instead could read all static data on initialization and
// forget about memoization
// this is being skipped until the config module can be refactored
it.skip('deduces OS variant from developmentMode if not set', async () => {
const tFs = await testfs({
'/mnt/root/etc/os-release': testfs.from(
'test/data/etc/os-release-novariant',
),
}).enable();
await conf.set({ developmentMode: false });
const osVariant = await conf.get('osVariant');
expect(osVariant).to.equal('prod');
await tFs.restore();
});
it('reads and exposes MAC addresses', async () => {
// FIXME: this variable defaults to `/mnt/root/sys/class/net`. The supervisor runs with network_mode: false
// which means that it can just use the container `/sys/class/net` and the result should be the same
constants.macAddressPath = '/sys/class/net';
const macAddress = await conf.get('macAddress');
expect(macAddress).to.have.length.greaterThan(0);
});
describe('Function config providers', () => {
it('should throw if a non-mutable function provider is set', () => {
expect(conf.set({ version: 'some-version' })).to.be.rejected;
});
it('should throw if a non-mutable function provider is removed', () => {
expect(conf.remove('version' as any)).to.be.rejected;
});
});
describe('Config data sources', () => {
	afterEach(() => {
		// Clean up memoized values so each test starts from a cold cache
		fnSchema.deviceArch.clear();
		fnSchema.deviceType.clear();
	});

	it('should obtain deviceArch from device-type.json', async () => {
		const dtJson = await readDeviceTypeJson();
		const deviceArch = await conf.get('deviceArch');
		expect(deviceArch).to.equal(dtJson.arch);
	});

	it('should obtain deviceType from device-type.json', async () => {
		const dtJson = await readDeviceTypeJson();
		const deviceType = await conf.get('deviceType');
		expect(deviceType).to.equal(dtJson.slug);
	});

	it('should memoize values from device-type.json', async () => {
		const dtJson = await readDeviceTypeJson();
		spy(fs, 'readFile');
		try {
			// Make a first call to get the value to be memoized
			await conf.get('deviceType');
			await conf.get('deviceArch');
			expect(fs.readFile).to.be.called;
			(fs.readFile as SinonSpy).resetHistory();

			const deviceArch = await conf.get('deviceArch');
			expect(deviceArch).to.equal(dtJson.arch);
			// The result should still be memoized from the previous call
			expect(fs.readFile).to.not.be.called;

			const deviceType = await conf.get('deviceType');
			expect(deviceType).to.equal(dtJson.slug);
			// The result should still be memoized from the previous call
			expect(fs.readFile).to.not.be.called;
		} finally {
			// Restore even if an assertion above throws, so the spy does not
			// leak into other tests
			(fs.readFile as SinonSpy).restore();
		}
	});

	it('should not memoize errors when reading deviceArch', async () => {
		// File not found
		stub(fs, 'readFile').rejects('File not found');
		try {
			await expect(conf.get('deviceArch')).to.eventually.equal('unknown');
			expect(fs.readFile).to.be.calledOnce;
		} finally {
			// Always restore the stub, even on assertion failure
			(fs.readFile as SinonStub).restore();
		}
		// A subsequent successful read must not return the cached error
		const dtJson = await readDeviceTypeJson();
		await expect(conf.get('deviceArch')).to.eventually.equal(dtJson.arch);
	});

	it('should not memoize errors when reading deviceType', async () => {
		// File not found
		stub(fs, 'readFile').rejects('File not found');
		try {
			await expect(conf.get('deviceType')).to.eventually.equal('unknown');
			expect(fs.readFile).to.be.calledOnce;
		} finally {
			// Always restore the stub, even on assertion failure
			(fs.readFile as SinonStub).restore();
		}
		// A subsequent successful read must not return the cached error
		const dtJson = await readDeviceTypeJson();
		await expect(conf.get('deviceType')).to.eventually.equal(dtJson.slug);
	});
});
});

View File

@ -1,10 +1,9 @@
import * as Bluebird from 'bluebird';
import { knex, Knex } from 'knex';
import { promises as fs } from 'fs';
import { expect } from 'chai';
import prepare = require('~/test-lib/prepare');
import * as constants from '~/lib/constants';
import { exists } from '~/lib/fs-utils';
async function createOldDatabase(path: string) {
const db = knex({
@ -41,35 +40,28 @@ async function createOldDatabase(path: string) {
return db;
}
describe('Database Migrations', () => {
before(async () => {
await prepare();
/**
 * Remove the on-disk test database (if present) and drop the db module
 * from the require cache so the next import re-initializes it from scratch.
 */
async function restoreDb() {
	try {
		await fs.unlink(constants.databasePath);
	} catch {
		/* NOOP */
	}
	// Reset the module cache to allow the database to be initialized again
	delete require.cache[require.resolve('~/src/db')];
}
after(() => {
// @ts-expect-error
constants.databasePath = process.env.DATABASE_PATH;
delete require.cache[require.resolve('~/src/db')];
describe('db', () => {
afterEach(async () => {
await restoreDb();
});
it('creates a database at the path passed on creation', async () => {
const databasePath = process.env.DATABASE_PATH_2!;
// @ts-expect-error
constants.databasePath = databasePath;
delete require.cache[require.resolve('~/src/db')];
const testDb = await import('~/src/db');
await testDb.initialized();
expect(await exists(databasePath)).to.be.true;
await expect(fs.access(constants.databasePath)).to.not.be.rejected;
});
it('adds new fields and removes old ones in an old database', async () => {
const databasePath = process.env.DATABASE_PATH_3!;
const knexForDB = await createOldDatabase(databasePath);
// @ts-expect-error
constants.databasePath = databasePath;
delete require.cache[require.resolve('~/src/db')];
it('migrations add new fields and removes old ones in an old database', async () => {
// Create a database with an older schema
const knexForDB = await createOldDatabase(constants.databasePath);
const testDb = await import('~/src/db');
await testDb.initialized();
await Bluebird.all([
@ -94,28 +86,18 @@ describe('Database Migrations', () => {
.to.eventually.be.true,
]);
});
});
describe('Database', () => {
let db: typeof import('~/src/db');
before(async () => {
await prepare();
db = await import('~/src/db');
});
it('initializes correctly, running the migrations', () => {
return expect(db.initialized()).to.be.fulfilled;
});
it('creates a database at the path from an env var', async () => {
expect(await exists(process.env.DATABASE_PATH!)).to.be.true;
});
it('creates a deviceConfig table with a single default value', async () => {
const deviceConfig = await db.models('deviceConfig').select();
const testDb = await import('~/src/db');
await testDb.initialized();
const deviceConfig = await testDb.models('deviceConfig').select();
expect(deviceConfig).to.have.lengthOf(1);
expect(deviceConfig).to.deep.equal([{ targetValues: '{}' }]);
});
it('allows performing transactions', () => {
return db.transaction((trx) => expect(trx.commit()).to.be.fulfilled);
it('allows performing transactions', async () => {
const testDb = await import('~/src/db');
await testDb.initialized();
return testDb.transaction((trx) => expect(trx.commit()).to.be.fulfilled);
});
});

View File

@ -12,7 +12,7 @@ import * as osRelease from '~/lib/os-release';
import supervisorVersion = require('~/lib/supervisor-version');
import * as fsUtils from '~/lib/fs-utils';
describe('Container contracts', () => {
describe('lib/contracts', () => {
before(() => {
intialiseContractRequirements({
supervisorVersion,

View File

@ -1,40 +1,31 @@
import { expect } from 'chai';
import { isRight } from 'fp-ts/lib/Either';
import * as sinon from 'sinon';
import * as nock from 'nock';
import { TargetState } from '~/src/types';
import * as config from '~/src/config';
import * as legacy from '~/lib/legacy';
import log from '~/lib/supervisor-console';
import * as config from '~/src/config';
describe('lib/legacy', () => {
before(async () => {
// disable log output during testing
sinon.stub(log, 'debug');
sinon.stub(log, 'warn');
sinon.stub(log, 'info');
sinon.stub(log, 'event');
sinon.stub(log, 'success');
await config.initialized;
await config.initialized();
// Set the device uuid and name
// these migration methods read some data from the database
// (and other data from the API)
// which is also why they need to be defined as integration tests
// TODO: when the supervisor is a full app, we'll be able to control updates
// using contracts, meaning this legacy code can disappear
await config.set({ uuid: 'local' });
await config.set({ name: 'my-device' });
});
after(() => {
sinon.restore();
});
describe('Converting target state v2 to v3', () => {
it('accepts a local target state with empty configuration', async () => {
const target = await legacy.fromV2TargetState({} as any, true);
const decoded = TargetState.decode(target);
if (!isRight(decoded)) {
console.log(decoded.left);
// We do it this way to let the type guard be triggered
expect.fail('Resulting target state is a valid v3 target state');
}
@ -152,7 +143,6 @@ describe('lib/legacy', () => {
const decoded = TargetState.decode(target);
if (!isRight(decoded)) {
console.log(decoded.left);
// We do it this way let the type guard be triggered
expect.fail('Resulting target state is a valid v3 target state');
}
@ -228,7 +218,6 @@ describe('lib/legacy', () => {
const decoded = TargetState.decode(target);
if (!isRight(decoded)) {
console.log(decoded.left);
// We do it this way let the type guard be triggered
expect.fail('Resulting target state is a valid v3 target state');
}

View File

@ -0,0 +1,290 @@
import { expect } from 'chai';
import * as path from 'path';
import { promises as fs } from 'fs';
import { testfs } from 'mocha-pod';
import * as updateLock from '~/lib/update-lock';
import * as constants from '~/lib/constants';
import { UpdatesLockedError } from '~/lib/errors';
import * as config from '~/src/config';
import * as lockfile from '~/lib/lockfile';
describe('lib/update-lock', () => {
describe('abortIfHUPInProgress', () => {
	// Presence of either breadcrumb file under the state partition signals
	// that a Host OS update (HUP) rollback check is still in progress
	const breadcrumbFiles = [
		'rollback-health-breadcrumb',
		'rollback-altboot-breadcrumb',
	];

	const breadcrumbsDir = path.join(
		constants.rootMountPoint,
		constants.stateMountPoint,
	);

	// Create a single (empty) breadcrumb file through mocha-pod's testfs,
	// so the filesystem can be cleanly restored after each check
	const createBreadcrumb = (breadcrumb: string) =>
		testfs({
			[path.join(breadcrumbsDir, breadcrumb)]: '',
		}).enable();

	before(async () => {
		// Ensure the directory exists for all tests
		await fs.mkdir(breadcrumbsDir, { recursive: true });
	});

	it('should throw if any breadcrumbs exist on host', async () => {
		for (const bc of breadcrumbFiles) {
			const testFs = await createBreadcrumb(bc);
			await expect(updateLock.abortIfHUPInProgress({ force: false }))
				.to.eventually.be.rejectedWith('Waiting for Host OS update to finish')
				.and.be.an.instanceOf(UpdatesLockedError);
			await testFs.restore();
		}
	});

	it('should resolve to false if no breadcrumbs on host', async () => {
		// check that there are no breadcrumbs already on the directory
		expect(await fs.readdir(breadcrumbsDir)).to.have.lengthOf(0);
		await expect(
			updateLock.abortIfHUPInProgress({ force: false }),
		).to.eventually.equal(false);
	});

	it('should resolve to true if breadcrumbs are on host but force is passed', async () => {
		for (const bc of breadcrumbFiles) {
			const testFs = await createBreadcrumb(bc);
			await expect(
				updateLock.abortIfHUPInProgress({ force: true }),
			).to.eventually.equal(true);
			await testFs.restore();
		}
	});
});
describe('Lock/dispose functionality', () => {
// Arbitrary app id and service name shared by all lock tests below
const testAppId = 1234567;
const testServiceName = 'test';

// Both legacy and current lockfile names must be present for the update
// lock to be considered taken
const supportedLockfiles = ['resin-updates.lock', 'updates.lock'];

// Take both lockfiles for the test app/service directly through the
// lockfile module, simulating another process holding the lock
const takeLocks = () =>
	Promise.all(
		supportedLockfiles.map((lf) =>
			lockfile.lock(path.join(lockdir(testAppId, testServiceName), lf)),
		),
	);

// Release every lock registered as taken by this process, plus any
// leftover lockfiles for the test app, so tests start from a clean slate
const releaseLocks = async () => {
	await Promise.all(
		lockfile.getLocksTaken().map((lock) => lockfile.unlock(lock)),
	);

	// Remove any other lockfiles created for the testAppId
	await Promise.all(
		supportedLockfiles.map((lf) =>
			lockfile.unlock(path.join(lockdir(testAppId, testServiceName), lf)),
		),
	);
};

// Host-rooted lock directory for a given app/service
const lockdir = (appId: number, serviceName: string): string =>
	path.join(
		constants.rootMountPoint,
		updateLock.lockPath(appId, serviceName),
	);

// Assert that the lock directory contains exactly the two supported
// lockfiles (exists === true) or is empty (exists === false)
const expectLocks = async (
	exists: boolean,
	msg?: string,
	appId = testAppId,
	serviceName = testServiceName,
) =>
	expect(
		fs.readdir(lockdir(appId, serviceName)),
		msg,
	).to.eventually.deep.equal(exists ? supportedLockfiles : []);

before(async () => {
	await config.initialized();
	await config.set({ lockOverride: false });

	// Ensure the directory is available for all tests
	await fs.mkdir(lockdir(testAppId, testServiceName), {
		recursive: true,
	});
});

afterEach(async () => {
	// Cleanup all locks between tests
	await releaseLocks();
});
// Happy path: the lock is taken for the duration of the callback only
it('should take the lock, run the function, then dispose of locks', async () => {
	await expectLocks(
		false,
		'locks should not exist before the lock is taken',
	);
	await expect(
		updateLock.lock(testAppId, { force: false }, () =>
			// At this point the locks should be taken and not removed
			// until this function has been resolved
			expectLocks(true, 'lockfiles should exist while the lock is active'),
		),
	).to.be.fulfilled;
	await expectLocks(
		false,
		'locks should not exist after the lock is released',
	);
});

// A pre-existing lock held by "someone else" must abort the update
it('should throw UpdatesLockedError if lockfiles exists', async () => {
	// Take the locks before testing
	await takeLocks();
	await expectLocks(true, 'locks should exist before the lock is taken');
	await updateLock
		.lock(testAppId, { force: false }, () =>
			Promise.reject(
				'the lock function should not invoke the callback if locks are taken',
			),
		)
		.catch((err) => expect(err).to.be.instanceOf(UpdatesLockedError));
	// Since the lock-taking failed, there should be no locks to dispose of
	expect(lockfile.getLocksTaken()).to.have.length(0);
	// Restore the locks that were taken at the beginning of the test
	await releaseLocks();
});

// Locks must be released even when the callback itself rejects
it('should dispose of taken locks on any other errors', async () => {
	await expectLocks(false, 'locks should not exist before lock is called');
	await expect(
		updateLock.lock(
			testAppId,
			{ force: false },
			// At this point 2 lockfiles have been written, so this is testing
			// that even if the function rejects, lockfiles will be disposed of
			() =>
				expectLocks(
					true,
					'locks should be owned by the calling function',
				).then(() => Promise.reject('Test error')),
		),
	).to.be.rejectedWith('Test error');
	await expectLocks(
		false,
		'locks should be removed if an error happens within the lock callback',
	);
});
// When a list of app ids is given, every app is locked before the
// callback runs, and every app is unlocked afterwards
it('locks all applications before resolving input function', async () => {
	const appIds = [111, 222, 333];

	// Set up necessary lock directories
	await Promise.all(
		appIds.map((id) =>
			fs.mkdir(lockdir(id, testServiceName), { recursive: true }),
		),
	);

	await expect(
		updateLock.lock(appIds, { force: false }, () =>
			// At this point the locks should be taken and not removed
			// until this function has been resolved
			// Both `updates.lock` and `resin-updates.lock` should have been taken
			Promise.all(
				appIds.map((appId) =>
					expectLocks(
						true,
						`locks for app(${appId}) should exist`,
						appId,
						testServiceName,
					),
				),
			),
		),
	).to.eventually.be.fulfilled;

	// Everything that was locked should have been unlocked after function resolves
	await Promise.all(
		appIds.map((appId) =>
			expectLocks(
				false,
				`locks for app(${appId}) should have been released`,
				appId,
				testServiceName,
			),
		),
	).finally(() =>
		// In case the above fails, we need to make sure to cleanup the lockdir
		Promise.all(
			appIds
				.map((appId) =>
					supportedLockfiles.map((lf) =>
						lockfile.unlock(path.join(lockdir(appId, testServiceName), lf)),
					),
				)
				.flat(),
		),
	);
});

// A null appId is a no-op: existing locks are left untouched
it('resolves input function without locking when appId is null', async () => {
	await takeLocks();
	await expect(
		updateLock.lock(null as any, { force: false }, () => Promise.resolve()),
	).to.be.fulfilled;
	await expectLocks(
		true,
		'locks should not be touched by an unrelated lock() call',
	);
	await releaseLocks();
});

// `force: true` steals existing locks instead of rejecting
it('unlocks lockfile to resolve function if force option specified', async () => {
	await takeLocks();
	await expect(
		updateLock.lock(testAppId, { force: true }, () =>
			expectLocks(
				true,
				'locks should be deleted and taken again by the lock() call',
			),
		),
	).to.be.fulfilled;
	await expectLocks(
		false,
		'using force gave lock ownership to the callback, so they should now be deleted',
	);
});

// The `lockOverride` config flag behaves like a persistent `force`
it('unlocks lockfile to resolve function if lockOverride option specified', async () => {
	await takeLocks();

	// Change the configuration
	await config.set({ lockOverride: true });

	await expect(
		updateLock.lock(testAppId, { force: false }, () =>
			expectLocks(
				true,
				'locks should be deleted and taken again by the lock() call because of the override',
			),
		),
	).to.be.fulfilled;
	await expectLocks(
		false,
		'using lockOverride gave lock ownership to the callback, so they should now be deleted',
	);
});
});
});

View File

@ -1,257 +0,0 @@
import * as _ from 'lodash';
import { promises as fs } from 'fs';
import { SinonStub, stub } from 'sinon';
import { expect } from 'chai';
import prepare = require('~/test-lib/prepare');
import * as conf from '~/src/config';
import constants = require('~/lib/constants');
import { SchemaTypeKey } from '~/src/config/schema-type';
import { fnSchema } from '~/src/config/functions';
describe('Config', () => {
before(async () => {
await prepare();
await conf.initialized();
});
it('reads and exposes values from the config.json', async () => {
const id = await conf.get('applicationId');
return expect(id).to.equal(78373);
});
it('allows reading several values in one getMany call', async () => {
return expect(
await conf.getMany(['applicationId', 'apiEndpoint']),
).to.deep.equal({
applicationId: 78373,
apiEndpoint: 'https://api.resin.io',
});
});
it('generates a uuid and stores it in config.json', async () => {
const uuid = await conf.get('uuid');
const configJsonUuid = JSON.parse(
await fs.readFile('./test/data/config.json', 'utf8'),
).uuid;
expect(uuid).to.be.a('string');
expect(uuid).to.have.lengthOf(32);
expect(uuid).to.equal(configJsonUuid);
});
it('does not allow setting an immutable field', async () => {
const promise = conf.set({ deviceType: 'a different device type' });
// We catch it to avoid the unhandled error log
promise.catch(_.noop);
return expect(promise).to.be.rejected;
});
it('allows setting both config.json and database fields transparently', async () => {
await conf.set({ appUpdatePollInterval: 30000, name: 'a new device name' });
const config = await conf.getMany(['appUpdatePollInterval', 'name']);
return expect(config).to.deep.equal({
appUpdatePollInterval: 30000,
name: 'a new device name',
});
});
it('allows deleting a config.json key and returns a default value if none is set', async () => {
await conf.remove('appUpdatePollInterval');
const poll = await conf.get('appUpdatePollInterval');
return expect(poll).to.equal(900000);
});
it('allows deleting a config.json key if it is null', async () => {
await conf.set({ apiKey: null });
const key = await conf.get('apiKey');
expect(key).to.be.undefined;
expect(
JSON.parse(await fs.readFile('./test/data/config.json', 'utf8')),
).to.not.have.property('apiKey');
});
it('does not allow modifying or removing a function value', () => {
// We have to cast to any below, as the type system will
// not allow removing a function value
expect(conf.remove('version' as any)).to.be.rejected;
expect(conf.set({ version: '2.0' })).to.be.rejected;
});
it('throws when asked for an unknown key', () => {
expect(conf.get('unknownInvalidValue' as any)).to.be.rejected;
});
it('emits a change event when values', (done) => {
const listener = (val: conf.ConfigChangeMap<SchemaTypeKey>) => {
try {
if ('name' in val) {
expect(val.name).to.equal('someValue');
done();
conf.removeListener('change', listener);
}
} catch (e) {
done(e);
}
};
conf.on('change', listener);
conf.set({ name: 'someValue' });
});
it("returns production OS variant if it doesn't exist", async () => {
const oldPath = constants.hostOSVersionPath;
constants.hostOSVersionPath = 'test/data/etc/os-release-novariant';
const osVariant = await conf.get('osVariant');
constants.hostOSVersionPath = oldPath;
expect(osVariant).to.equal('prod');
});
it('reads and exposes MAC addresses', async () => {
const macAddress = await conf.get('macAddress');
expect(macAddress).to.have.length.greaterThan(0);
});
describe('Function config providers', () => {
it('should throw if a non-mutable function provider is set', () => {
expect(conf.set({ version: 'some-version' })).to.be.rejected;
});
it('should throw if a non-mutable function provider is removed', () => {
expect(conf.remove('version' as any)).to.be.rejected;
});
});
describe('Config data sources', () => {
afterEach(() => {
// Clean up memoized values
fnSchema.deviceArch.clear();
fnSchema.deviceType.clear();
});
it('should obtain deviceArch from device-type.json', async () => {
const [slug, arch] = ['raspberrypi3', 'armv7hf'];
stub(fs, 'readFile').resolves(
JSON.stringify({
slug,
arch,
}),
);
const deviceArch = await conf.get('deviceArch');
expect(deviceArch).to.equal(arch);
expect(fs.readFile).to.be.calledOnce;
expect(fs.readFile).to.be.calledWith(
`${constants.rootMountPoint}${constants.bootMountPoint}/device-type.json`,
'utf8',
);
(fs.readFile as SinonStub).restore();
});
it('should obtain deviceType from device-type.json', async () => {
const [slug, arch] = ['raspberrypi3', 'armv7hf'];
stub(fs, 'readFile').resolves(
JSON.stringify({
slug,
arch,
}),
);
const deviceType = await conf.get('deviceType');
expect(deviceType).to.equal(slug);
expect(fs.readFile).to.be.calledOnce;
expect(fs.readFile).to.be.calledWith(
`${constants.rootMountPoint}${constants.bootMountPoint}/device-type.json`,
'utf8',
);
(fs.readFile as SinonStub).restore();
});
it('should memoize values from device-type.json', async () => {
const [slug, arch] = ['raspberrypi3', 'armv7hf'];
stub(fs, 'readFile').resolves(
JSON.stringify({
slug,
arch,
}),
);
// Make a first call to get the value to be memoized
await conf.get('deviceType');
await conf.get('deviceArch');
expect(fs.readFile).to.be.called;
(fs.readFile as SinonStub).resetHistory();
const deviceArch = await conf.get('deviceArch');
expect(deviceArch).to.equal(arch);
// The result should still be memoized from the previous call
expect(fs.readFile).to.not.be.called;
const deviceType = await conf.get('deviceType');
expect(deviceType).to.equal(slug);
// The result should still be memoized from the previous call
expect(fs.readFile).to.not.be.called;
(fs.readFile as SinonStub).restore();
});
it('should not memoize errors when reading deviceArch', (done) => {
// File not found
stub(fs, 'readFile').throws('File not found');
expect(conf.get('deviceArch')).to.eventually.equal('unknown');
expect(fs.readFile).to.be.calledOnce;
(fs.readFile as SinonStub).restore();
// Next call should not throw
const [slug, arch] = ['raspberrypi3', 'armv7hf'];
stub(fs, 'readFile').resolves(
JSON.stringify({
slug,
arch,
}),
);
// We need to let rejection be discovered
// https://github.com/medikoo/memoizee/issues/93
setTimeout(() => {
expect(conf.get('deviceArch')).to.eventually.equal(arch);
expect(fs.readFile).to.be.calledOnce;
(fs.readFile as SinonStub).restore();
done();
});
});
it('should not memoize errors when reading deviceType', (done) => {
// File not found
stub(fs, 'readFile').throws('File not found');
expect(conf.get('deviceType')).to.eventually.equal('unknown');
expect(fs.readFile).to.be.calledOnce;
(fs.readFile as SinonStub).restore();
// Next call should not throw
const [slug, arch] = ['raspberrypi3', 'armv7hf'];
stub(fs, 'readFile').resolves(
JSON.stringify({
slug,
arch,
}),
);
// We need to let rejection be discovered
// https://github.com/medikoo/memoizee/issues/93
setTimeout(() => {
expect(conf.get('deviceType')).to.eventually.equal(slug);
expect(fs.readFile).to.be.calledOnce;
(fs.readFile as SinonStub).restore();
done();
});
});
});
});

View File

@ -1,22 +0,0 @@
import { promises as fs } from 'fs';
import { expect } from 'chai';
import blink = require('~/lib/blink');
import constants = require('~/lib/constants');
describe('blink', () => {
it('is a blink function', () => expect(blink).to.be.a('function'));
it('has a pattern property with start and stop functions', () => {
expect(blink.pattern.start).to.be.a('function');
expect(blink.pattern.stop).to.be.a('function');
});
it('writes to a file that represents the LED, and writes a 0 at the end to turn the LED off', async () => {
// TODO: Fix the typings for blink
await (blink as any)(1);
const contents = await fs.readFile(constants.ledFile);
expect(contents.toString()).to.equal('0');
});
});

View File

@ -1,360 +0,0 @@
import { expect } from 'chai';
import * as sinon from 'sinon';
import {
createVolume,
createContainer,
withMockerode,
} from '~/test-lib/mockerode';
import * as volumeManager from '~/src/compose/volume-manager';
import log from '~/lib/supervisor-console';
import Volume from '~/src/compose/volume';
describe('compose/volume-manager', () => {
describe('Retrieving volumes from the engine', () => {
let logDebug: sinon.SinonStub;
before(() => {
logDebug = sinon.stub(log, 'debug');
});
after(() => {
logDebug.restore();
});
afterEach(() => {
logDebug.reset();
});
it('gets all supervised Volumes', async () => {
// Setup volume data
const volumeData = [
createVolume({
Name: Volume.generateDockerName(1, 'redis'),
// Recently created volumes contain io.balena.supervised label
Labels: { 'io.balena.supervised': '1' },
}),
createVolume({
Name: Volume.generateDockerName(1, 'mysql'),
// Recently created volumes contain io.balena.supervised label and app-uuid
Labels: {
'io.balena.supervised': '1',
'io.balena.app-uuid': 'deadbeef',
},
}),
createVolume({
Name: Volume.generateDockerName(1, 'backend'),
// Old Volumes will not have labels
}),
// Volume not created by the Supervisor
createVolume({ Name: 'user_created_volume' }),
createVolume({
Name: 'decoy',
// Added decoy to really test the inference (should not return)
Labels: { 'io.balena.supervised': '1' },
}),
];
// Perform test
await withMockerode(
async () => {
await expect(volumeManager.getAll()).to.eventually.deep.equal([
{
appId: 1,
appUuid: undefined,
config: {
driver: 'local',
driverOpts: {},
labels: {
'io.balena.supervised': '1',
},
},
name: 'redis',
},
{
appId: 1,
appUuid: 'deadbeef',
config: {
driver: 'local',
driverOpts: {},
labels: {
'io.balena.supervised': '1',
'io.balena.app-uuid': 'deadbeef',
},
},
name: 'mysql',
},
{
appId: 1,
appUuid: undefined,
config: {
driver: 'local',
driverOpts: {},
labels: {},
},
name: 'backend',
},
]);
// Check that debug message was logged saying we found a Volume not created by us
expect(logDebug.lastCall.lastArg).to.equal(
'Found unmanaged or anonymous Volume: decoy',
);
},
{ volumes: volumeData },
);
});
it('can parse null Volumes', async () => {
// Perform test with no volumes
await withMockerode(async () => {
await expect(volumeManager.getAll()).to.eventually.deep.equal([]);
});
});
it('gets the volume for specific application', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: Volume.generateDockerName(111, 'app'),
Labels: {
'io.balena.supervised': '1',
},
}),
createVolume({
Name: Volume.generateDockerName(222, 'otherApp'),
Labels: {
'io.balena.supervised': '1',
},
}),
];
// Perform test
await withMockerode(
async () => {
await expect(
volumeManager.getAllByAppId(111),
).to.eventually.deep.equal([
{
appId: 111,
appUuid: undefined,
config: {
driver: 'local',
driverOpts: {},
labels: {
'io.balena.supervised': '1',
},
},
name: 'app',
},
]);
},
{ volumes },
);
});
});
describe('Creating volumes', () => {
it('creates a volume if it does not exist', async () => {
// Perform test
await withMockerode(async (mockerode) => {
// The volume does not exist on the engine before
expect(
mockerode.getVolume(Volume.generateDockerName(111, 'main')).inspect(),
).to.be.rejected;
// Volume to create
const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});
sinon.spy(volume, 'create');
// Create volume
await volumeManager.create(volume);
// Check that the creation function was called
expect(volume.create).to.have.been.calledOnce;
});
});
it('does not try to create a volume that already exists', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: Volume.generateDockerName(111, 'main'),
Labels: {
'io.balena.supervised': '1',
},
}),
];
// Perform test
await withMockerode(
async () => {
// Create compose object for volume already set up in mock engine
const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});
sinon.spy(volume, 'create');
// Create volume
await volumeManager.create(volume);
// Check volume was not created
expect(volume.create).to.not.have.been.called;
},
{ volumes },
);
});
});
describe('Removing volumes', () => {
it('removes a volume if it exists', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: Volume.generateDockerName(111, 'main'),
Labels: {
'io.balena.supervised': '1',
},
}),
];
// Perform test
await withMockerode(
async (mockerode) => {
// Volume to remove
const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});
sinon.spy(volume, 'remove');
// Remove volume
await volumeManager.remove(volume);
// Check volume was removed
expect(volume.remove).to.be.calledOnce;
expect(mockerode.removeVolume).to.have.been.calledOnceWith(
Volume.generateDockerName(111, 'main'),
);
},
{ volumes },
);
});
it('does nothing on removal if the volume does not exist', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: 'decoy-volume',
}),
];
// Perform test
await withMockerode(
async (mockerode) => {
// Volume to remove
const volume = Volume.fromComposeObject('main', 111, 'deadbeef', {});
sinon.spy(volume, 'remove');
// Remove volume
await expect(volumeManager.remove(volume)).to.not.be.rejected;
expect(mockerode.removeVolume).to.not.have.been.called;
},
{ volumes },
);
});
});
describe('Removing orphaned volumes', () => {
it('removes any remaining unreferenced volumes after services have been deleted', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: 'some-volume',
}),
createVolume({
Name: Volume.generateDockerName(111, 'main'),
Labels: {
'io.balena.supervised': '1',
},
}),
];
await withMockerode(
async (mockerode) => {
await volumeManager.removeOrphanedVolumes([]);
expect(mockerode.removeVolume).to.have.been.calledTwice;
expect(mockerode.removeVolume).to.have.been.calledWith('some-volume');
expect(mockerode.removeVolume).to.have.been.calledWith(
Volume.generateDockerName(111, 'main'),
);
},
{ volumes },
);
});
it('keeps volumes still referenced in target state', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: 'some-volume',
}),
createVolume({
Name: Volume.generateDockerName(111, 'main'),
Labels: {
'io.balena.supervised': '1',
},
}),
createVolume({
Name: Volume.generateDockerName(222, 'old'),
Labels: {
'io.balena.supervised': '1',
},
}),
];
await withMockerode(
async (mockerode) => {
await volumeManager.removeOrphanedVolumes([
Volume.generateDockerName(111, 'main'),
]);
expect(mockerode.removeVolume).to.have.been.calledTwice;
expect(mockerode.removeVolume).to.have.been.calledWith('some-volume');
expect(mockerode.removeVolume).to.have.been.calledWith(
Volume.generateDockerName(222, 'old'),
);
},
{ volumes },
);
});
it('keeps volumes still referenced by a container', async () => {
// Setup volume data
const volumes = [
createVolume({
Name: 'some-volume',
}),
createVolume({
Name: Volume.generateDockerName(111, 'main'),
Labels: {
'io.balena.supervised': '1',
},
}),
];
const containers = [
createContainer({
Id: 'some-service',
Mounts: [
{
Name: 'some-volume',
},
],
}),
];
await withMockerode(
async (mockerode) => {
await volumeManager.removeOrphanedVolumes([]);
// Container that has a volume should not be removed
expect(mockerode.removeVolume).to.have.been.calledOnceWith(
Volume.generateDockerName(111, 'main'),
);
},
{ volumes, containers },
);
});
});
});

View File

@ -1,319 +0,0 @@
import { expect } from 'chai';
import { SinonSpy, SinonStub, spy, stub } from 'sinon';
import * as path from 'path';
import { promises as fs } from 'fs';
import mockFs = require('mock-fs');
import * as updateLock from '~/lib/update-lock';
import * as constants from '~/lib/constants';
import { UpdatesLockedError } from '~/lib/errors';
import * as config from '~/src/config';
import * as lockfile from '~/lib/lockfile';
import * as fsUtils from '~/lib/fs-utils';
describe('lib/update-lock', () => {
const appId = 1234567;
const serviceName = 'test';
const mockLockDir = ({
createLockfile = true,
}: {
createLockfile?: boolean;
}) => {
const lockDirFiles: any = {};
if (createLockfile) {
lockDirFiles['updates.lock'] = mockFs.file({
uid: updateLock.LOCKFILE_UID,
});
lockDirFiles['resin-updates.lock'] = mockFs.file({
uid: updateLock.LOCKFILE_UID,
});
}
mockFs({
[path.join(
constants.rootMountPoint,
updateLock.lockPath(appId),
serviceName,
)]: lockDirFiles,
});
};
// TODO: Remove these hooks when we don't need './test/data' as test process's rootMountPoint
before(() => {
// @ts-expect-error // Set rootMountPoint for mockFs
constants.rootMountPoint = '/mnt/root';
});
after(() => {
// @ts-expect-error
constants.rootMountPoint = process.env.ROOT_MOUNTPOINT;
});
describe('lockPath', () => {
it('should return path prefix of service lockfiles on host', () => {
expect(updateLock.lockPath(appId)).to.equal(
`/tmp/balena-supervisor/services/${appId}`,
);
expect(updateLock.lockPath(appId, serviceName)).to.equal(
`/tmp/balena-supervisor/services/${appId}/${serviceName}`,
);
});
});
describe('abortIfHUPInProgress', () => {
	// Breadcrumb files that signal a Host OS update (HUP) is in flight
	const breadcrumbFiles = [
		'rollback-health-breadcrumb',
		'rollback-altboot-breadcrumb',
	];
	// Mock the host state partition, optionally containing a single
	// (empty) breadcrumb file; with no argument, the partition is empty.
	const mockBreadcrumbs = (breadcrumb?: string) => {
		mockFs({
			[path.join(
				constants.rootMountPoint,
				constants.stateMountPoint,
				breadcrumb ? breadcrumb : '',
			)]: '',
		});
	};

	afterEach(() => mockFs.restore());

	it('should throw if any breadcrumbs exist on host', async () => {
		// Each breadcrumb on its own must cause the abort; successive
		// mockFs() calls replace the previous in-memory filesystem.
		for (const bc of breadcrumbFiles) {
			mockBreadcrumbs(bc);
			await expect(updateLock.abortIfHUPInProgress({ force: false }))
				.to.eventually.be.rejectedWith('Waiting for Host OS update to finish')
				.and.be.an.instanceOf(UpdatesLockedError);
		}
	});

	it('should resolve to false if no breadcrumbs on host', async () => {
		mockBreadcrumbs();
		await expect(
			updateLock.abortIfHUPInProgress({ force: false }),
		).to.eventually.equal(false);
	});

	it('should resolve to true if breadcrumbs are on host but force is passed', async () => {
		// force: true overrides the HUP check rather than rejecting
		for (const bc of breadcrumbFiles) {
			mockBreadcrumbs(bc);
			await expect(
				updateLock.abortIfHUPInProgress({ force: true }),
			).to.eventually.equal(true);
		}
	});
});
describe('Lock/dispose functionality', () => {
	// Directory (inside the mock fs) where this app/service's lockfiles live
	const getLockParentDir = (): string =>
		`${constants.rootMountPoint}${updateLock.lockPath(appId, serviceName)}`;

	// Assert that both lockfiles are present (exists=true) or that the
	// directory is empty (exists=false); readdir returns sorted names.
	const expectLocks = async (exists: boolean = true) => {
		expect(await fs.readdir(getLockParentDir())).to.deep.equal(
			exists ? ['resin-updates.lock', 'updates.lock'] : [],
		);
	};

	let unlockSpy: SinonSpy;
	let lockSpy: SinonSpy;
	let execStub: SinonStub;
	let configGetStub: SinonStub;

	beforeEach(() => {
		unlockSpy = spy(lockfile, 'unlock');
		lockSpy = spy(lockfile, 'lock');
		// lockfile.lock calls exec to interface with the lockfile binary,
		// so mock it here as we don't have access to the binary in the test env
		// @ts-expect-error
		execStub = stub(fsUtils, 'exec').callsFake(async (command, opts) => {
			// Sanity check for the command call
			expect(command.trim().startsWith('lockfile')).to.be.true;
			// Remove any `lockfile` command options to leave just the command and the target filepath
			const [, targetPath] = command
				.replace(/-v|-nnn|-r\s+\d+|-l\s+\d+|-s\s+\d+|-!|-ml|-mu/g, '')
				.split(/\s+/);
			// Emulate the lockfile binary exec call: create the file and give
			// it the ownership the real binary would set
			await fsUtils.touch(targetPath);
			await fs.chown(targetPath, opts!.uid!, 0);
		});
		// config.get is called in updateLock.lock to get `lockOverride` value,
		// so mock it here to definitively avoid any side effects
		configGetStub = stub(config, 'get').resolves(false);
	});

	afterEach(async () => {
		unlockSpy.restore();
		lockSpy.restore();
		execStub.restore();
		configGetStub.restore();
		// Even though mock-fs is restored, this is needed to delete any in-memory storage of locks
		for (const lock of lockfile.getLocksTaken()) {
			await lockfile.unlock(lock);
		}
		mockFs.restore();
	});

	it('should take the lock, run the function, then dispose of locks', async () => {
		// Set up fake filesystem for lockfiles
		mockLockDir({ createLockfile: false });
		await expect(
			updateLock.lock(appId, { force: false }, async () => {
				// At this point the locks should be taken and not removed
				// until this function has been resolved
				await expectLocks(true);
				return Promise.resolve();
			}),
		).to.eventually.be.fulfilled;
		// Both `updates.lock` and `resin-updates.lock` should have been taken
		expect(lockSpy.args).to.have.length(2);
		// Everything that was locked should have been unlocked
		expect(lockSpy.args.map(([lock]) => [lock])).to.deep.equal(
			unlockSpy.args,
		);
	});

	it('should throw UpdatesLockedError if lockfile exists', async () => {
		// Set up fake filesystem for lockfiles
		mockLockDir({ createLockfile: true });
		const lockPath = `${getLockParentDir()}/updates.lock`;
		// Simulate the lockfile binary failing because the file already exists
		execStub.throws(new lockfile.LockfileExistsError(lockPath));
		try {
			await updateLock.lock(appId, { force: false }, async () => {
				await expectLocks(false);
				return Promise.resolve();
			});
			expect.fail('updateLock.lock should throw an UpdatesLockedError');
		} catch (err) {
			expect(err).to.be.instanceOf(UpdatesLockedError);
		}
		// Should only have attempted to take `updates.lock`
		expect(lockSpy.args.flat()).to.deep.equal([
			lockPath,
			updateLock.LOCKFILE_UID,
		]);
		// Since the lock-taking failed, there should be no locks to dispose of
		expect(lockfile.getLocksTaken()).to.have.length(0);
		// Since nothing was locked, nothing should be unlocked
		expect(unlockSpy.args).to.have.length(0);
	});

	it('should dispose of taken locks on any other errors', async () => {
		// Set up fake filesystem for lockfiles
		mockLockDir({ createLockfile: false });
		try {
			await updateLock.lock(
				appId,
				{ force: false },
				// At this point 2 lockfiles have been written, so this is testing
				// that even if the function rejects, lockfiles will be disposed of
				async () => {
					await expectLocks();
					return Promise.reject(new Error('Test error'));
				},
			);
		} catch {
			/* noop */
			// This just catches the 'Test error' above
		}
		// Both `updates.lock` and `resin-updates.lock` should have been taken
		expect(lockSpy.args).to.have.length(2);
		// Everything that was locked should have been unlocked
		expect(lockSpy.args.map(([lock]) => [lock])).to.deep.equal(
			unlockSpy.args,
		);
	});

	it('locks all applications before resolving input function', async () => {
		const appIds = [111, 222, 333];
		// Set up fake filesystem for lockfiles: one empty lock dir per app
		mockFs({
			[path.join(
				constants.rootMountPoint,
				updateLock.lockPath(111),
				serviceName,
			)]: {},
			[path.join(
				constants.rootMountPoint,
				updateLock.lockPath(222),
				serviceName,
			)]: {},
			[path.join(
				constants.rootMountPoint,
				updateLock.lockPath(333),
				serviceName,
			)]: {},
		});
		await expect(
			updateLock.lock(appIds, { force: false }, async () => {
				// At this point the locks should be taken and not removed
				// until this function has been resolved
				// Both `updates.lock` and `resin-updates.lock` should have been taken
				expect(lockSpy.args).to.have.length(6);
				// Make sure that no locks have been removed also
				expect(unlockSpy).to.not.be.called;
				return Promise.resolve();
			}),
		).to.eventually.be.fulfilled;
		// Everything that was locked should have been unlocked after function resolves
		expect(lockSpy.args.map(([lock]) => [lock])).to.deep.equal(
			unlockSpy.args,
		);
	});

	it('resolves input function without locking when appId is null', async () => {
		mockLockDir({ createLockfile: true });
		await expect(
			updateLock.lock(null as any, { force: false }, stub().resolves()),
		).to.be.fulfilled;
		// Since appId is null, updateLock.lock should just run the function, so
		// there should be no interfacing with the lockfile module
		expect(unlockSpy).to.not.have.been.called;
		expect(lockSpy).to.not.have.been.called;
	});

	it('unlocks lockfile to resolve function if force option specified', async () => {
		mockLockDir({ createLockfile: true });
		await expect(updateLock.lock(1234567, { force: true }, stub().resolves()))
			.to.be.fulfilled;
		expect(unlockSpy).to.have.been.called;
		expect(lockSpy).to.have.been.called;
	});

	it('unlocks lockfile to resolve function if lockOverride option specified', async () => {
		// lockOverride comes from config.get, stubbed in beforeEach
		configGetStub.resolves(true);
		mockLockDir({ createLockfile: true });
		await expect(
			updateLock.lock(1234567, { force: false }, stub().resolves()),
		).to.be.fulfilled;
		expect(unlockSpy).to.have.been.called;
		expect(lockSpy).to.have.been.called;
	});
});
});

10
test/lib/dbus/Dockerfile Normal file
View File

@ -0,0 +1,10 @@
FROM ubuntu:20.04

# Install the D-Bus daemon used as a mock message bus for tests
# (NOTE: the previous comment said "Install Systemd", but only dbus is installed)
RUN apt-get update && apt-get install -y --no-install-recommends \
	dbus \
	&& rm -rf /var/lib/apt/lists/*

# Session bus configuration; see dbus.conf alongside this Dockerfile
COPY dbus.conf /etc/dbus-1/session.d/

# Start a session bus and keep the container alive indefinitely
ENTRYPOINT ["dbus-run-session", "sleep", "infinity"]

9
test/lib/dbus/dbus.conf Normal file
View File

@ -0,0 +1,9 @@
<!DOCTYPE busconfig PUBLIC "-//freedesktop//DTD D-Bus Bus Configuration 1.0//EN"
 "http://www.freedesktop.org/standards/dbus/1.0/busconfig.dtd">
<busconfig>
	<!-- Expose the bus over TCP (port 6667) so sibling containers can connect -->
	<listen>tcp:host=localhost,bind=*,port=6667,family=ipv4</listen>
	<listen>unix:tmpdir=/tmp</listen>
	<!-- Test clients connect without credentials, so allow anonymous auth -->
	<auth>ANONYMOUS</auth>
	<allow_anonymous/>
</busconfig>

39
test/lib/docker-helper.ts Normal file
View File

@ -0,0 +1,39 @@
import * as Docker from 'dockerode';
import * as tar from 'tar-stream';
import { strict as assert } from 'assert';
/**
 * Creates an image from scratch with just some labels.
 *
 * @param name   - tag for the built image (passed as `t` to buildImage)
 * @param labels - non-empty list of `key=value` LABEL entries
 * @param docker - dockerode instance; defaults to a local engine connection
 * @returns the id of the built image (last `aux.ID` in the build output)
 * @throws if the tar entry fails, the build fails, or no image id is produced
 */
export async function createDockerImage(
	name: string,
	labels: [string, ...string[]],
	docker = new Docker(),
): Promise<string> {
	const pack = tar.pack(); // pack is a streams2 stream
	pack.entry(
		{ name: 'Dockerfile' },
		['FROM scratch'].concat(labels.map((l) => `LABEL ${l}`)).join('\n'),
		(err) => {
			if (err) {
				throw err;
			}
			pack.finalize();
		},
	);
	// Create an empty image
	const stream = await docker.buildImage(pack, { t: name });
	return await new Promise((resolve, reject) => {
		docker.modem.followProgress(stream, (err: any, res: any) => {
			if (err) {
				// Bail out on build failure. Without this early return the
				// code would fall through and crash on `res.map` below
				// (res is undefined when err is set), masking the real error.
				reject(err);
				return;
			}
			const ids = res
				.map((evt: any) => evt?.aux?.ID ?? null)
				.filter((id: string | null) => !!id);
			assert(ids.length > 0, 'expected at least an image id after building');
			resolve(ids[ids.length - 1]);
		});
	});
}

39
test/lib/mocha-hooks.ts Normal file
View File

@ -0,0 +1,39 @@
import * as sinon from 'sinon';
import log from '~/lib/supervisor-console';
/**
 * Mocha runs these hooks before/after each test suite (beforeAll/afterAll)
 * or before/after each test (beforeEach/afterEach), the same as with regular test hooks.
 *
 * Do here any setup that needs to affect all tests. When in doubt though, use regular hooks
 * https://mochajs.org/#test-fixture-decision-tree-wizard-thing
 */

// Log methods silenced for the whole test run
const stubbedMethods: Array<
	'debug' | 'warn' | 'info' | 'success' | 'event' | 'error'
> = ['debug', 'warn', 'info', 'success', 'event', 'error'];

export const mochaHooks = {
	beforeAll() {
		// disable log output during testing
		for (const method of stubbedMethods) {
			sinon.stub(log, method);
		}
	},
	afterEach() {
		// drop call history between tests so assertions don't leak across them
		for (const method of stubbedMethods) {
			(log[method] as sinon.SinonStub).reset();
		}
	},
	afterAll() {
		// put the real log methods back once the run is over
		for (const method of stubbedMethods) {
			(log[method] as sinon.SinonStub).restore();
		}
	},
};

View File

@ -7,6 +7,7 @@ module.exports = {
'ts-node/register/transpile-only',
'tsconfig-paths/register',
'test/lib/chai.ts',
'test/lib/mocha-hooks.ts',
],
spec: ['test/unit/**/*.spec.ts'],
timeout: '30000',

View File

@ -1,5 +1,4 @@
import { expect } from 'chai';
import * as sinon from 'sinon';
import App from '~/src/compose/app';
import {
CompositionStep,
@ -10,7 +9,6 @@ import Network from '~/src/compose/network';
import Service from '~/src/compose/service';
import { ServiceComposeConfig } from '~/src/compose/types/service';
import Volume from '~/src/compose/volume';
import log from '~/lib/supervisor-console';
const defaultContext = {
localMode: false,
@ -117,19 +115,6 @@ function expectNoStep(action: CompositionStepAction, steps: CompositionStep[]) {
const defaultNetwork = Network.fromComposeObject('default', 1, 'appuuid', {});
describe('compose/app', () => {
before(() => {
// disable log output during testing
sinon.stub(log, 'debug');
sinon.stub(log, 'warn');
sinon.stub(log, 'info');
sinon.stub(log, 'event');
sinon.stub(log, 'success');
});
after(() => {
// Restore stubbed methods
sinon.restore();
});
describe('volume state behavior', () => {
it('should correctly infer a volume create step', () => {
// Setup current and target apps

View File

@ -3,11 +3,10 @@ import * as sinon from 'sinon';
import { Network } from '~/src/compose/network';
import { NetworkInspectInfo } from 'dockerode';
import { createNetwork, withMockerode } from '~/test-lib/mockerode';
import { log } from '~/lib/supervisor-console';
describe('compose/network', () => {
describe('compose/network: unit tests', () => {
describe('creating a network from a compose object', () => {
it('creates a default network configuration if no config is given', () => {
const network = Network.fromComposeObject(
@ -91,7 +90,7 @@ describe('compose/network', () => {
});
it('warns about IPAM configuration without both gateway and subnet', () => {
const logSpy = sinon.spy(log, 'warn');
const logStub = log.warn as sinon.SinonStub;
Network.fromComposeObject('default', 12345, 'deadbeef', {
ipam: {
@ -105,12 +104,12 @@ describe('compose/network', () => {
},
});
expect(logSpy).to.have.been.calledOnce;
expect(logSpy).to.have.been.calledWithMatch(
expect(logStub).to.have.been.calledOnce;
expect(logStub).to.have.been.calledWithMatch(
'Network IPAM config entries must have both a subnet and gateway',
);
logSpy.resetHistory();
logStub.resetHistory();
Network.fromComposeObject('default', 12345, 'deadbeef', {
ipam: {
@ -124,12 +123,10 @@ describe('compose/network', () => {
},
});
expect(logSpy).to.have.been.calledOnce;
expect(logSpy).to.have.been.calledWithMatch(
expect(logStub).to.have.been.calledOnce;
expect(logStub).to.have.been.calledWithMatch(
'Network IPAM config entries must have both a subnet and gateway',
);
logSpy.restore();
});
it('parses values from a compose object', () => {
@ -509,221 +506,4 @@ describe('compose/network', () => {
).to.be.false;
});
});
describe('creating networks', () => {
it('creates a new network on the engine with the given data', async () => {
await withMockerode(async (mockerode) => {
const network = Network.fromComposeObject(
'default',
12345,
'deadbeef',
{
ipam: {
driver: 'default',
config: [
{
subnet: '172.20.0.0/16',
ip_range: '172.20.10.0/24',
gateway: '172.20.0.1',
},
],
options: {},
},
},
);
// Create the network
await network.create();
// Check that the create function was called with proper arguments
expect(mockerode.createNetwork).to.have.been.calledOnceWith({
Name: 'deadbeef_default',
Driver: 'bridge',
CheckDuplicate: true,
IPAM: {
Driver: 'default',
Config: [
{
Subnet: '172.20.0.0/16',
IPRange: '172.20.10.0/24',
Gateway: '172.20.0.1',
},
],
Options: {},
},
EnableIPv6: false,
Internal: false,
Labels: {
'io.balena.supervised': 'true',
'io.balena.app-id': '12345',
},
Options: {},
});
});
});
it('throws the error if there is a problem while creating the network', async () => {
await withMockerode(async (mockerode) => {
const network = Network.fromComposeObject(
'default',
12345,
'deadbeef',
{
ipam: {
driver: 'default',
config: [
{
subnet: '172.20.0.0/16',
ip_range: '172.20.10.0/24',
gateway: '172.20.0.1',
},
],
options: {},
},
},
);
// Re-define the dockerode.createNetwork to throw
mockerode.createNetwork.rejects('Unknown engine error');
// Creating the network should fail
return expect(network.create()).to.be.rejected.then((error) =>
expect(error).to.have.property('name', 'Unknown engine error'),
);
});
});
});
describe('removing a network', () => {
it('removes the legacy network from the engine if it exists', async () => {
// Create a mock network to add to the mock engine
const dockerNetwork = createNetwork({
Id: 'aaaaaaa',
Name: '12345_default',
});
await withMockerode(
async (mockerode) => {
// Check that the engine has the network
expect(await mockerode.listNetworks()).to.have.lengthOf(1);
// Create a dummy network object
const network = Network.fromComposeObject(
'default',
12345,
'deadbeef',
{},
);
// Perform the operation
await network.remove();
// The removal step should delete the object from the engine data
expect(mockerode.removeNetwork).to.have.been.calledOnceWith(
'aaaaaaa',
);
},
{ networks: [dockerNetwork] },
);
});
it('removes the network from the engine if it exists', async () => {
// Create a mock network to add to the mock engine
const dockerNetwork = createNetwork({
Id: 'deadbeef',
Name: 'a173bdb734884b778f5cc3dffd18733e_default',
Labels: {
'io.balena.supervised': 'true',
'io.balena.app-id': '12345',
},
});
await withMockerode(
async (mockerode) => {
// Check that the engine has the network
expect(await mockerode.listNetworks()).to.have.lengthOf(1);
// Create a dummy network object
const network = Network.fromComposeObject(
'default',
12345,
'a173bdb734884b778f5cc3dffd18733e',
{},
);
// Perform the operation
await network.remove();
// The removal step should delete the object from the engine data
expect(mockerode.removeNetwork).to.have.been.calledOnceWith(
'deadbeef',
);
},
{ networks: [dockerNetwork] },
);
});
it('ignores the request if the given network does not exist on the engine', async () => {
// Create a mock network to add to the mock engine
const mockNetwork = createNetwork({
Id: 'aaaaaaaa',
Name: 'some_network',
});
await withMockerode(
async (mockerode) => {
// Check that the engine has the network
expect(await mockerode.listNetworks()).to.have.lengthOf(1);
// Create a dummy network object
const network = Network.fromComposeObject(
'default',
12345,
'deadbeef',
{},
);
// This should not fail
await expect(network.remove()).to.not.be.rejected;
// We expect the network state to remain constant
expect(await mockerode.listNetworks()).to.have.lengthOf(1);
},
{ networks: [mockNetwork] },
);
});
it('throws the error if there is a problem while removing the network', async () => {
// Create a mock network to add to the mock engine
const mockNetwork = createNetwork({
Id: 'aaaaaaaa',
Name: 'a173bdb734884b778f5cc3dffd18733e_default',
Labels: {
'io.balena.app-id': '12345',
},
});
await withMockerode(
async (mockerode) => {
// We can change the return value of the mockerode removeNetwork
// to have the remove operation fail
mockerode.removeNetwork.throws({
statusCode: 500,
message: 'Failed to remove the network',
});
// Create a dummy network object
const network = Network.fromComposeObject(
'default',
12345,
'a173bdb734884b778f5cc3dffd18733e',
{},
);
await expect(network.remove()).to.be.rejected;
},
{ networks: [mockNetwork] },
);
});
});
});

View File

@ -1,5 +1,4 @@
import * as _ from 'lodash';
import * as sinon from 'sinon';
import { expect } from 'chai';
import { createContainer } from '~/test-lib/mockerode';
@ -8,9 +7,6 @@ import Service from '~/src/compose/service';
import Volume from '~/src/compose/volume';
import * as ServiceT from '~/src/compose/types/service';
import * as constants from '~/lib/constants';
import * as apiKeys from '~/lib/api-keys';
import log from '~/lib/supervisor-console';
const configs = {
simple: {
@ -30,19 +26,7 @@ const configs = {
},
};
describe('compose/service', () => {
before(() => {
// disable log output during testing
sinon.stub(log, 'debug');
sinon.stub(log, 'warn');
sinon.stub(log, 'info');
sinon.stub(log, 'success');
});
after(() => {
sinon.restore();
});
describe('compose/service: unit tests', () => {
describe('Creating a service instance from a compose object', () => {
it('extends environment variables with additional OS info', async () => {
const extendEnvVarsOpts = {
@ -825,66 +809,6 @@ describe('compose/service', () => {
.that.deep.equals([gpuDeviceRequest]);
});
});
describe('io.balena.supervisor-api', () => {
it('sets BALENA_SUPERVISOR_HOST, BALENA_SUPERVISOR_PORT and BALENA_SUPERVISOR_ADDRESS env vars', async () => {
const service = await Service.fromComposeObject(
{
appId: 123456,
serviceId: 123456,
serviceName: 'foobar',
labels: {
'io.balena.features.supervisor-api': '1',
},
},
{
appName: 'test',
supervisorApiHost: 'supervisor',
listenPort: 48484,
} as any,
);
expect(
service.config.environment['BALENA_SUPERVISOR_HOST'],
).to.be.equal('supervisor');
expect(
service.config.environment['BALENA_SUPERVISOR_PORT'],
).to.be.equal('48484');
expect(
service.config.environment['BALENA_SUPERVISOR_ADDRESS'],
).to.be.equal('http://supervisor:48484');
});
it('sets BALENA_API_KEY env var to the scoped API key value', async () => {
// TODO: should we add an integration test that checks that the value used for the API key comes
// from the database
sinon.stub(apiKeys, 'generateScopedKey').resolves('this is a secret');
const service = await Service.fromComposeObject(
{
appId: 123456,
serviceId: 123456,
serviceName: 'foobar',
labels: {
'io.balena.features.supervisor-api': '1',
},
},
{
appName: 'test',
supervisorApiHost: 'supervisor',
listenPort: 48484,
} as any,
);
expect(
service.config.environment['BALENA_SUPERVISOR_API_KEY'],
).to.be.equal('this is a secret');
(apiKeys.generateScopedKey as sinon.SinonStub).restore();
});
});
});
describe('Creating service instances from docker configuration', () => {

View File

@ -1,12 +1,7 @@
import { expect } from 'chai';
import { SinonStub, stub } from 'sinon';
import Volume from '~/src/compose/volume';
import * as logTypes from '~/lib/log-types';
import * as logger from '~/src/logger';
import { createVolume, withMockerode } from '~/test-lib/mockerode';
describe('compose/volume', () => {
describe('compose/volume: unit tests', () => {
describe('creating a volume from a compose object', () => {
it('should use proper defaults when no compose configuration is provided', () => {
const volume = Volume.fromComposeObject(
@ -167,96 +162,6 @@ describe('compose/volume', () => {
});
});
describe('creating a docker volume from options', () => {
before(() => {
stub(logger, 'logSystemEvent');
});
afterEach(() => {
(logger.logSystemEvent as SinonStub).reset();
});
after(() => {
(logger.logSystemEvent as SinonStub).restore();
});
it('should use defaults to create the volume when no options are given', async () => {
await withMockerode(async (mockerode) => {
const volume = Volume.fromComposeObject(
'one_volume',
1032480,
'deadbeef',
);
await volume.create();
expect(mockerode.createVolume).to.have.been.calledOnceWith({
Name: '1032480_one_volume',
Driver: 'local',
Labels: {
'io.balena.supervised': 'true',
'io.balena.app-uuid': 'deadbeef',
},
DriverOpts: {},
});
});
});
it('should pass configuration options to the engine', async () => {
await withMockerode(async (mockerode) => {
const volume = Volume.fromComposeObject(
'one_volume',
1032480,
'deadbeef',
{
driver_opts: {
opt1: 'test',
},
labels: {
'my-label': 'test-label',
},
},
);
await volume.create();
expect(mockerode.createVolume).to.have.been.calledOnceWith({
Name: '1032480_one_volume',
Driver: 'local',
Labels: {
'my-label': 'test-label',
'io.balena.supervised': 'true',
'io.balena.app-uuid': 'deadbeef',
},
DriverOpts: {
opt1: 'test',
},
});
expect(logger.logSystemEvent).to.have.been.calledOnceWith(
logTypes.createVolume,
);
});
});
it('should log successful volume creation to the cloud', async () => {
await withMockerode(async (mockerode) => {
const volume = Volume.fromComposeObject(
'one_volume',
1032480,
'deadbeef',
);
await volume.create();
expect(mockerode.createVolume).to.have.been.calledOnce;
expect(logger.logSystemEvent).to.have.been.calledOnceWith(
logTypes.createVolume,
);
});
});
});
describe('comparing volume configuration', () => {
it('should ignore name and supervisor labels in the comparison', () => {
expect(
@ -409,126 +314,4 @@ describe('compose/volume', () => {
).to.be.true;
});
});
describe('removing volumes', () => {
before(() => {
stub(logger, 'logSystemEvent');
});
afterEach(() => {
(logger.logSystemEvent as SinonStub).reset();
});
after(() => {
(logger.logSystemEvent as SinonStub).restore();
});
it('should remove the volume from the engine if it exists', async () => {
const dockerVolume = createVolume({
Name: '1234_aaa',
});
await withMockerode(
async (mockerode) => {
const volume = Volume.fromComposeObject('aaa', 1234, 'deadbeef');
// Check engine state before (this is really to test that mockerode is doing its job)
expect((await mockerode.listVolumes()).Volumes).to.have.lengthOf(1);
expect(await mockerode.getVolume('1234_aaa').inspect()).to.deep.equal(
dockerVolume.inspectInfo,
);
// Remove the volume
await volume.remove();
// Check that the remove method was called
expect(mockerode.removeVolume).to.have.been.calledOnceWith(
'1234_aaa',
);
},
{ volumes: [dockerVolume] },
);
});
it('should report the volume removal as a system event', async () => {
const dockerVolume = createVolume({
Name: '1234_aaa',
});
await withMockerode(
async (mockerode) => {
const volume = Volume.fromComposeObject('aaa', 1234, 'deadbeef');
// Check engine state before
expect((await mockerode.listVolumes()).Volumes).to.have.lengthOf(1);
// Remove the volume
await volume.remove();
// Check that the remove method was called
expect(mockerode.removeVolume).to.have.been.calledOnceWith(
'1234_aaa',
);
// Check that log entry was generated
expect(logger.logSystemEvent).to.have.been.calledOnceWith(
logTypes.removeVolume,
);
},
{ volumes: [dockerVolume] },
);
});
it('should report an error if the volume does not exist', async () => {
const dockerVolume = createVolume({
Name: '4567_bbb',
});
await withMockerode(
async (mockerode) => {
const volume = Volume.fromComposeObject('aaa', 1234, 'deadbeef');
// Check engine state before
expect((await mockerode.listVolumes()).Volumes).to.have.lengthOf(1);
// Remove the volume, this should not throw
await expect(volume.remove()).to.not.be.rejected;
// Check that the remove method was called
expect(mockerode.removeVolume).to.not.have.been.called;
// Check that log entry was generated
expect(logger.logSystemEvent).to.have.been.calledWith(
logTypes.removeVolumeError,
);
},
{ volumes: [dockerVolume] },
);
});
it('should report an error if a problem happens while removing the volume', async () => {
const dockerVolume = createVolume({
Name: '1234_aaa',
});
await withMockerode(
async (mockerode) => {
const volume = Volume.fromComposeObject('aaa', 1234, 'deadbeef');
// Stub the mockerode method to fail
mockerode.removeVolume.rejects('Something bad happened');
// Check engine state before
expect((await mockerode.listVolumes()).Volumes).to.have.lengthOf(1);
// Remove the volume, this should not throw
await expect(volume.remove()).to.not.be.rejected;
// Check that log entry was generated
expect(logger.logSystemEvent).to.have.been.calledWith(
logTypes.removeVolumeError,
);
},
{ volumes: [dockerVolume] },
);
});
});
});

View File

@ -90,13 +90,7 @@ describe('lib/fs-utils', () => {
});
});
/**
* TODO: Un-skip this test after all fs tests that write to a test file system use
* mock-fs instead. Hypothesis: exists isn't handling the relative directory it's
* being passed well. When all unit tests use mock-fs, we can set process.env.ROOT_MOUNTPOINT
* to `/mnt/root` so we can have an absolute path in all these tests.
*/
describe.skip('exists', () => {
describe('exists', () => {
before(mockFs);
after(unmockFs);

View File

@ -0,0 +1,17 @@
import { expect } from 'chai';
import * as path from 'path';
import * as updateLock from '~/lib/update-lock';
describe('lib/update-lock: unit tests', () => {
	describe('lockPath', () => {
		it('should return path prefix of service lockfiles on host', () => {
			// App-level lock directory is BASE_LOCK_DIR/<appId>; the optional
			// service name appends one more segment.
			const appLockDir = path.join(updateLock.BASE_LOCK_DIR, '123');
			expect(updateLock.lockPath(123)).to.equal(appLockDir);
			expect(updateLock.lockPath(123, 'main')).to.equal(
				path.join(appLockDir, 'main'),
			);
		});
	});
});