Move lib/fs-utils tests to testfs
This removes mock-fs as a dependency.

Change-type: patch
Signed-off-by: Christina Ying Wang <christina@balena.io>
This commit is contained in:
parent 1e9c2dc437
commit 9c968b8d06
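For context, the migration swaps mock-fs's in-memory filesystem stubbing for mocha-pod's testfs helper, which seeds real files before each test and restores the original filesystem state afterwards. Below is a minimal sketch of the setup/teardown pattern used throughout the new spec, using only the testfs calls that appear in the diff; the '/test/file.1' path and 'foo' contents are illustrative fixtures, not part of this commit:

import { expect } from 'chai';
import { promises as fs } from 'fs';
import type { TestFs } from 'mocha-pod';
import { testfs } from 'mocha-pod';

describe('testfs setup/teardown pattern (illustrative)', () => {
	let tFs: TestFs.Enabled;

	beforeEach(async () => {
		// Seed the real filesystem with a fixture file; files matching the
		// `cleanup` glob are removed when the instance is restored.
		tFs = await testfs(
			{ '/test/file.1': 'foo' },
			{ cleanup: ['/test/*'] },
		).enable();
	});

	afterEach(async () => {
		// Return the filesystem to its pre-test state.
		await tFs.restore();
	});

	it('sees the seeded fixture', async () => {
		expect(await fs.readFile('/test/file.1', 'utf-8')).to.equal('foo');
	});
});

The per-describe beforeEach/afterEach pairs in the new spec follow this same shape, swapping in whichever fixtures each group needs.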
package-lock.json (generated), 17 lines changed:
@@ -30,7 +30,6 @@
 "@types/lodash": "^4.14.186",
 "@types/memoizee": "^0.4.8",
 "@types/mocha": "^10.0.6",
-"@types/mock-fs": "^4.13.1",
 "@types/morgan": "^1.9.3",
 "@types/node": "^20.11.24",
 "@types/request": "^2.48.8",
@@ -78,7 +77,6 @@
 "memoizee": "^0.4.14",
 "mocha": "^10.4.0",
 "mocha-pod": "^2.0.5",
-"mock-fs": "^4.14.0",
 "morgan": "^1.10.0",
 "network-checker": "^0.1.1",
 "nock": "^13.1.2",
@@ -1644,15 +1642,6 @@
 "integrity": "sha512-dJvrYWxP/UcXm36Qn36fxhUKu8A/xMRXVT2cliFF1Z7UA9liG5Psj3ezNSZw+5puH2czDXRLcXQxf8JbJt0ejg==",
 "dev": true
 },
-"node_modules/@types/mock-fs": {
-"version": "4.13.1",
-"resolved": "https://registry.npmjs.org/@types/mock-fs/-/mock-fs-4.13.1.tgz",
-"integrity": "sha512-m6nFAJ3lBSnqbvDZioawRvpLXSaPyn52Srf7OfzjubYbYX8MTUdIgDxQl0wEapm4m/pNYSd9TXocpQ0TvZFlYA==",
-"dev": true,
-"dependencies": {
-"@types/node": "*"
-}
-},
 "node_modules/@types/morgan": {
 "version": "1.9.3",
 "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.3.tgz",
@@ -10234,12 +10223,6 @@
 "node": ">=10"
 }
 },
-"node_modules/mock-fs": {
-"version": "4.14.0",
-"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-4.14.0.tgz",
-"integrity": "sha512-qYvlv/exQ4+svI3UOvPUpLDF0OMX5euvUH0Ny4N5QyRyhNdgAgUrVH3iUINSzEPLvx0kbo/Bp28GJKIqvE7URw==",
-"dev": true
-},
 "node_modules/morgan": {
 "version": "1.10.0",
 "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz",
package.json, 2 lines changed:

@@ -56,7 +56,6 @@
 "@types/lodash": "^4.14.186",
 "@types/memoizee": "^0.4.8",
 "@types/mocha": "^10.0.6",
-"@types/mock-fs": "^4.13.1",
 "@types/morgan": "^1.9.3",
 "@types/node": "^20.11.24",
 "@types/request": "^2.48.8",
@@ -104,7 +103,6 @@
 "memoizee": "^0.4.14",
 "mocha": "^10.4.0",
 "mocha-pod": "^2.0.5",
-"mock-fs": "^4.14.0",
 "morgan": "^1.10.0",
 "network-checker": "^0.1.1",
 "nock": "^13.1.2",
test/integration/lib/fs-utils.spec.ts (new file), 249 lines added:
@@ -0,0 +1,249 @@
import { expect } from 'chai';
import * as path from 'path';
import { promises as fs } from 'fs';
import type { TestFs } from 'mocha-pod';
import { testfs } from 'mocha-pod';
import { watch } from 'chokidar';

import * as fsUtils from '~/lib/fs-utils';

describe('lib/fs-utils', () => {
	const file1 = 'file.1';
	const filePath1 = '/test/file.1';
	const file2 = 'file.2';
	const filePath2 = '/test/file.2';

	describe('writeAndSyncFile', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					[filePath1]: 'foo',
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('should write and sync string data', async () => {
			await fsUtils.writeAndSyncFile(filePath1, 'foo bar');
			expect(await fs.readFile(filePath1, 'utf-8')).to.equal('foo bar');
		});

		it('should write and sync buffers', async () => {
			await fsUtils.writeAndSyncFile(filePath1, Buffer.from('bar foo'));
			expect(await fs.readFile(filePath1, 'utf-8')).to.equal('bar foo');
		});
	});

	describe('writeFileAtomic', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					[filePath2]: 'foo',
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('should write string data atomically', async () => {
			// Watch for added files, there should be a [file].new due to atomic rename
			const addedFiles: string[] = [];
			const watcher = watch('/test').on('add', (p) => addedFiles.push(p));

			await fsUtils.writeFileAtomic(filePath2, 'foo baz');
			expect(await fs.readFile(filePath2, 'utf-8')).to.equal('foo baz');

			expect(addedFiles).to.have.deep.include.members([
				filePath2,
				`${filePath2}.new`,
			]);

			// Clean up watcher
			await watcher.close();
		});

		it('should write buffer data atomically', async () => {
			// Watch for added files, there should be a [file].new due to atomic rename
			const addedFiles: string[] = [];
			const watcher = watch('/test').on('add', (p) => addedFiles.push(p));

			await fsUtils.writeFileAtomic(filePath2, Buffer.from('baz foo'));
			expect(await fs.readFile(filePath2, 'utf-8')).to.equal('baz foo');

			expect(addedFiles).to.have.deep.include.members([
				filePath2,
				`${filePath2}.new`,
			]);

			// Clean up watcher
			await watcher.close();
		});
	});

	describe('safeRename', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					[filePath1]: 'foo',
					[filePath2]: 'bar',
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('should rename a file', async () => {
			await fsUtils.safeRename(filePath1, `${filePath1}.rename`);
			const dirContents = await fs.readdir('/test');
			expect(dirContents).to.have.length(2);
			expect(dirContents).to.deep.include.members([`${file1}.rename`, file2]);
		});

		it('should replace an existing file', async () => {
			await fsUtils.safeRename(filePath1, filePath2);
			const dirContents = await fs.readdir('/test');
			expect(dirContents).to.have.length(1);
			expect(dirContents).to.include(file2);
			expect(dirContents).to.not.include(file1);
		});
	});

	describe('exists', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					[filePath1]: 'foo',
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('should return whether a file exists', async () => {
			expect(await fsUtils.exists(filePath1)).to.be.true;
			await fs.unlink(filePath1).catch(() => {
				/* noop */
			});
			expect(await fsUtils.exists(filePath1)).to.be.false;
		});
	});

	describe('mkdirp', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					'/test': {},
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('should recursively create directories', async () => {
			const directory = path.join('/test', 'test1', 'test2', 'test3');
			await fsUtils.mkdirp(directory);
			expect(() => fs.readdir(directory)).to.not.throw();
			// TODO: testfs cleanup doesn't seem to support directories
			await fs.rm('/test/test1', { recursive: true });
		});
	});

	describe('unlinkAll', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					[filePath1]: 'foo',
					[filePath2]: 'bar',
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('should unlink a single file', async () => {
			await fsUtils.unlinkAll(filePath1);
			expect(await fs.readdir('/test')).to.not.include(file1);
		});

		it('should unlink multiple files', async () => {
			await fsUtils.unlinkAll(filePath1, filePath2);
			expect(await fs.readdir('/test')).to.have.length(0);
		});
	});

	describe('touch', () => {
		let tFs: TestFs.Enabled;
		beforeEach(async () => {
			tFs = await testfs(
				{
					[filePath1]: testfs.file({
						contents: '',
						mtime: new Date('2024-01-01T00:00:00'),
					}),
				},
				{ cleanup: ['/test/*'] },
			).enable();
		});

		afterEach(async () => {
			await tFs.restore();
		});

		it('creates the file if it does not exist', async () => {
			await fsUtils.touch('/test/somefile');
			expect(await fs.readdir('/test')).to.include('somefile');
		});

		it('updates the file mtime if file already exists', async () => {
			const statsBefore = await fs.stat(filePath1);
			await fsUtils.touch(filePath1);
			const statsAfter = await fs.stat(filePath1);

			// Mtime should be different
			expect(statsAfter.mtime.getTime()).to.not.equal(
				statsBefore.mtime.getTime(),
			);
		});

		it('allows setting a custom time for existing files', async () => {
			const customTime = new Date('1981-11-24T12:00:00');
			await fsUtils.touch(filePath1, customTime);
			const statsAfter = await fs.stat(filePath1);

			expect(statsAfter.mtime.getTime()).to.be.equal(customTime.getTime());
		});

		it('allows setting a custom time for newly created files', async () => {
			const customTime = new Date('1981-11-24T12:00:00');
			await fsUtils.touch('somefile', customTime);
			const statsAfter = await fs.stat('somefile');

			expect(statsAfter.mtime.getTime()).to.be.equal(customTime.getTime());
		});
	});
});
The previous mock-fs based fs-utils spec (169 lines) is deleted:

@@ -1,169 +0,0 @@
import { expect } from 'chai';
import * as path from 'path';
import { promises as fs } from 'fs';
import type { SinonSpy } from 'sinon';
import { spy } from 'sinon';
import mock = require('mock-fs');

import * as fsUtils from '~/lib/fs-utils';
import { pathOnRoot } from '~/lib/host-utils';

describe('lib/fs-utils', () => {
	const testFileName1 = 'file.1';
	const testFileName2 = 'file.2';
	const testFile1 = pathOnRoot(testFileName1);
	const testFile2 = pathOnRoot(testFileName2);

	const mockFs = () => {
		mock({
			[testFile1]: mock.file({
				content: 'foo',
				mtime: new Date('2022-01-04T00:00:00'),
			}),
			[testFile2]: mock.file({
				content: 'bar',
				mtime: new Date('2022-01-04T00:00:00'),
			}),
		});
	};

	const unmockFs = () => {
		mock.restore();
	};

	describe('writeAndSyncFile', () => {
		before(mockFs);
		after(unmockFs);

		it('should write and sync string data', async () => {
			await fsUtils.writeAndSyncFile(testFile1, 'foo bar');
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('foo bar');
		});

		it('should write and sync buffers', async () => {
			await fsUtils.writeAndSyncFile(testFile1, Buffer.from('bar foo'));
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('bar foo');
		});
	});

	describe('writeFileAtomic', () => {
		before(() => {
			spy(fs, 'rename');
			mockFs();
		});

		after(() => {
			(fs.rename as SinonSpy).restore();
			unmockFs();
		});

		it('should write string data atomically', async () => {
			await fsUtils.writeFileAtomic(testFile1, 'foo baz');
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('foo baz');
			expect(fs.rename).to.have.been.calledWith(`${testFile1}.new`, testFile1);
		});

		it('should write buffer data atomically', async () => {
			await fsUtils.writeFileAtomic(testFile1, 'baz foo');
			expect(await fs.readFile(testFile1, 'utf-8')).to.equal('baz foo');
			expect(fs.rename).to.have.been.calledWith(`${testFile1}.new`, testFile1);
		});
	});

	describe('safeRename', () => {
		beforeEach(mockFs);
		afterEach(unmockFs);

		it('should rename a file', async () => {
			await fsUtils.safeRename(testFile1, testFile1 + 'rename');
			const dirContents = await fs.readdir(pathOnRoot());
			expect(dirContents).to.have.length(2);
			expect(dirContents).to.not.include(testFileName1);
			expect(dirContents).to.include(testFileName1 + 'rename');
		});

		it('should replace an existing file', async () => {
			await fsUtils.safeRename(testFile1, testFile2);
			const dirContents = await fs.readdir(pathOnRoot());
			expect(dirContents).to.have.length(1);
			expect(dirContents).to.include(testFileName2);
			expect(dirContents).to.not.include(testFileName1);
		});
	});

	describe('exists', () => {
		before(mockFs);
		after(unmockFs);

		it('should return whether a file exists', async () => {
			expect(await fsUtils.exists(testFile1)).to.be.true;
			await fs.unlink(testFile1).catch(() => {
				/* noop */
			});
			expect(await fsUtils.exists(testFile1)).to.be.false;
		});
	});

	describe('mkdirp', () => {
		before(mockFs);
		after(unmockFs);

		it('should recursively create directories', async () => {
			const directory = path.join(pathOnRoot('test1'), 'test2', 'test3');
			await fsUtils.mkdirp(directory);
			expect(() => fs.readdir(directory)).to.not.throw();
		});
	});

	describe('unlinkAll', () => {
		beforeEach(mockFs);
		afterEach(unmockFs);

		it('should unlink a single file', async () => {
			await fsUtils.unlinkAll(testFile1);
			expect(await fs.readdir(pathOnRoot())).to.not.include(testFileName1);
		});

		it('should unlink multiple files', async () => {
			await fsUtils.unlinkAll(testFile1, testFile2);
			expect(await fs.readdir(pathOnRoot())).to.have.length(0);
		});
	});

	describe('touch', () => {
		beforeEach(mockFs);
		afterEach(unmockFs);

		it('creates the file if it does not exist', async () => {
			await fsUtils.touch('somefile');
			expect(await fsUtils.exists('somefile')).to.be.true;
		});

		it('updates the file mtime if file already exists', async () => {
			const statsBefore = await fs.stat(testFile1);
			await fsUtils.touch(testFile1);
			const statsAfter = await fs.stat(testFile1);

			// Mtime should be different
			expect(statsAfter.mtime.getTime()).to.not.equal(
				statsBefore.mtime.getTime(),
			);
		});

		it('allows setting a custom time for existing files', async () => {
			const customTime = new Date('1981-11-24T12:00:00');
			await fsUtils.touch(testFile1, customTime);
			const statsAfter = await fs.stat(testFile1);

			expect(statsAfter.mtime.getTime()).to.be.equal(customTime.getTime());
		});

		it('allows setting a custom time for newly created files', async () => {
			const customTime = new Date('1981-11-24T12:00:00');
			await fsUtils.touch('somefile', customTime);
			const statsAfter = await fs.stat('somefile');

			expect(statsAfter.mtime.getTime()).to.be.equal(customTime.getTime());
		});
	});
});