Skip to content

Commit caec819

Browse files
committed
Tests on worker thread terminate while doing a checkout
Checkout leverages libgit2 threads and when applying filters it runs JS callbacks. These tests check that when running checkout on a worker thread and the thread is terminated, it exits gracefully without memory leaks.
1 parent 7b248d9 commit caec819

File tree

6 files changed

+297
-4
lines changed

6 files changed

+297
-4
lines changed

test/tests/filter.js

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -335,7 +335,7 @@ describe("Filter", function() {
335335

336336
it("can run sync callback on checkout without deadlocking", function() { // jshint ignore:line
337337
var test = this;
338-
var syncCallbackResult = true;
338+
var syncCallbackResult = 1;
339339

340340
return Registry.register(filterName, {
341341
apply: function() {
@@ -365,9 +365,10 @@ describe("Filter", function() {
365365
});
366366
});
367367

368+
// Temporary workaround for LFS checkout. Test skipped.
369+
// To activate when reverting workaround.
368370
// 'Checkout.head' and 'Submodule.lookup' do work with the repo locked.
369371
// They should work together without deadlocking.
370-
// Temporary workaround for LFS checkout. Test skipped to be reverted.
371372
it.skip("can run async callback on checkout without deadlocking", function() { // jshint ignore:line
372373
var test = this;
373374
var submoduleNameIn = "vendor/libgit2";

test/tests/worker.js

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,9 @@ const path = require("path");
22
const assert = require("assert");
33
const fse = require("fs-extra");
44
const local = path.join.bind(path, __dirname);
5+
const NodeGit = require("../../");
56

7+
let filterName = "psuedo_filter";
68
let Worker;
79

810
try {
@@ -24,6 +26,15 @@ if (Worker) {
2426
});
2527
});
2628

29+
// Unregister the test filter after every test so a stale registration
// cannot leak into the next test case.
afterEach(function() {
  return NodeGit.FilterRegistry.unregister(filterName)
    .catch(function(error) {
      // NOTE(review): this compares the rejection value itself against the
      // numeric ERROR code — confirm that unregister() rejects with the raw
      // code and not an Error object; if it rejects with an Error, this
      // branch never fires and failures are silently ignored.
      if (error === NodeGit.Error.CODE.ERROR) {
        throw new Error("Cannot unregister filter");
      }
    });
});
37+
2738
it("can perform basic functionality via worker thread", function(done) {
2839
const workerPath = local("../utils/worker.js");
2940
const worker = new Worker(workerPath, {
@@ -127,5 +138,78 @@ if (Worker) {
127138
}
128139
});
129140
});
141+
142+
// This tests that while calling filter's apply callbacks and the worker
// is terminated, node exits gracefully. To make sure we terminate the
// worker during a checkout, continuous checkouts will be running in a loop.
it("can kill worker thread while doing a checkout and exit gracefully", function(done) { // jshint ignore:line
  const workerPath = local("../utils/worker_checkout.js");
  const worker = new Worker(workerPath, {
    workerData: {
      clonePath,
      url: "https://github.com/nodegit/test.git"
    }
  });
  worker.on("message", (message) => {
    switch (message) {
      case "init":
        // give enough time for the worker to start applying the filter
        // during continuous checkouts
        setTimeout(() => { worker.terminate(); }, 10000);
        break;
      // The worker loops forever (times = 0), so receiving either
      // completion message means terminate() failed to interrupt it.
      case "success":
      case "failure":
        assert.fail();
        break;
    }
  });
  worker.on("error", () => assert.fail());
  worker.on("exit", (code) => {
    // Worker.terminate() resolves with exit code 1; use strict equality
    // instead of the original loose `==` comparison.
    if (code === 1) {
      done();
    } else {
      assert.fail();
    }
  });
});
177+
178+
// This tests that after calling filter's apply callbacks and the worker
// is terminated, there will be no memory leaks.
it("can track objects to free on context shutdown after multiple checkouts", function(done) { // jshint ignore:line
  let testOk;
  const workerPath = local("../utils/worker_context_aware_checkout.js");
  const worker = new Worker(workerPath, {
    workerData: {
      clonePath,
      url: "https://github.com/nodegit/test.git"
    }
  });
  worker.on("message", (message) => {
    if (message === "numbersMatch" || message === "numbersDoNotMatch") {
      // Record the worker's verdict and shut it down; the exit handler
      // below finishes the test once the worker is gone.
      testOk = message === "numbersMatch";
      worker.terminate();
    } else if (message === "failure") {
      assert.fail();
    }
  });
  worker.on("error", () => assert.fail());
  worker.on("exit", (code) => {
    // terminate() yields exit code 1; the test passes only if the worker
    // also reported matching object counts.
    if (code === 1 && testOk === true) {
      done();
    } else {
      assert.fail();
    }
  });
});
130214
});
131215
}

test/utils/loop_checkout.js

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
const fse = require("fs-extra");
2+
const path = require("path");
3+
const NodeGit = require("../../");
4+
5+
// Recursively collects the absolute paths of every file under `dir` whose
// extension equals `ext` (given without the leading dot). Results are
// delivered through the node-style callback `done(err, files)`.
const getDirExtFiles = function(dir, ext, done) {
  let results = [];
  fse.readdir(dir, function(err, list) {
    if (err) {
      return done(err);
    }
    let i = 0;
    (function next() {
      let file = list[i++];
      if (!file) {
        // Exhausted the directory listing: report everything found.
        return done(null, results);
      }
      file = path.resolve(dir, file);
      fse.stat(file, function(err, stat) {
        if (err) {
          // Propagate stat failures instead of silently treating the
          // entry as a plain file (the original swallowed this error).
          return done(err);
        }
        if (stat && stat.isDirectory()) {
          getDirExtFiles(file, ext, function(err, res) {
            if (err) {
              // Surface errors from the recursive walk; previously they
              // were ignored and `res` could be undefined.
              return done(err);
            }
            results = results.concat(res);
            next();
          });
        } else {
          if (path.extname(file) == ".".concat(ext)) {
            results.push(file);
          }
          next();
        }
      });
    })();
  });
};
34+
35+
// Promise wrapper around getDirExtFiles: resolves with the list of files
// under `dir` carrying extension `ext`, or rejects on the first error.
const getDirFilesToChange = function(dir, ext) {
  return new Promise(function(resolve, reject) {
    getDirExtFiles(dir, ext, function(err, results) {
      if (err) {
        // Bail out here: without the `return` the original also called
        // resolve(undefined) after rejecting.
        return reject(err);
      }
      resolve(results);
    });
  });
};
45+
46+
// Changes the content of files with extension 'ext'
// in directory 'dir' recursively.
// Returns relative file paths
const changeDirExtFiles = function (dir, ext, newText) {
  return getDirFilesToChange(dir, ext)
    .then(function(filesWithExt) {
      // fse.writeFile returns a promise when no callback is supplied;
      // wait for every write so callers cannot race the file system
      // (the original fired the writes and returned immediately).
      return Promise.all(filesWithExt.map(function(file) {
        return fse.writeFile(
          file,
          newText,
          { encoding: "utf-8" }
        ).then(function() {
          return path.relative(dir, file);
        });
      }));
    })
    .catch(function(err) {
      // Preserve the underlying failure reason instead of discarding it.
      throw new Error(
        "Error getting files with extension ."
          .concat(ext)
          .concat(": ", err.message)
      );
    });
};
67+
68+
// 'times' to limit the number of iterations in the loop.
// 0 means no limit.
const loopingCheckoutHead = async function(repoPath, repo, times) {
  const texts = [
    "Text0: changing content to trigger checkout",
    "Text1: changing content to trigger checkout"
  ];

  let iteration = 0;
  while (true) {
    // Alternate between the two payloads so every pass dirties the files.
    const newText = texts[iteration % 2];
    const jsRelativeFilePaths =
      await changeDirExtFiles(repoPath, "js", newText); // jshint ignore:line
    const checkoutOpts = {
      checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE,
      paths: jsRelativeFilePaths
    };
    await NodeGit.Checkout.head(repo, checkoutOpts);

    iteration += 1;
    if (iteration === times) {
      break;
    }
  }
};
90+
91+
module.exports = loopingCheckoutHead;

test/utils/worker_checkout.js

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
const {
2+
isMainThread,
3+
parentPort,
4+
workerData
5+
} = require("worker_threads");
6+
const assert = require("assert");
7+
const NodeGit = require("../../");
8+
const loopingCheckoutHead = require("./loop_checkout.js");
9+
10+
if (isMainThread) {
11+
throw new Error("Must be run via worker thread");
12+
}
13+
14+
parentPort.postMessage("init");
15+
16+
const { clonePath, url } = workerData;
17+
const cloneOpts = {
18+
fetchOpts: {
19+
callbacks: {
20+
certificateCheck: () => 0
21+
}
22+
}
23+
};
24+
25+
let repository;
26+
let filterName = "psuedo_filter";
27+
let applyCallbackResult = 1;
28+
29+
return NodeGit.Clone(url, clonePath, cloneOpts)
30+
.then(function(_repository) {
31+
repository = _repository;
32+
assert.ok(repository instanceof NodeGit.Repository);
33+
return NodeGit.FilterRegistry.register(filterName, {
34+
apply: function() {
35+
applyCallbackResult = 0;
36+
},
37+
check: function() {
38+
return NodeGit.Error.CODE.OK;
39+
}
40+
}, 0);
41+
})
42+
.then(function(result) {
43+
assert.strictEqual(result, NodeGit.Error.CODE.OK);
44+
return loopingCheckoutHead(clonePath, repository, 0);
45+
}).then(function() {
46+
assert.strictEqual(applyCallbackResult, 0);
47+
parentPort.postMessage("success");
48+
})
49+
.catch((err) => {
50+
parentPort.postMessage("failure");
51+
});

test/utils/worker_context_aware.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ return NodeGit.Clone(url, clonePath, opts)
4141
garbageCollect();
4242

4343
// Count total of objects left after being created/destroyed
44-
const selfFreeingCount =
44+
const freeingCount =
4545
NodeGit.Cert.getNonSelfFreeingConstructedCount() +
4646
NodeGit.Repository.getSelfFreeingInstanceCount() +
4747
NodeGit.Commit.getSelfFreeingInstanceCount() +
@@ -50,7 +50,7 @@ return NodeGit.Clone(url, clonePath, opts)
5050

5151
const numberOfTrackedObjects = NodeGit.getNumberOfTrackedObjects();
5252

53-
if (selfFreeingCount === numberOfTrackedObjects) {
53+
if (freeingCount === numberOfTrackedObjects) {
5454
parentPort.postMessage("numbersMatch");
5555
}
5656
else {
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
const {
2+
isMainThread,
3+
parentPort,
4+
workerData
5+
} = require("worker_threads");
6+
const garbageCollect = require("./garbage_collect.js");
7+
const assert = require("assert");
8+
const NodeGit = require("../../");
9+
const loopingCheckoutHead = require("./loop_checkout.js");
10+
const { promisify } = require("util");
11+
12+
if (isMainThread) {
13+
throw new Error("Must be run via worker thread");
14+
}
15+
16+
const { clonePath, url } = workerData;
17+
const cloneOpts = {
18+
fetchOpts: {
19+
callbacks: {
20+
certificateCheck: () => 0
21+
}
22+
}
23+
};
24+
25+
let repository;
26+
let filterName = "psuedo_filter";
27+
let applyCallbackResult = 1;
28+
29+
return NodeGit.Clone(url, clonePath, cloneOpts)
30+
.then(function(_repository) {
31+
repository = _repository;
32+
assert.ok(repository instanceof NodeGit.Repository);
33+
return NodeGit.FilterRegistry.register(filterName, {
34+
apply: function() {
35+
applyCallbackResult = 0;
36+
},
37+
check: function() {
38+
return NodeGit.Error.CODE.OK;
39+
}
40+
}, 0);
41+
})
42+
.then(function(result) {
43+
assert.strictEqual(result, NodeGit.Error.CODE.OK);
44+
return loopingCheckoutHead(clonePath, repository, 10);
45+
}).then(function() {
46+
assert.strictEqual(applyCallbackResult, 0);
47+
// Tracked objects must work too when the Garbage Collector is triggered
48+
garbageCollect();
49+
50+
// Count total of objects left after being created/destroyed
51+
const freeingCount =
52+
NodeGit.Cert.getNonSelfFreeingConstructedCount() +
53+
NodeGit.FilterSource.getNonSelfFreeingConstructedCount() +
54+
NodeGit.Buf.getNonSelfFreeingConstructedCount() +
55+
NodeGit.Repository.getSelfFreeingInstanceCount();
56+
57+
const numberOfTrackedObjects = NodeGit.getNumberOfTrackedObjects();
58+
59+
if (freeingCount === numberOfTrackedObjects) {
60+
parentPort.postMessage("numbersMatch");
61+
}
62+
else {
63+
parentPort.postMessage("numbersDoNotMatch");
64+
}
65+
return promisify(setTimeout)(50000);
66+
}).catch((err) => parentPort.postMessage("failure"));

0 commit comments

Comments
 (0)