Commit 5f6825d (parent 4cb2b87)

misc cleanup to improve readability

2 files changed: +137 -23 lines

src/sessions.ts

Lines changed: 50 additions & 23 deletions
@@ -1,3 +1,5 @@
+import { setTimeout } from 'timers/promises';
+
 import { Binary, type Document, Long, type Timestamp } from './bson';
 import type { CommandOptions, Connection } from './cmap/connection';
 import { ConnectionPoolMetrics } from './cmap/metrics';
@@ -731,10 +733,10 @@ export class ClientSession
         : processTimeMS();

     let committed = false;
-    let result: any;
+    let result: T;

     try {
-      while (!committed) {
+      for (let retry = 0; !committed; ++retry) {
         this.startTransaction(options); // may throw on error

         try {
@@ -770,7 +772,7 @@ export class ClientSession

          if (
            fnError.hasErrorLabel(MongoErrorLabel.TransientTransactionError) &&
-           (this.timeoutContext != null || processTimeMS() - startTime < MAX_TIMEOUT)
+           (this.timeoutContext?.csotEnabled() || processTimeMS() - startTime < MAX_TIMEOUT)
          ) {
            continue;
          }
@@ -788,32 +790,57 @@ export class ClientSession
             await this.commitTransaction();
             committed = true;
           } catch (commitError) {
-            /*
-             * Note: a maxTimeMS error will have the MaxTimeMSExpired
-             * code (50) and can be reported as a top-level error or
-             * inside writeConcernError, ex.
-             * { ok:0, code: 50, codeName: 'MaxTimeMSExpired' }
-             * { ok:1, writeConcernError: { code: 50, codeName: 'MaxTimeMSExpired' } }
-             */
-            if (
-              !isMaxTimeMSExpiredError(commitError) &&
-              commitError.hasErrorLabel(MongoErrorLabel.UnknownTransactionCommitResult) &&
-              (this.timeoutContext != null || processTimeMS() - startTime < MAX_TIMEOUT)
-            ) {
-              continue;
-            }
-
-            if (
-              commitError.hasErrorLabel(MongoErrorLabel.TransientTransactionError) &&
-              (this.timeoutContext != null || processTimeMS() - startTime < MAX_TIMEOUT)
-            ) {
-              break;
+            // If CSOT is enabled, we repeatedly retry until timeoutMS expires. This is enforced
+            // by providing a timeoutContext to each async API, which knows how to cancel itself
+            // (i.e., the next retry will abort the withTransaction call).
+            // If CSOT is not enabled, do we still have time remaining or have we timed out?
+            const hasTimedOut =
+              !this.timeoutContext?.csotEnabled() && processTimeMS() - startTime >= MAX_TIMEOUT;
+
+            if (!hasTimedOut) {
+              if (
+                !isMaxTimeMSExpiredError(commitError) &&
+                commitError.hasErrorLabel(MongoErrorLabel.UnknownTransactionCommitResult)
+              ) {
+                /*
+                 * Note: a maxTimeMS error will have the MaxTimeMSExpired
+                 * code (50) and can be reported as a top-level error or
+                 * inside writeConcernError, ex.
+                 * { ok:0, code: 50, codeName: 'MaxTimeMSExpired' }
+                 * { ok:1, writeConcernError: { code: 50, codeName: 'MaxTimeMSExpired' } }
+                 */
+                continue;
+              }
+
+              if (commitError.hasErrorLabel(MongoErrorLabel.TransientTransactionError)) {
+                const BACKOFF_INITIAL_MS = 5;
+                const BACKOFF_MAX_MS = 500;
+                const BACKOFF_GROWTH = 1.5;
+                const jitter = Math.random();
+                const backoffMS =
+                  jitter * Math.min(BACKOFF_INITIAL_MS * BACKOFF_GROWTH ** retry, BACKOFF_MAX_MS);
+
+                const willExceedTransactionDeadline =
+                  (this.timeoutContext?.csotEnabled() &&
+                    backoffMS > this.timeoutContext.remainingTimeMS) ||
+                  processTimeMS() + backoffMS > startTime + MAX_TIMEOUT;
+
+                if (willExceedTransactionDeadline) {
+                  break;
+                }
+
+                await setTimeout(backoffMS);
+
+                break;
+              }
             }

             throw commitError;
           }
         }
       }
+
+      // @ts-expect-error result is always defined if we reach here, but TS cannot infer that from the for-loop above.
       return result;
     } finally {
       this.timeoutContext = null;
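
The retry backoff introduced above is exponential with full jitter: each retry waits a uniformly random fraction of min(5 * 1.5^retry, 500) milliseconds, so repeated transient commit failures back off from roughly 5ms up to a 500ms cap. A standalone sketch of the same schedule, using the constants from the diff (the helper name is hypothetical, not driver code):

// Sketch of the backoff schedule from the change above (hypothetical helper,
// not part of the driver). Backoff grows 1.5x per retry from 5ms, is capped
// at 500ms, and is scaled by a random jitter drawn from [0, 1).
const BACKOFF_INITIAL_MS = 5;
const BACKOFF_MAX_MS = 500;
const BACKOFF_GROWTH = 1.5;

function backoffMS(retry: number, jitter = Math.random()): number {
  return jitter * Math.min(BACKOFF_INITIAL_MS * BACKOFF_GROWTH ** retry, BACKOFF_MAX_MS);
}

// With jitter pinned to its maximum of 1, retries wait 5, 7.5, 11.25, ... ms,
// reaching the 500ms cap at retry 12.
for (let retry = 0; retry < 13; ++retry) {
  console.log(`retry ${retry}: up to ${backoffMS(retry, 1).toFixed(2)}ms`);
}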
Lines changed: 87 additions & 0 deletions
@@ -0,0 +1,87 @@
+import { expect } from 'chai';
+import { test } from 'mocha';
+import * as sinon from 'sinon';
+
+import { type ClientSession, type Collection, type MongoClient } from '../../../src';
+import { configureFailPoint, type FailCommandFailPoint, measureDuration } from '../../tools/utils';
+
+const failCommand: FailCommandFailPoint = {
+  configureFailPoint: 'failCommand',
+  mode: {
+    times: 13
+  },
+  data: {
+    failCommands: ['commitTransaction'],
+    errorCode: 251 // no such transaction
+  }
+};
+
+describe('Retry Backoff is Enforced', function () {
+  // 1. let client be a MongoClient
+  let client: MongoClient;
+
+  // 2. let coll be a collection
+  let collection: Collection;
+
+  beforeEach(async function () {
+    client = this.configuration.newClient();
+    collection = client.db('foo').collection('bar');
+  });
+
+  afterEach(async function () {
+    sinon.restore();
+    await client?.close();
+  });
+
+  test(
+    'works',
+    {
+      requires: {
+        mongodb: '>=4.4', // failCommand
+        topology: '!single' // transactions can't run on standalone servers
+      }
+    },
+    async function () {
+      const randomStub = sinon.stub(Math, 'random');
+
+      // 3.i Configure the random number generator used for jitter to always return 0
+      randomStub.returns(0);
+
+      // 3.ii Configure a fail point that forces 13 retries
+      await configureFailPoint(this.configuration, failCommand);
+
+      // 3.iii Define the callback to run inside the transaction
+      const callback = async (s: ClientSession) => {
+        await collection.insertOne({}, { session: s });
+      };
+
+      // 3.iv Let no_backoff_time be the duration of the withTransaction API call
+      const { duration: noBackoffTime } = await measureDuration(() => {
+        return client.withSession(async s => {
+          await s.withTransaction(callback);
+        });
+      });
+
+      // 4.i Configure the random number generator used for jitter to always return 1.
+      randomStub.returns(1);
+
+      // 4.ii Configure a fail point that forces 13 retries, as in step 3.ii.
+      await configureFailPoint(this.configuration, failCommand);
+
+      // 4.iii Use the same callback defined in 3.iii.
+      // 4.iv Let with_backoff_time be the duration of the withTransaction API call
+      const { duration: fullBackoffDuration } = await measureDuration(() => {
+        return client.withSession(async s => {
+          await s.withTransaction(callback);
+        });
+      });
+
+      // 5. Compare the durations of the two runs.
+      // The sum of 13 backoffs is roughly 2.2 seconds. There is a 1-second window to account for
+      // potential variance between the two runs.
+      expect(fullBackoffDuration).to.be.within(
+        noBackoffTime + 2200 - 1000,
+        noBackoffTime + 2200 + 1000
+      );
+    }
+  );
+});
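
measureDuration comes from the driver's shared test utilities and its implementation is not shown in this diff. A minimal sketch of the shape the test assumes, timing an async callback and reporting elapsed wall-clock milliseconds:

// Minimal sketch of a measureDuration-style helper (an assumption; the real
// helper in the test utils may differ): run an async callback and report how
// long it took in milliseconds alongside its result.
async function measureDuration<T>(fn: () => Promise<T>): Promise<{ duration: number; result: T }> {
  const start = performance.now();
  const result = await fn();
  return { duration: performance.now() - start, result };
}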
