Skip to content

Commit fbf6d5f

Browse files
authored (author name lost in page extraction)
fix: lower mine worker retries (#695)
1 parent e2f8fa7 commit fbf6d5f

File tree

5 files changed

+13
-25
lines changed

5 files changed

+13
-25
lines changed

src/db/transactions/db.ts

Lines changed: 6 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import superjson from "superjson";
2-
import { env } from "../../utils/env";
3-
import { redis } from "../../utils/redis/redis";
4-
import { AnyTransaction } from "../../utils/transaction/types";
2+
import { MAX_REDIS_BATCH_SIZE, redis } from "../../utils/redis/redis";
3+
import type { AnyTransaction } from "../../utils/transaction/types";
54

65
/**
76
* Schemas
@@ -111,13 +110,9 @@ export class TransactionDB {
111110
}
112111

113112
const result: AnyTransaction[] = [];
114-
for (
115-
let i = 0;
116-
i < queueIds.length;
117-
i += env.__EXPERIMENTAL_REDIS_BATCH_SIZE
118-
) {
113+
for (let i = 0; i < queueIds.length; i += MAX_REDIS_BATCH_SIZE) {
119114
const keys = queueIds
120-
.slice(i, i + env.__EXPERIMENTAL_REDIS_BATCH_SIZE)
115+
.slice(i, i + MAX_REDIS_BATCH_SIZE)
121116
.map(this.transactionDetailsKey);
122117
const vals = await redis.mget(...keys);
123118

@@ -141,13 +136,9 @@ export class TransactionDB {
141136
}
142137

143138
let numDeleted = 0;
144-
for (
145-
let i = 0;
146-
i < queueIds.length;
147-
i += env.__EXPERIMENTAL_REDIS_BATCH_SIZE
148-
) {
139+
for (let i = 0; i < queueIds.length; i += MAX_REDIS_BATCH_SIZE) {
149140
const keys = queueIds
150-
.slice(i, i + env.__EXPERIMENTAL_REDIS_BATCH_SIZE)
141+
.slice(i, i + MAX_REDIS_BATCH_SIZE)
151142
.map(this.transactionDetailsKey);
152143
numDeleted += await redis.unlink(...keys);
153144
}

src/utils/env.ts

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -90,18 +90,14 @@ export const env = createEnv({
9090
// Sets the max amount of memory Redis can use.
9191
// "0" means use all available memory.
9292
REDIS_MAXMEMORY: z.string().default("0"),
93-
// Sets the max batch Redis will handle in batch operations like MGET and UNLINK.
94-
// This will be removed if a consistent batch size works for all use cases.
95-
// ioredis has issues with batches over 100k+ (source: https://github.com/redis/ioredis/issues/801).
96-
__EXPERIMENTAL_REDIS_BATCH_SIZE: z.coerce.number().default(50_000),
9793
// Sets the number of recent transactions to store. Older transactions are pruned periodically.
9894
// In testing, 100k transactions consumes ~300mb memory.
9995
TRANSACTION_HISTORY_COUNT: z.coerce.number().default(100_000),
10096
// Sets the number of recent completed jobs in each queue.
10197
QUEUE_COMPLETE_HISTORY_COUNT: z.coerce.number().default(2_000),
10298
// Sets the number of recent failed jobs in each queue.
10399
// These limits are higher to debug failed jobs.
104-
QUEUE_FAIL_HISTORY_COUNT: z.coerce.number().default(20_000),
100+
QUEUE_FAIL_HISTORY_COUNT: z.coerce.number().default(10_000),
105101
// Sets the number of recent nonces to map to queue IDs.
106102
NONCE_MAP_COUNT: z.coerce.number().default(10_000),
107103
},
@@ -134,8 +130,6 @@ export const env = createEnv({
134130
process.env.CONFIRM_TRANSACTION_QUEUE_CONCURRENCY,
135131
ENGINE_MODE: process.env.ENGINE_MODE,
136132
REDIS_MAXMEMORY: process.env.REDIS_MAXMEMORY,
137-
__EXPERIMENTAL_REDIS_BATCH_SIZE:
138-
process.env.__EXPERIMENTAL_REDIS_BATCH_SIZE,
139133
TRANSACTION_HISTORY_COUNT: process.env.TRANSACTION_HISTORY_COUNT,
140134
GLOBAL_RATE_LIMIT_PER_MIN: process.env.GLOBAL_RATE_LIMIT_PER_MIN,
141135
DD_TRACER_ACTIVATED: process.env.DD_TRACER_ACTIVATED,

src/utils/redis/redis.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,9 @@ import Redis from "ioredis";
22
import { env } from "../env";
33
import { logger } from "../logger";
44

5+
// ioredis has issues with batches over 100k+ (source: https://github.com/redis/ioredis/issues/801).
6+
export const MAX_REDIS_BATCH_SIZE = 50_000;
7+
58
export const redis = new Redis(env.REDIS_URL, {
69
enableAutoPipelining: true,
710
maxRetriesPerRequest: null,

src/worker/queues/mineTransactionQueue.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ export class MineTransactionQueue {
2323
const jobId = this.jobId(data);
2424
await this.q.add(jobId, serialized, {
2525
jobId,
26-
attempts: 200, // > 30 minutes with the backoffStrategy defined on the worker
26+
attempts: 100, // > 30 minutes with the backoffStrategy defined on the worker
2727
backoff: { type: "custom" },
2828
});
2929
};

src/worker/tasks/mineTransactionWorker.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -271,8 +271,8 @@ export const initMineTransactionWorker = () => {
271271
connection: redis,
272272
settings: {
273273
backoffStrategy: (attemptsMade: number) => {
274-
// Retries after: 2s, 4s, 6s, 8s, 10s, 10s, 10s, 10s, ...
275-
return Math.min(attemptsMade * 2_000, 10_000);
274+
// Retries at 2s, 4s, 6s, ..., 18s, 20s, 20s, 20s, ...
275+
return Math.min(attemptsMade * 2_000, 20_000);
276276
},
277277
},
278278
});

0 commit comments

Comments (0)