Skip to content

Commit f560813

Browse files
ShogunPanda authored and AVVS committed
chore: Formatted code using prettier.
1 parent aba3c74 commit f560813

16 files changed

+536
-447
lines changed

benchmarks/autopipelining-cluster.ts

+37-29
Original file line numberDiff line numberDiff line change
@@ -1,60 +1,68 @@
1-
import { cronometro } from 'cronometro'
2-
import { readFileSync } from 'fs'
3-
import { join } from 'path'
4-
import Cluster from '../lib/cluster'
5-
6-
const numNodes = parseInt(process.env.NODES || '3', 10)
7-
const iterations = parseInt(process.env.ITERATIONS || '10000', 10)
8-
const batchSize = parseInt(process.env.BATCH_SIZE || '1000', 10)
9-
const keys = readFileSync(join(__dirname, `fixtures/cluster-${numNodes}.txt`), 'utf-8').split('\n')
10-
const configuration = Array.from(Array(numNodes), (_, i) => ({ host: '127.0.0.1', port: 30000 + i + 1 }))
11-
let cluster
1+
import { cronometro } from "cronometro";
2+
import { readFileSync } from "fs";
3+
import { join } from "path";
4+
import Cluster from "../lib/cluster";
5+
6+
const numNodes = parseInt(process.env.NODES || "3", 10);
7+
const iterations = parseInt(process.env.ITERATIONS || "10000", 10);
8+
const batchSize = parseInt(process.env.BATCH_SIZE || "1000", 10);
9+
const keys = readFileSync(
10+
join(__dirname, `fixtures/cluster-${numNodes}.txt`),
11+
"utf-8"
12+
).split("\n");
13+
const configuration = Array.from(Array(numNodes), (_, i) => ({
14+
host: "127.0.0.1",
15+
port: 30000 + i + 1,
16+
}));
17+
let cluster;
1218

1319
function command(): string {
14-
const choice = Math.random()
20+
const choice = Math.random();
1521

1622
if (choice < 0.3) {
17-
return 'ttl'
23+
return "ttl";
1824
} else if (choice < 0.6) {
19-
return 'exists'
25+
return "exists";
2026
}
2127

22-
return 'get'
28+
return "get";
2329
}
2430

2531
function test() {
26-
const index = Math.floor(Math.random() * keys.length)
32+
const index = Math.floor(Math.random() * keys.length);
2733

28-
return Promise.all(Array.from(Array(batchSize)).map(() => cluster[command()](keys[index])))
34+
return Promise.all(
35+
Array.from(Array(batchSize)).map(() => cluster[command()](keys[index]))
36+
);
2937
}
3038

3139
function after(cb) {
32-
cluster.quit()
33-
cb()
40+
cluster.quit();
41+
cb();
3442
}
3543

3644
cronometro(
3745
{
3846
default: {
3947
test,
4048
before(cb) {
41-
cluster = new Cluster(configuration)
49+
cluster = new Cluster(configuration);
4250

43-
cb()
51+
cb();
4452
},
45-
after
53+
after,
4654
},
47-
'enableAutoPipelining=true': {
55+
"enableAutoPipelining=true": {
4856
test,
4957
before(cb) {
50-
cluster = new Cluster(configuration, { enableAutoPipelining: true })
51-
cb()
58+
cluster = new Cluster(configuration, { enableAutoPipelining: true });
59+
cb();
5260
},
53-
after
54-
}
61+
after,
62+
},
5563
},
5664
{
5765
iterations,
58-
print: { compare: true }
66+
print: { compare: true },
5967
}
60-
)
68+
);

benchmarks/autopipelining-single.ts

+32-27
Original file line numberDiff line numberDiff line change
@@ -1,58 +1,63 @@
1-
import { cronometro } from 'cronometro'
2-
import { readFileSync } from 'fs'
3-
import { join } from 'path'
4-
import Redis from '../lib/redis'
5-
6-
const iterations = parseInt(process.env.ITERATIONS || '10000', 10)
7-
const batchSize = parseInt(process.env.BATCH_SIZE || '1000', 10)
8-
const keys = readFileSync(join(__dirname, 'fixtures/cluster-3.txt'), 'utf-8').split('\n')
9-
let redis
1+
import { cronometro } from "cronometro";
2+
import { readFileSync } from "fs";
3+
import { join } from "path";
4+
import Redis from "../lib/redis";
5+
6+
const iterations = parseInt(process.env.ITERATIONS || "10000", 10);
7+
const batchSize = parseInt(process.env.BATCH_SIZE || "1000", 10);
8+
const keys = readFileSync(
9+
join(__dirname, "fixtures/cluster-3.txt"),
10+
"utf-8"
11+
).split("\n");
12+
let redis;
1013

1114
function command(): string {
12-
const choice = Math.random()
15+
const choice = Math.random();
1316

1417
if (choice < 0.3) {
15-
return 'ttl'
18+
return "ttl";
1619
} else if (choice < 0.6) {
17-
return 'exists'
20+
return "exists";
1821
}
1922

20-
return 'get'
23+
return "get";
2124
}
2225

2326
function test() {
24-
const index = Math.floor(Math.random() * keys.length)
27+
const index = Math.floor(Math.random() * keys.length);
2528

26-
return Promise.all(Array.from(Array(batchSize)).map(() => redis[command()](keys[index])))
29+
return Promise.all(
30+
Array.from(Array(batchSize)).map(() => redis[command()](keys[index]))
31+
);
2732
}
2833

2934
function after(cb) {
30-
redis.quit()
31-
cb()
35+
redis.quit();
36+
cb();
3237
}
3338

3439
cronometro(
3540
{
3641
default: {
3742
test,
3843
before(cb) {
39-
redis = new Redis()
44+
redis = new Redis();
4045

41-
cb()
46+
cb();
4247
},
43-
after
48+
after,
4449
},
45-
'enableAutoPipelining=true': {
50+
"enableAutoPipelining=true": {
4651
test,
4752
before(cb) {
48-
redis = new Redis({ enableAutoPipelining: true })
49-
cb()
53+
redis = new Redis({ enableAutoPipelining: true });
54+
cb();
5055
},
51-
after
52-
}
56+
after,
57+
},
5358
},
5459
{
5560
iterations,
56-
print: { compare: true }
61+
print: { compare: true },
5762
}
58-
)
63+
);

benchmarks/dropBuffer.ts

+19-19
Original file line numberDiff line numberDiff line change
@@ -1,38 +1,38 @@
1-
import { cronometro } from 'cronometro'
2-
import Redis from '../lib/redis'
1+
import { cronometro } from "cronometro";
2+
import Redis from "../lib/redis";
33

4-
let redis
4+
let redis;
55

66
cronometro(
77
{
88
default: {
99
test() {
10-
return redis.set('foo', 'bar')
10+
return redis.set("foo", "bar");
1111
},
1212
before(cb) {
13-
redis = new Redis()
14-
cb()
13+
redis = new Redis();
14+
cb();
1515
},
1616
after(cb) {
17-
redis.quit()
18-
cb()
19-
}
17+
redis.quit();
18+
cb();
19+
},
2020
},
21-
'dropBufferSupport=true': {
21+
"dropBufferSupport=true": {
2222
test() {
23-
return redis.set('foo', 'bar')
23+
return redis.set("foo", "bar");
2424
},
2525
before(cb) {
26-
redis = new Redis({ dropBufferSupport: true })
27-
cb()
26+
redis = new Redis({ dropBufferSupport: true });
27+
cb();
2828
},
2929
after(cb) {
30-
redis.quit()
31-
cb()
32-
}
33-
}
30+
redis.quit();
31+
cb();
32+
},
33+
},
3434
},
3535
{
36-
print: { compare: true }
36+
print: { compare: true },
3737
}
38-
)
38+
);

benchmarks/fixtures/generate.ts

+44-33
Original file line numberDiff line numberDiff line change
@@ -1,67 +1,78 @@
1-
'use strict'
1+
"use strict";
22

3-
const start = process.hrtime.bigint()
3+
const start = process.hrtime.bigint();
44

5-
import * as calculateSlot from 'cluster-key-slot'
6-
import { writeFileSync } from 'fs'
7-
import { join } from 'path'
8-
import { v4 as uuid } from 'uuid'
5+
import * as calculateSlot from "cluster-key-slot";
6+
import { writeFileSync } from "fs";
7+
import { join } from "path";
8+
import { v4 as uuid } from "uuid";
99

1010
// Input parameters
11-
const numKeys = parseInt(process.env.KEYS || '1000000', 10)
12-
const numNodes = parseInt(process.env.NODES || '3', 10)
11+
const numKeys = parseInt(process.env.KEYS || "1000000", 10);
12+
const numNodes = parseInt(process.env.NODES || "3", 10);
1313

1414
// Prepare topology
15-
const maxSlot = 16384
16-
const destination = join(__dirname, `cluster-${numNodes}.txt`)
17-
const counts = Array.from(Array(numNodes), () => 0)
18-
const keys = []
15+
const maxSlot = 16384;
16+
const destination = join(__dirname, `cluster-${numNodes}.txt`);
17+
const counts = Array.from(Array(numNodes), () => 0);
18+
const keys = [];
1919

2020
/*
2121
This algorithm is taken and adapted from Redis source code
2222
See: https://github.com/redis/redis/blob/d9f970d8d3f0b694f1e8915cab6d4eab9cfb2ef1/src/redis-cli.c#L5453
2323
*/
24-
const nodes = [] // This only holds starting slot, since the ending slot can be computed out of the next one
25-
let first = 0
26-
let cursor = 0
27-
const slotsPerNode = maxSlot / numNodes
24+
const nodes = []; // This only holds starting slot, since the ending slot can be computed out of the next one
25+
let first = 0;
26+
let cursor = 0;
27+
const slotsPerNode = maxSlot / numNodes;
2828

2929
for (let i = 0; i < numNodes; i++) {
30-
let last = Math.round(cursor + slotsPerNode - 1)
30+
let last = Math.round(cursor + slotsPerNode - 1);
3131

3232
if (last > maxSlot || i === numNodes - 1) {
33-
last = maxSlot - 1
33+
last = maxSlot - 1;
3434
}
3535

3636
if (last < first) {
37-
last = first
37+
last = first;
3838
}
3939

40-
nodes.push(first)
41-
first = last + 1
42-
cursor += slotsPerNode
40+
nodes.push(first);
41+
first = last + 1;
42+
cursor += slotsPerNode;
4343
}
4444

4545
// Generate keys and also track slot allocations
4646
for (let i = 0; i < numKeys; i++) {
47-
const key = uuid()
48-
const slot = calculateSlot(key)
49-
const node = nodes.findIndex((start, i) => i === numNodes - 1 || (slot >= start && slot < nodes[i + 1]))
47+
const key = uuid();
48+
const slot = calculateSlot(key);
49+
const node = nodes.findIndex(
50+
(start, i) => i === numNodes - 1 || (slot >= start && slot < nodes[i + 1])
51+
);
5052

51-
counts[node]++
52-
keys.push(key)
53+
counts[node]++;
54+
keys.push(key);
5355
}
5456

5557
// Save keys
56-
writeFileSync(destination, keys.join('\n'))
58+
writeFileSync(destination, keys.join("\n"));
5759

5860
// Print summary
59-
console.log(`Generated ${numKeys} keys in ${(Number(process.hrtime.bigint() - start) / 1e6).toFixed(2)} ms `)
61+
console.log(
62+
`Generated ${numKeys} keys in ${(
63+
Number(process.hrtime.bigint() - start) / 1e6
64+
).toFixed(2)} ms `
65+
);
6066

6167
for (let i = 0; i < numNodes; i++) {
62-
const from = nodes[i]
63-
const to = (i === numNodes - 1 ? maxSlot : nodes[i + 1]) - 1
68+
const from = nodes[i];
69+
const to = (i === numNodes - 1 ? maxSlot : nodes[i + 1]) - 1;
6470
console.log(
65-
` - Generated ${counts[i]} keys for node(s) serving slots ${from}-${to} (${((counts[i] * 100) / numKeys).toFixed(2)} %)`
66-
)
71+
` - Generated ${
72+
counts[i]
73+
} keys for node(s) serving slots ${from}-${to} (${(
74+
(counts[i] * 100) /
75+
numKeys
76+
).toFixed(2)} %)`
77+
);
6778
}

0 commit comments

Comments (0)