Skip to content

Commit 437985e

feat: updated benchmarks
1 parent 64c4e6e commit 437985e

File tree

13 files changed, +1084 -114 lines changed


include/process/base_process.hpp

Lines changed: 1 addition & 0 deletions
@@ -201,6 +201,7 @@ class base_process {
 
   // parent
   PyOS_AfterFork_Parent();
+  LOGELAPSED("svc_fork time ", svc_init_fork);
   pickl.~pickling();
 
   // Release the main GIL
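The added LOGELAPSED call reports, on the parent side and right after PyOS_AfterFork_Parent(), how long the fork of the service took. The macro's definition is not part of this diff, so the sketch below is only an illustrative analogue in Python (hypothetical names, POSIX-only), assuming svc_init_fork is a timestamp taken before the fork and LOGELAPSED logs the time elapsed since it.

# Illustrative sketch only: times a fork from the parent process, roughly what
# LOGELAPSED("svc_fork time ", svc_init_fork) presumably reports in the C++ code.
import os
import sys
import time

t0 = time.perf_counter()       # stand-in for svc_init_fork, taken before the fork
pid = os.fork()
if pid == 0:
    os._exit(0)                # child: do nothing and exit immediately
elapsed_ms = (time.perf_counter() - t0) * 1e3
print(f"svc_fork time {elapsed_ms:.3f} ms", file=sys.stderr)
os.waitpid(pid, 0)             # reap the child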

res.txt

Lines changed: 1 addition & 18 deletions
@@ -1,18 +1 @@
-Running a farm of 1 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((1, 256227.337)) # bytes = 100 ms = 50
-Running a farm of 2 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((2, 128123.051)) # bytes = 100 ms = 50
-Running a farm of 4 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((4, 64074.72)) # bytes = 100 ms = 50
-Running a farm of 8 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((8, 32052.79)) # bytes = 100 ms = 50
-Running a farm of 12 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((12, 21393.738)) # bytes = 100 ms = 50
-Running a farm of 26 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((26, 9913.782)) # bytes = 100 ms = 50
-Running a farm of 36 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((36, 7215.942)) # bytes = 100 ms = 50
-Running a farm of 48 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((48, 5482.052)) # bytes = 100 ms = 50
-Running a farm of 64 workers and 5120 tasks. Each task is 50ms long and has a size of 100 bytes. Using processes-based strategy
-res[100] = res.get(100, []); res.get(100).append((64, 4197.795)) # bytes = 100 ms = 50
+25,28,29,30,32,30,34,38,36,38,40,38,44,43,42,43,44,46,47,50,47,51,44,47,47,45,
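The replacement res.txt drops the per-run log lines in favor of a bare comma-separated list of numbers; the diff does not say what the values measure (the newly added thesis/spawntime.ipynb hints at timing samples, but that is a guess). Under that assumption, a minimal sketch to load and summarize such a file could look like this:

# Minimal sketch, assuming res.txt is now just a flat comma-separated list of
# numeric samples (units and meaning are not stated in the diff).
from statistics import mean, median

with open("res.txt") as f:
    samples = [float(x) for x in f.read().split(",") if x.strip()]

print(f"n={len(samples)}  min={min(samples)}  max={max(samples)}  "
      f"mean={mean(samples):.2f}  median={median(samples)}")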

thesis/bench.ipynb

Lines changed: 56 additions & 6 deletions
Large diffs are not rendered by default.

thesis/bench_many_ms.ipynb

Lines changed: 250 additions & 0 deletions
Large diffs are not rendered by default.

thesis/farm/plot_512_1ms_new.ipynb

Lines changed: 11 additions & 11 deletions
@@ -797,28 +797,28 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 4,
+    "execution_count": 7,
     "metadata": {},
     "outputs": [
      {
       "name": "stdout",
       "output_type": "stream",
       "text": [
-       "[100, [256227.337, 128123.051, 64074.72, 32052.79, 21393.738, 9913.782, 7215.942, 5482.052, 4197.795]],\n"
+       "[10, [51252.803, 25647.669, 12847.717, 6462.909, 4367.318, 2109.714, 1616.25, 1304.495, 1086.843]],\n"
       ]
      }
     ],
     "source": [
      "res = dict()\n",
-     "res[100] = res.get(100, []); res.get(100).append((1, 256227.337)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((2, 128123.051)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((4, 64074.72)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((8, 32052.79)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((12, 21393.738)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((26, 9913.782)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((36, 7215.942)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((48, 5482.052)) # bytes = 100 ms = 50\n",
-     "res[100] = res.get(100, []); res.get(100).append((64, 4197.795)) # bytes = 100 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((1, 51252.803)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((2, 25647.669)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((4, 12847.717)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((8, 6462.909)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((12, 4367.318)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((26, 2109.714)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((36, 1616.25)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((48, 1304.495)) # bytes = 10 ms = 50\n",
+     "res[10] = res.get(10, []); res.get(10).append((64, 1086.843)) # bytes = 10 ms = 50\n",
      "\n",
      "final_result = []\n",
      "for bytes, lis in res.items():\n",

thesis/lowerbounds.ipynb

Lines changed: 44 additions & 44 deletions
Large diffs are not rendered by default.

thesis/servicetime.ipynb

Lines changed: 8 additions & 8 deletions
Large diffs are not rendered by default.

thesis/spawntime.ipynb

Lines changed: 115 additions & 0 deletions
Large diffs are not rendered by default.

thesis/with-numpy.ipynb

Lines changed: 185 additions & 11 deletions
Large diffs are not rendered by default.

thesis/with-numpy.png

-25.5 KB
