Skip to content

Commit d159c1a

Browse files
kripken authored and belraquib committed
Fix pthreads with closure compiler (emscripten-core#9569)
Turns out much of the pthreads runtime was not closure-safe. This PR fixes that, using quoted strings as closure expects. While doing that I noticed that some of the things in place for pthreads + MODULARIZE would also help closure. In both cases the key thing is that worker.js, that loads the main JS file, is separate from it, so it needs proper interfaces and can't just rely on global variables (which in closure are minified in the main JS, and in MODULARIZE are in another scope). So I removed things like makeAsmExportAccessInPthread, makeAsmGlobalAccessInPthread, makeAsmExportAndGlobalAssignTargetInPthread which makes the code simpler. Because of that I was also able to remove a bunch of globals from worker.js which further simplifies things, and should also help node.js workers (where the global scope is very different, emscripten-core#6567). Most of this PR is straightforward according to the above notes. One slightly tricky thing is the stack setup, which I refactored to use a new Module.applyStackValues method that worker.js calls at the right time (as the stack is set up after the JS loads). Code size effects: Before this PR: 69,567 bytes. After this PR: 69,644. After this PR, plus closure: 34,909. So this adds only 77 bytes, while also making the code simpler + making it possible to run closure and decrease size by 50%. In addition, I have followup work to further reduce the non-closure code size too, so this regression will be fixed (and more).
1 parent cfd1206 commit d159c1a

13 files changed

+187
-190
lines changed

emcc.py

+7-1
Original file line numberDiff line numberDiff line change
@@ -1195,6 +1195,9 @@ def is_supported_link_flag(f):
11951195
# These runtime methods are called from worker.js
11961196
shared.Settings.EXPORTED_RUNTIME_METHODS += ['establishStackSpace', 'dynCall_ii']
11971197

1198+
if shared.Settings.STACK_OVERFLOW_CHECK:
1199+
shared.Settings.EXPORTED_RUNTIME_METHODS += ['writeStackCookie', 'checkStackCookie', 'abortStackOverflow']
1200+
11981201
if shared.Settings.MODULARIZE_INSTANCE:
11991202
shared.Settings.MODULARIZE = 1
12001203

@@ -1361,7 +1364,10 @@ def is_supported_link_flag(f):
13611364
newargs += ['-pthread']
13621365
# some pthreads code is in asm.js library functions, which are auto-exported; for the wasm backend, we must
13631366
# manually export them
1364-
shared.Settings.EXPORTED_FUNCTIONS += ['_emscripten_get_global_libc', '___pthread_tsd_run_dtors', '__register_pthread_ptr', '_pthread_self', '___emscripten_pthread_data_constructor']
1367+
shared.Settings.EXPORTED_FUNCTIONS += [
1368+
'_emscripten_get_global_libc', '___pthread_tsd_run_dtors',
1369+
'__register_pthread_ptr', '_pthread_self',
1370+
'___emscripten_pthread_data_constructor', '_emscripten_futex_wake']
13651371

13661372
# set location of worker.js
13671373
shared.Settings.PTHREAD_WORKER_FILE = unsuffixed(os.path.basename(target)) + '.worker.js'

src/closure-externs.js

+12
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,18 @@ Math.max = function() {};
4848
Math.clz32 = function() {};
4949
Math.trunc = function() {};
5050

51+
/**
52+
* Atomics
53+
*/
54+
55+
var Atomics = {};
56+
Atomics.compareExchange = function() {};
57+
Atomics.exchange = function() {};
58+
Atomics.wait = function() {};
59+
Atomics.notify = function() {};
60+
Atomics.load = function() {};
61+
Atomics.store = function() {};
62+
5163
/**
5264
* SIMD.js support (not in upstream closure yet).
5365
*/

src/library.js

+7-7
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@
2525
LibraryManager.library = {
2626
// keep this low in memory, because we flatten arrays with them in them
2727
#if USE_PTHREADS
28-
_impure_ptr: '; if (ENVIRONMENT_IS_PTHREAD) __impure_ptr = PthreadWorkerInit.__impure_ptr; else PthreadWorkerInit.__impure_ptr __impure_ptr = {{{ makeStaticAlloc(4) }}}',
29-
__dso_handle: '; if (ENVIRONMENT_IS_PTHREAD) ___dso_handle = PthreadWorkerInit.___dso_handle; else PthreadWorkerInit.___dso_handle = ___dso_handle = {{{ makeStaticAlloc(4) }}}',
28+
_impure_ptr: '; if (ENVIRONMENT_IS_PTHREAD) __impure_ptr = PthreadWorkerInit["__impure_ptr"]; else PthreadWorkerInit["__impure_ptr"] = {{{ makeStaticAlloc(4) }}}',
29+
__dso_handle: '; if (ENVIRONMENT_IS_PTHREAD) ___dso_handle = PthreadWorkerInit["___dso_handle"]; else PthreadWorkerInit["___dso_handle"] = ___dso_handle = {{{ makeStaticAlloc(4) }}}',
3030
#else
3131
_impure_ptr: '{{{ makeStaticAlloc(1) }}}',
3232
__dso_handle: '{{{ makeStaticAlloc(1) }}}',
@@ -1880,9 +1880,9 @@ LibraryManager.library = {
18801880

18811881
// Statically allocated time struct.
18821882
#if USE_PTHREADS
1883-
__tm_current: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_current = PthreadWorkerInit.___tm_current; else PthreadWorkerInit.___tm_current = ___tm_current = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
1884-
__tm_timezone: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_timezone = PthreadWorkerInit.___tm_timezone; else PthreadWorkerInit.___tm_timezone = ___tm_timezone = {{{ makeStaticString("GMT") }}}',
1885-
__tm_formatted: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_formatted = PthreadWorkerInit.___tm_formatted; else PthreadWorkerInit.___tm_formatted = ___tm_formatted = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
1883+
__tm_current: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_current = PthreadWorkerInit["___tm_current"]; else PthreadWorkerInit["___tm_current"] = ___tm_current = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
1884+
__tm_timezone: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_timezone = PthreadWorkerInit["___tm_timezone"]; else PthreadWorkerInit["___tm_timezone"] = ___tm_timezone = {{{ makeStaticString("GMT") }}}',
1885+
__tm_formatted: '; if (ENVIRONMENT_IS_PTHREAD) ___tm_formatted = PthreadWorkerInit["___tm_formatted"]; else PthreadWorkerInit["___tm_formatted"] = ___tm_formatted = {{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
18861886
#else
18871887
__tm_current: '{{{ makeStaticAlloc(C_STRUCTS.tm.__size__) }}}',
18881888
// Statically allocated copy of the string "GMT" for gmtime() to point to
@@ -3239,8 +3239,8 @@ LibraryManager.library = {
32393239
// ==========================================================================
32403240

32413241
#if USE_PTHREADS
3242-
in6addr_any: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_any = PthreadWorkerInit._in6addr_any; else PthreadWorkerInit._in6addr_any = _in6addr_any = {{{ makeStaticAlloc(16) }}}',
3243-
in6addr_loopback: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_loopback = PthreadWorkerInit._in6addr_loopback; else PthreadWorkerInit._in6addr_loopback = _in6addr_loopback = {{{ makeStaticAlloc(16) }}}',
3242+
in6addr_any: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_any = PthreadWorkerInit["_in6addr_any"]; else PthreadWorkerInit["_in6addr_any"] = _in6addr_any = {{{ makeStaticAlloc(16) }}}',
3243+
in6addr_loopback: '; if (ENVIRONMENT_IS_PTHREAD) _in6addr_loopback = PthreadWorkerInit["_in6addr_loopback"]; else PthreadWorkerInit["_in6addr_loopback"] = _in6addr_loopback = {{{ makeStaticAlloc(16) }}}',
32443244
#else
32453245
in6addr_any:
32463246
'{{{ makeStaticAlloc(16) }}}',

src/library_fetch.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
var LibraryFetch = {
99
#if USE_PTHREADS
1010
$Fetch__postset: 'if (!ENVIRONMENT_IS_PTHREAD) Fetch.staticInit();',
11-
fetch_work_queue: '; if (ENVIRONMENT_IS_PTHREAD) _fetch_work_queue = PthreadWorkerInit._fetch_work_queue; else PthreadWorkerInit._fetch_work_queue = _fetch_work_queue = {{{ makeStaticAlloc(12) }}}',
11+
fetch_work_queue: '; if (ENVIRONMENT_IS_PTHREAD) _fetch_work_queue = PthreadWorkerInit["_fetch_work_queue"]; else PthreadWorkerInit["_fetch_work_queue"] = _fetch_work_queue = {{{ makeStaticAlloc(12) }}}',
1212
#else
1313
$Fetch__postset: 'Fetch.staticInit();',
1414
fetch_work_queue: '{{{ makeStaticAlloc(12) }}}',

src/library_pthread.js

+72-58
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,11 @@
44
// found in the LICENSE file.
55

66
var LibraryPThread = {
7-
$PThread__postset: 'if (!ENVIRONMENT_IS_PTHREAD) PThread.initMainThreadBlock();',
8-
$PThread__deps: ['$PROCINFO', '_register_pthread_ptr', 'emscripten_main_thread_process_queued_calls', '$ERRNO_CODES', 'emscripten_futex_wake'],
7+
$PThread__postset: 'if (!ENVIRONMENT_IS_PTHREAD) PThread.initMainThreadBlock(); else PThread.initWorker();',
8+
$PThread__deps: ['$PROCINFO', '_register_pthread_ptr',
9+
'emscripten_main_thread_process_queued_calls',
10+
'$ERRNO_CODES', 'emscripten_futex_wake', '_kill_thread',
11+
'_cancel_thread', '_cleanup_thread'],
912
$PThread: {
1013
MAIN_THREAD_ID: 1, // A special constant that identifies the main JS thread ID.
1114
mainThreadInfo: {
@@ -65,6 +68,16 @@ var LibraryPThread = {
6568
PThread.createProfilerBlock(PThread.mainThreadBlock);
6669
PThread.setThreadName(PThread.mainThreadBlock, "Browser main thread");
6770
PThread.setThreadStatus(PThread.mainThreadBlock, {{{ cDefine('EM_THREAD_STATUS_RUNNING') }}});
71+
#endif
72+
},
73+
initWorker: function() {
74+
#if USE_CLOSURE_COMPILER
75+
// worker.js is not compiled together with us, and must access certain
76+
// things.
77+
PThread['receiveObjectTransfer'] = PThread.receiveObjectTransfer;
78+
PThread['setThreadStatus'] = PThread.setThreadStatus;
79+
PThread['threadCancel'] = PThread.threadCancel;
80+
PThread['threadExit'] = PThread.threadExit;
6881
#endif
6982
},
7083
// Maps pthread_t to pthread info objects
@@ -178,7 +191,7 @@ var LibraryPThread = {
178191
if (ENVIRONMENT_IS_PTHREAD) {
179192
// Note: in theory we would like to return any offscreen canvases back to the main thread,
180193
// but if we ever fetched a rendering context for them that would not be valid, so we don't try.
181-
postMessage({ cmd: 'exit' });
194+
postMessage({ 'cmd': 'exit' });
182195
}
183196
}
184197
},
@@ -190,7 +203,7 @@ var LibraryPThread = {
190203
_emscripten_futex_wake(threadInfoStruct + {{{ C_STRUCTS.pthread.threadStatus }}}, {{{ cDefine('INT_MAX') }}}); // wake all threads
191204
threadInfoStruct = selfThreadId = 0; // Not hosting a pthread anymore in this worker, reset the info structures to null.
192205
__register_pthread_ptr(0, 0, 0); // Unregister the thread block also inside the asm.js scope.
193-
postMessage({ cmd: 'cancelDone' });
206+
postMessage({ 'cmd': 'cancelDone' });
194207
},
195208

196209
terminateAllThreads: function() {
@@ -305,32 +318,32 @@ var LibraryPThread = {
305318

306319
// Ask the new worker to load up the Emscripten-compiled page. This is a heavy operation.
307320
worker.postMessage({
308-
cmd: 'load',
321+
'cmd': 'load',
309322
// If the application main .js file was loaded from a Blob, then it is not possible
310323
// to access the URL of the current script that could be passed to a Web Worker so that
311324
// it could load up the same file. In that case, developer must either deliver the Blob
312325
// object in Module['mainScriptUrlOrBlob'], or a URL to it, so that pthread Workers can
313326
// independently load up the same main application file.
314-
urlOrBlob: Module['mainScriptUrlOrBlob'] || _scriptDir,
327+
'urlOrBlob': Module['mainScriptUrlOrBlob'] || _scriptDir,
315328
#if WASM
316-
wasmMemory: wasmMemory,
317-
wasmModule: wasmModule,
329+
'wasmMemory': wasmMemory,
330+
'wasmModule': wasmModule,
318331
#if LOAD_SOURCE_MAP
319-
wasmSourceMap: wasmSourceMap,
332+
'wasmSourceMap': wasmSourceMap,
320333
#endif
321334
#if USE_OFFSET_CONVERTER
322-
wasmOffsetConverter: wasmOffsetConverter,
335+
'wasmOffsetConverter': wasmOffsetConverter,
323336
#endif
324337
#else
325-
buffer: HEAPU8.buffer,
326-
asmJsUrlOrBlob: Module["asmJsUrlOrBlob"],
338+
'buffer': HEAPU8.buffer,
339+
'asmJsUrlOrBlob': Module["asmJsUrlOrBlob"],
327340
#endif
328341
#if !WASM_BACKEND
329-
tempDoublePtr: tempDoublePtr,
342+
'tempDoublePtr': tempDoublePtr,
330343
#endif
331-
DYNAMIC_BASE: DYNAMIC_BASE,
332-
DYNAMICTOP_PTR: DYNAMICTOP_PTR,
333-
PthreadWorkerInit: PthreadWorkerInit
344+
'DYNAMIC_BASE': DYNAMIC_BASE,
345+
'DYNAMICTOP_PTR': DYNAMICTOP_PTR,
346+
'PthreadWorkerInit': PthreadWorkerInit
334347
});
335348
PThread.unusedWorkers.push(worker);
336349
}
@@ -346,34 +359,35 @@ var LibraryPThread = {
346359
var worker = workers[i];
347360
(function(worker) {
348361
worker.onmessage = function(e) {
349-
var d = e.data;
362+
var d = e['data'];
363+
var cmd = d['cmd'];
350364
// Sometimes we need to backproxy events to the calling thread (e.g. HTML5 DOM events handlers such as emscripten_set_mousemove_callback()), so keep track in a globally accessible variable about the thread that initiated the proxying.
351365
if (worker.pthread) PThread.currentProxiedOperationCallerThread = worker.pthread.threadInfoStruct;
352366

353367
// If this message is intended to a recipient that is not the main thread, forward it to the target thread.
354-
if (d.targetThread && d.targetThread != _pthread_self()) {
368+
if (d['targetThread'] && d['targetThread'] != _pthread_self()) {
355369
var thread = PThread.pthreads[d.targetThread];
356370
if (thread) {
357-
thread.worker.postMessage(e.data, d.transferList);
371+
thread.worker.postMessage(e.data, d['transferList']);
358372
} else {
359-
console.error('Internal error! Worker sent a message "' + d.cmd + '" to target pthread ' + d.targetThread + ', but that thread no longer exists!');
373+
console.error('Internal error! Worker sent a message "' + cmd + '" to target pthread ' + d['targetThread'] + ', but that thread no longer exists!');
360374
}
361375
PThread.currentProxiedOperationCallerThread = undefined;
362376
return;
363377
}
364378

365-
if (d.cmd === 'processQueuedMainThreadWork') {
379+
if (cmd === 'processQueuedMainThreadWork') {
366380
// TODO: Must post message to main Emscripten thread in PROXY_TO_WORKER mode.
367381
_emscripten_main_thread_process_queued_calls();
368-
} else if (d.cmd === 'spawnThread') {
382+
} else if (cmd === 'spawnThread') {
369383
__spawn_thread(e.data);
370-
} else if (d.cmd === 'cleanupThread') {
371-
__cleanup_thread(d.thread);
372-
} else if (d.cmd === 'killThread') {
373-
__kill_thread(d.thread);
374-
} else if (d.cmd === 'cancelThread') {
375-
__cancel_thread(d.thread);
376-
} else if (d.cmd === 'loaded') {
384+
} else if (cmd === 'cleanupThread') {
385+
__cleanup_thread(d['thread']);
386+
} else if (cmd === 'killThread') {
387+
__kill_thread(d['thread']);
388+
} else if (cmd === 'cancelThread') {
389+
__cancel_thread(d['thread']);
390+
} else if (cmd === 'loaded') {
377391
worker.loaded = true;
378392
// If this Worker is already pending to start running a thread, launch the thread now
379393
if (worker.runPthread) {
@@ -384,34 +398,34 @@ var LibraryPThread = {
384398
if (numWorkersLoaded === numWorkers && onFinishedLoading) {
385399
onFinishedLoading();
386400
}
387-
} else if (d.cmd === 'print') {
388-
out('Thread ' + d.threadId + ': ' + d.text);
389-
} else if (d.cmd === 'printErr') {
390-
err('Thread ' + d.threadId + ': ' + d.text);
391-
} else if (d.cmd === 'alert') {
392-
alert('Thread ' + d.threadId + ': ' + d.text);
393-
} else if (d.cmd === 'exit') {
401+
} else if (cmd === 'print') {
402+
out('Thread ' + d['threadId'] + ': ' + d['text']);
403+
} else if (cmd === 'printErr') {
404+
err('Thread ' + d['threadId'] + ': ' + d['text']);
405+
} else if (cmd === 'alert') {
406+
alert('Thread ' + d['threadId'] + ': ' + d['text']);
407+
} else if (cmd === 'exit') {
394408
var detached = worker.pthread && Atomics.load(HEAPU32, (worker.pthread.thread + {{{ C_STRUCTS.pthread.detached }}}) >> 2);
395409
if (detached) {
396410
PThread.returnWorkerToPool(worker);
397411
}
398-
} else if (d.cmd === 'exitProcess') {
412+
} else if (cmd === 'exitProcess') {
399413
// A pthread has requested to exit the whole application process (runtime).
400414
noExitRuntime = false;
401415
try {
402-
exit(d.returnCode);
416+
exit(d['returnCode']);
403417
} catch (e) {
404418
if (e instanceof ExitStatus) return;
405419
throw e;
406420
}
407-
} else if (d.cmd === 'cancelDone') {
421+
} else if (cmd === 'cancelDone') {
408422
PThread.returnWorkerToPool(worker);
409-
} else if (d.cmd === 'objectTransfer') {
423+
} else if (cmd === 'objectTransfer') {
410424
PThread.receiveObjectTransfer(e.data);
411425
} else if (e.data.target === 'setimmediate') {
412426
worker.postMessage(e.data); // Worker wants to postMessage() to itself to implement setImmediate() emulation.
413427
} else {
414-
err("worker sent an unknown command " + d.cmd);
428+
err("worker sent an unknown command " + cmd);
415429
}
416430
PThread.currentProxiedOperationCallerThread = undefined;
417431
};
@@ -483,7 +497,7 @@ var LibraryPThread = {
483497
if (ENVIRONMENT_IS_PTHREAD) throw 'Internal Error! _cancel_thread() can only ever be called from main application thread!';
484498
if (!pthread_ptr) throw 'Internal Error! Null pthread_ptr in _cancel_thread!';
485499
var pthread = PThread.pthreads[pthread_ptr];
486-
pthread.worker.postMessage({ cmd: 'cancel' });
500+
pthread.worker.postMessage({ 'cmd': 'cancel' });
487501
},
488502

489503
_spawn_thread: function(threadParams) {
@@ -537,17 +551,17 @@ var LibraryPThread = {
537551

538552
worker.pthread = pthread;
539553
var msg = {
540-
cmd: 'run',
541-
start_routine: threadParams.startRoutine,
542-
arg: threadParams.arg,
543-
threadInfoStruct: threadParams.pthread_ptr,
544-
selfThreadId: threadParams.pthread_ptr, // TODO: Remove this since thread ID is now the same as the thread address.
545-
parentThreadId: threadParams.parent_pthread_ptr,
546-
stackBase: threadParams.stackBase,
547-
stackSize: threadParams.stackSize,
554+
'cmd': 'run',
555+
'start_routine': threadParams.startRoutine,
556+
'arg': threadParams.arg,
557+
'threadInfoStruct': threadParams.pthread_ptr,
558+
'selfThreadId': threadParams.pthread_ptr, // TODO: Remove this since thread ID is now the same as the thread address.
559+
'parentThreadId': threadParams.parent_pthread_ptr,
560+
'stackBase': threadParams.stackBase,
561+
'stackSize': threadParams.stackSize,
548562
#if OFFSCREENCANVAS_SUPPORT
549-
moduleCanvasId: threadParams.moduleCanvasId,
550-
offscreenCanvases: threadParams.offscreenCanvases,
563+
'moduleCanvasId': threadParams.moduleCanvasId,
564+
'offscreenCanvases': threadParams.offscreenCanvases,
551565
#endif
552566
};
553567
worker.runPthread = function() {
@@ -562,7 +576,7 @@ var LibraryPThread = {
562576
},
563577

564578
_num_logical_cores__deps: ['emscripten_force_num_logical_cores'],
565-
_num_logical_cores: '; if (ENVIRONMENT_IS_PTHREAD) __num_logical_cores = PthreadWorkerInit.__num_logical_cores; else { PthreadWorkerInit.__num_logical_cores = __num_logical_cores = {{{ makeStaticAlloc(4) }}}; HEAPU32[__num_logical_cores>>2] = navigator["hardwareConcurrency"] || ' + {{{ PTHREAD_HINT_NUM_CORES }}} + '; }',
579+
_num_logical_cores: '; if (ENVIRONMENT_IS_PTHREAD) __num_logical_cores = PthreadWorkerInit["__num_logical_cores"]; else { PthreadWorkerInit["__num_logical_cores"] = __num_logical_cores = {{{ makeStaticAlloc(4) }}}; HEAPU32[__num_logical_cores>>2] = navigator["hardwareConcurrency"] || ' + {{{ PTHREAD_HINT_NUM_CORES }}} + '; }',
566580

567581
emscripten_has_threading_support: function() {
568582
return typeof SharedArrayBuffer !== 'undefined';
@@ -822,7 +836,7 @@ var LibraryPThread = {
822836
Atomics.store(HEAPU32, (thread + {{{ C_STRUCTS.pthread.detached }}} ) >> 2, 1); // Mark the thread as detached.
823837

824838
if (!ENVIRONMENT_IS_PTHREAD) __cleanup_thread(thread);
825-
else postMessage({ cmd: 'cleanupThread', thread: thread });
839+
else postMessage({ 'cmd': 'cleanupThread', 'thread': thread });
826840
return 0;
827841
}
828842
// TODO HACK! Replace the _js variant with just _pthread_testcancel:
@@ -854,7 +868,7 @@ var LibraryPThread = {
854868
}
855869
if (signal != 0) {
856870
if (!ENVIRONMENT_IS_PTHREAD) __kill_thread(thread);
857-
else postMessage({ cmd: 'killThread', thread: thread});
871+
else postMessage({ 'cmd': 'killThread', 'thread': thread});
858872
}
859873
return 0;
860874
},
@@ -876,7 +890,7 @@ var LibraryPThread = {
876890
}
877891
Atomics.compareExchange(HEAPU32, (thread + {{{ C_STRUCTS.pthread.threadStatus }}} ) >> 2, 0, 2); // Signal the thread that it needs to cancel itself.
878892
if (!ENVIRONMENT_IS_PTHREAD) __cancel_thread(thread);
879-
else postMessage({ cmd: 'cancelThread', thread: thread});
893+
else postMessage({ 'cmd': 'cancelThread', 'thread': thread});
880894
return 0;
881895
},
882896

@@ -1068,7 +1082,7 @@ var LibraryPThread = {
10681082
},
10691083

10701084
// Stores the memory address that the main thread is waiting on, if any.
1071-
_main_thread_futex_wait_address: '; if (ENVIRONMENT_IS_PTHREAD) __main_thread_futex_wait_address = PthreadWorkerInit.__main_thread_futex_wait_address; else PthreadWorkerInit.__main_thread_futex_wait_address = __main_thread_futex_wait_address = {{{ makeStaticAlloc(4) }}}',
1085+
_main_thread_futex_wait_address: '; if (ENVIRONMENT_IS_PTHREAD) __main_thread_futex_wait_address = PthreadWorkerInit["__main_thread_futex_wait_address"]; else PthreadWorkerInit["__main_thread_futex_wait_address"] = __main_thread_futex_wait_address = {{{ makeStaticAlloc(4) }}}',
10721086

10731087
// Returns 0 on success, or one of the values -ETIMEDOUT, -EWOULDBLOCK or -EINVAL on error.
10741088
emscripten_futex_wait__deps: ['_main_thread_futex_wait_address', 'emscripten_main_thread_process_queued_calls'],
@@ -1159,7 +1173,7 @@ var LibraryPThread = {
11591173

11601174
__call_main: function(argc, argv) {
11611175
var returnCode = _main(argc, argv);
1162-
if (!noExitRuntime) postMessage({ cmd: 'exitProcess', returnCode: returnCode });
1176+
if (!noExitRuntime) postMessage({ 'cmd': 'exitProcess', 'returnCode': returnCode });
11631177
return returnCode;
11641178
},
11651179

0 commit comments

Comments (0)