@@ -7,6 +7,7 @@ use failure::{bail, format_err, Error};
 use walrus::ir::Value;
 use walrus::{DataId, FunctionId, InitExpr, ValType};
 use walrus::{ExportItem, GlobalId, GlobalKind, ImportKind, MemoryId, Module};
+use wasm_bindgen_wasm_conventions as wasm_conventions;
 
 const PAGE_SIZE: u32 = 1 << 16;
 
@@ -16,6 +17,7 @@ const PAGE_SIZE: u32 = 1 << 16;
 pub struct Config {
     maximum_memory: u32,
     thread_stack_size: u32,
+    enabled: bool,
 }
 
 impl Config {
@@ -24,9 +26,15 @@ impl Config {
         Config {
            maximum_memory: 1 << 30,    // 1GB
            thread_stack_size: 1 << 20, // 1MB
+           enabled: env::var("WASM_BINDGEN_THREADS").is_ok(),
         }
     }
 
+    /// Is threaded Wasm enabled?
+    pub fn is_enabled(&self) -> bool {
+        self.enabled
+    }
+
     /// Specify the maximum amount of memory the wasm module can ever have.
     ///
     /// We'll be specifying that the memory for this wasm module is shared, and
@@ -79,18 +87,22 @@ impl Config {
     ///
     /// More and/or less may happen here over time, stay tuned!
     pub fn run(&self, module: &mut Module) -> Result<(), Error> {
+        if !self.enabled {
+            return Ok(());
+        }
+
         // Compatibility with older LLVM outputs. Newer LLVM outputs, when
         // atomics are enabled, emit a shared memory. That's a good indicator
         // that we have work to do. If shared memory isn't enabled, though, then
         // this isn't an atomic module so there's nothing to do. We still allow,
         // though, an environment variable to force us to go down this path to
         // remain compatible with older LLVM outputs.
-        let memory = find_memory(module)?;
-        if !module.memories.get(memory).shared && env::var("WASM_BINDGEN_THREADS").is_err() {
+        let memory = wasm_conventions::get_memory(module)?;
+        if !module.memories.get(memory).shared {
             return Ok(());
         }
 
-        let stack_pointer = find_stack_pointer(module)?;
+        let stack_pointer = wasm_conventions::get_shadow_stack_pointer(module)?;
         let addr = allocate_static_data(module, memory, 4, 4)?;
         let zero = InitExpr::Value(Value::I32(0));
         let globals = Globals {
@@ -207,17 +219,6 @@ fn switch_data_segments_to_passive(
     Ok(ret)
 }
 
-fn find_memory(module: &mut Module) -> Result<MemoryId, Error> {
-    let mut memories = module.memories.iter();
-    let memory = memories
-        .next()
-        .ok_or_else(|| format_err!("currently incompatible with no memory modules"))?;
-    if memories.next().is_some() {
-        bail!("only one memory is currently supported");
-    }
-    Ok(memory.id())
-}
-
 fn update_memory(module: &mut Module, memory: MemoryId, max: u32) -> Result<MemoryId, Error> {
     assert!(max % PAGE_SIZE == 0);
     let memory = module.memories.get_mut(memory);
@@ -313,37 +314,6 @@ fn allocate_static_data(
     Ok(address)
 }
 
-fn find_stack_pointer(module: &mut Module) -> Result<Option<GlobalId>, Error> {
-    let candidates = module
-        .globals
-        .iter()
-        .filter(|g| g.ty == ValType::I32)
-        .filter(|g| g.mutable)
-        .filter(|g| match g.kind {
-            GlobalKind::Local(_) => true,
-            GlobalKind::Import(_) => false,
-        })
-        .collect::<Vec<_>>();
-
-    if candidates.len() == 0 {
-        return Ok(None);
-    }
-    if candidates.len() > 2 {
-        bail!("too many mutable globals to infer the stack pointer");
-    }
-    if candidates.len() == 1 {
-        return Ok(Some(candidates[0].id()));
-    }
-
-    // If we've got two mutable globals then we're in a pretty standard
-    // situation for threaded code where one is the stack pointer and one is the
-    // TLS base offset. We need to figure out which is which, and we basically
-    // assume LLVM's current codegen where the first is the stack pointer.
-    //
-    // TODO: have an actual check here.
-    Ok(Some(candidates[0].id()))
-}
-
 enum InitMemory {
     Segments(Vec<PassiveSegment>),
     Call {
@@ -358,7 +328,7 @@ fn inject_start(
     memory_init: InitMemory,
     globals: &Globals,
     addr: u32,
-    stack_pointer: Option<GlobalId>,
+    stack_pointer: GlobalId,
     stack_size: u32,
     memory: MemoryId,
 ) -> Result<(), Error> {
@@ -393,30 +363,28 @@ fn inject_start(
         // we give ourselves a stack via memory.grow and we update our stack
         // pointer as the default stack pointer is surely wrong for us.
         |body| {
-            if let Some(stack_pointer) = stack_pointer {
-                // local0 = grow_memory(stack_size);
-                body.i32_const((stack_size / PAGE_SIZE) as i32)
-                    .memory_grow(memory)
-                    .local_set(local);
-
-                // if local0 == -1 then trap
-                body.block(None, |body| {
-                    let target = body.id();
-                    body.local_get(local)
-                        .i32_const(-1)
-                        .binop(BinaryOp::I32Ne)
-                        .br_if(target)
-                        .unreachable();
-                });
-
-                // stack_pointer = local0 + stack_size
+            // local0 = grow_memory(stack_size);
+            body.i32_const((stack_size / PAGE_SIZE) as i32)
+                .memory_grow(memory)
+                .local_set(local);
+
+            // if local0 == -1 then trap
+            body.block(None, |body| {
+                let target = body.id();
                 body.local_get(local)
-                    .i32_const(PAGE_SIZE as i32)
-                    .binop(BinaryOp::I32Mul)
-                    .i32_const(stack_size as i32)
-                    .binop(BinaryOp::I32Add)
-                    .global_set(stack_pointer);
-            }
+                    .i32_const(-1)
+                    .binop(BinaryOp::I32Ne)
+                    .br_if(target)
+                    .unreachable();
+            });
+
+            // stack_pointer = local0 + stack_size
+            body.local_get(local)
+                .i32_const(PAGE_SIZE as i32)
+                .binop(BinaryOp::I32Mul)
+                .i32_const(stack_size as i32)
+                .binop(BinaryOp::I32Add)
+                .global_set(stack_pointer);
         },
         // If the thread ID is zero then we can skip the update of the stack
        // pointer as we know our stack pointer is valid. We need to initialize
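
For reference, a minimal sketch of how a caller might drive the transform after this change. It assumes the crate exposes Config::new() (per the constructor hunk above) and that the caller already has a walrus::Module; the function name maybe_thread_xform is hypothetical and not part of this diff.

    // Hypothetical caller, not part of the diff above.
    fn maybe_thread_xform(module: &mut walrus::Module) -> Result<(), failure::Error> {
        let config = Config::new();
        // `run` is now a no-op unless WASM_BINDGEN_THREADS was set when the
        // Config was built, so this check is optional; it is shown because
        // callers may want to branch on `is_enabled()` for related work too.
        if config.is_enabled() {
            config.run(module)?;
        }
        Ok(())
    }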