@@ -163,7 +163,7 @@ pub fn represent_type(cx: &CrateContext, t: ty::t) -> Rc<Repr> {
 fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
     match ty::get(t).sty {
         ty::ty_tup(ref elems) => {
-            return Univariant(mk_struct(cx, elems.as_slice(), false), false)
+            return Univariant(mk_struct(cx, elems.as_slice(), false, t), false)
         }
         ty::ty_struct(def_id, ref substs) => {
             let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
@@ -174,12 +174,12 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
             let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
             if dtor { ftys.push(ty::mk_bool()); }

-            return Univariant(mk_struct(cx, ftys.as_slice(), packed), dtor)
+            return Univariant(mk_struct(cx, ftys.as_slice(), packed, t), dtor)
         }
         ty::ty_unboxed_closure(def_id, _) => {
             let upvars = ty::unboxed_closure_upvars(cx.tcx(), def_id);
             let upvar_types = upvars.iter().map(|u| u.ty).collect::<Vec<_>>();
-            return Univariant(mk_struct(cx, upvar_types.as_slice(), false),
+            return Univariant(mk_struct(cx, upvar_types.as_slice(), false, t),
                               false)
         }
         ty::ty_enum(def_id, ref substs) => {
@@ -194,7 +194,8 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
                 // (Typechecking will reject discriminant-sizing attrs.)
                 assert_eq!(hint, attr::ReprAny);
                 let ftys = if dtor { vec!(ty::mk_bool()) } else { vec!() };
-                return Univariant(mk_struct(cx, ftys.as_slice(), false), dtor);
+                return Univariant(mk_struct(cx, ftys.as_slice(), false, t),
+                                  dtor);
             }

             if !dtor && cases.iter().all(|c| c.tys.len() == 0) {
@@ -225,15 +226,17 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
                 assert_eq!(hint, attr::ReprAny);
                 let mut ftys = cases.get(0).tys.clone();
                 if dtor { ftys.push(ty::mk_bool()); }
-                return Univariant(mk_struct(cx, ftys.as_slice(), false), dtor);
+                return Univariant(mk_struct(cx, ftys.as_slice(), false, t),
+                                  dtor);
             }

             if !dtor && cases.len() == 2 && hint == attr::ReprAny {
                 // Nullable pointer optimization
                 let mut discr = 0;
                 while discr < 2 {
-                    if cases.get(1 - discr).is_zerolen(cx) {
-                        let st = mk_struct(cx, cases.get(discr).tys.as_slice(), false);
+                    if cases.get(1 - discr).is_zerolen(cx, t) {
+                        let st = mk_struct(cx, cases.get(discr).tys.as_slice(),
+                                           false, t);
                         match cases.get(discr).find_ptr() {
                             Some(ThinPointer(_)) if st.fields.len() == 1 => {
                                 return RawNullablePointer {
@@ -263,11 +266,15 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
                                      slo: 0, shi: (cases.len() - 1) as i64 };
             let ity = range_to_inttype(cx, hint, &bounds);

-            return General(ity, cases.iter().map(|c| {
+            let fields: Vec<_> = cases.iter().map(|c| {
                 let mut ftys = vec!(ty_of_inttype(ity)).append(c.tys.as_slice());
                 if dtor { ftys.push(ty::mk_bool()); }
-                mk_struct(cx, ftys.as_slice(), false)
-            }).collect(), dtor);
+                mk_struct(cx, ftys.as_slice(), false, t)
+            }).collect();
+
+            ensure_enum_fits_in_address_space(cx, ity, fields.as_slice(), t);
+
+            General(ity, fields, dtor)
         }
         _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}",
                            ty_to_string(cx.tcx(), t)).as_slice())
@@ -288,8 +295,8 @@ pub enum PointerField {
 }

 impl Case {
-    fn is_zerolen(&self, cx: &CrateContext) -> bool {
-        mk_struct(cx, self.tys.as_slice(), false).size == 0
+    fn is_zerolen(&self, cx: &CrateContext, scapegoat: ty::t) -> bool {
+        mk_struct(cx, self.tys.as_slice(), false, scapegoat).size == 0
     }

     fn find_ptr(&self) -> Option<PointerField> {
@@ -344,29 +351,25 @@ fn get_cases(tcx: &ty::ctxt, def_id: ast::DefId, substs: &subst::Substs) -> Vec<
     }).collect()
 }

-fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool) -> Struct {
-    if tys.iter().all(|&ty| ty::type_is_sized(cx.tcx(), ty)) {
-        let lltys = tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect::<Vec<_>>();
-        let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
-        Struct {
-            size: machine::llsize_of_alloc(cx, llty_rec),
-            align: machine::llalign_of_min(cx, llty_rec),
-            sized: true,
-            packed: packed,
-            fields: Vec::from_slice(tys),
-        }
+fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool, scapegoat: ty::t) -> Struct {
+    let sized = tys.iter().all(|&ty| ty::type_is_sized(cx.tcx(), ty));
+    let lltys: Vec<Type> = if sized {
+        tys.iter()
+           .map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
     } else {
-        // Ignore any dynamically sized fields.
-        let lltys = tys.iter().filter(|&ty| ty::type_is_sized(cx.tcx(), *ty))
-                       .map(|&ty| type_of::sizing_type_of(cx, ty)).collect::<Vec<_>>();
-        let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
-        Struct {
-            size: machine::llsize_of_alloc(cx, llty_rec),
-            align: machine::llalign_of_min(cx, llty_rec),
-            sized: false,
-            packed: packed,
-            fields: Vec::from_slice(tys),
-        }
+        tys.iter().filter(|&ty| ty::type_is_sized(cx.tcx(), *ty))
+           .map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
+    };
+
+    ensure_struct_fits_in_address_space(cx, lltys.as_slice(), packed, scapegoat);
+
+    let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
+    Struct {
+        size: machine::llsize_of_alloc(cx, llty_rec),
+        align: machine::llalign_of_min(cx, llty_rec),
+        sized: sized,
+        packed: packed,
+        fields: Vec::from_slice(tys),
     }
 }

@@ -461,6 +464,48 @@ pub fn ty_of_inttype(ity: IntType) -> ty::t {
     }
 }

+// LLVM doesn't like types that don't fit in the address space
+fn ensure_struct_fits_in_address_space(ccx: &CrateContext,
+                                       fields: &[Type],
+                                       packed: bool,
+                                       scapegoat: ty::t) {
+    let mut offset = 0;
+    for &llty in fields.iter() {
+        if !packed {
+            let type_align = machine::llalign_of_min(ccx, llty);
+            offset = roundup(offset, type_align);
+        }
+        offset += machine::llsize_of_alloc(ccx, llty);
+
+        // We can get away with checking for overflow once per iteration,
+        // because field sizes are less than 1<<60.
+        if offset >= ccx.max_obj_size() {
+            ccx.report_overbig_object(scapegoat);
+        }
+    }
+}
+
+fn union_size_and_align(sts: &[Struct]) -> (machine::llsize, machine::llalign) {
+    let size = sts.iter().map(|st| st.size).max().unwrap();
+    let most_aligned = sts.iter().max_by(|st| st.align).unwrap();
+    (size, most_aligned.align)
+}
+
+fn ensure_enum_fits_in_address_space(ccx: &CrateContext,
+                                     discr: IntType,
+                                     fields: &[Struct],
+                                     scapegoat: ty::t) {
+    let discr_size = machine::llsize_of_alloc(ccx, ll_inttype(ccx, discr));
+    let (field_size, field_align) = union_size_and_align(fields);
+
+    // This can't overflow because field_size, discr_size, field_align < 1<<60
+    let total_size = roundup(discr_size, field_align) + field_size;
+
+    if total_size >= ccx.max_obj_size() {
+        ccx.report_overbig_object(scapegoat);
+    }
+}
+

 /**
  * LLVM-level types are a little complicated.
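Aside (not part of the patch): stripped of rustc's internals, the two guards introduced in this hunk are plain u64 arithmetic against an object-size cap. The sketch below mirrors that logic under assumed stand-ins — MAX_OBJ_SIZE for ccx.max_obj_size(), and (size, align) pairs for what machine::llsize_of_alloc / llalign_of_min would report for each field or variant — so it is an illustration only.

// Standalone sketch of the overflow guards; names here are illustrative.
const MAX_OBJ_SIZE: u64 = 1 << 31; // assumed cap, stand-in for ccx.max_obj_size()

/// Round `off` up to a multiple of `align` (any non-zero `align`).
fn roundup(off: u64, align: u64) -> u64 {
    ((off + align - 1) / align) * align
}

/// Mirrors ensure_struct_fits_in_address_space: pad the running offset to each
/// field's alignment (unless packed), add the field, and bail out as soon as
/// the offset reaches the cap. One check per field suffices because every
/// field size is < 1 << 60, so the sum cannot wrap a u64 between checks.
fn struct_fits(fields: &[(u64, u64)], packed: bool) -> bool {
    let mut offset = 0u64;
    for &(size, align) in fields {
        if !packed {
            offset = roundup(offset, align);
        }
        offset += size;
        if offset >= MAX_OBJ_SIZE {
            return false;
        }
    }
    true
}

/// Mirrors ensure_enum_fits_in_address_space: the lower bound on the enum's
/// size is the discriminant padded to the most-aligned variant's alignment,
/// plus the largest variant (exactly what union_size_and_align reports).
fn enum_fits(discr_size: u64, variants: &[(u64, u64)]) -> bool {
    let field_size = variants.iter().map(|&(s, _)| s).max().unwrap_or(0);
    let field_align = variants.iter().map(|&(_, a)| a).max().unwrap_or(1);
    roundup(discr_size, field_align) + field_size < MAX_OBJ_SIZE
}

fn main() {
    // 8 bytes at align 8 followed by a 3 GiB field: rejected under a 2 GiB cap.
    assert!(!struct_fits(&[(8, 8), (3u64 << 30, 1)], false));
    assert!(struct_fits(&[(4, 4), (1, 1)], false));
    // A u32 discriminant with a 4 GiB variant is rejected as well.
    assert!(!enum_fits(4, &[(4u64 << 30, 8)]));
    assert!(enum_fits(4, &[(4, 4), (12, 8)]));
}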
@@ -523,13 +568,12 @@ fn generic_type_of(cx: &CrateContext,
             // of the size.
             //
             // FIXME #10604: this breaks when vector types are present.
-            let size = sts.iter().map(|st| st.size).max().unwrap();
-            let most_aligned = sts.iter().max_by(|st| st.align).unwrap();
-            let align = most_aligned.align;
+            let (size, align) = union_size_and_align(sts.as_slice());
+            let align_s = align as u64;
             let discr_ty = ll_inttype(cx, ity);
-            let discr_size = machine::llsize_of_alloc(cx, discr_ty) as u64;
-            let align_units = (size + align - 1) / align - 1;
-            let pad_ty = match align {
+            let discr_size = machine::llsize_of_alloc(cx, discr_ty);
+            let align_units = (size + align_s - 1) / align_s - 1;
+            let pad_ty = match align_s {
                 1 => Type::array(&Type::i8(cx), align_units),
                 2 => Type::array(&Type::i16(cx), align_units),
                 4 => Type::array(&Type::i32(cx), align_units),
@@ -539,10 +583,10 @@ fn generic_type_of(cx: &CrateContext,
                                               align_units),
                 _ => fail!("unsupported enum alignment: {:?}", align)
             };
-            assert_eq!(machine::llalign_of_min(cx, pad_ty) as u64, align);
-            assert_eq!(align % discr_size, 0);
+            assert_eq!(machine::llalign_of_min(cx, pad_ty), align);
+            assert_eq!(align_s % discr_size, 0);
             let fields = vec!(discr_ty,
-                              Type::array(&discr_ty, align / discr_size - 1),
+                              Type::array(&discr_ty, align_s / discr_size - 1),
                               pad_ty);
             match name {
                 None => Type::struct_(cx, fields.as_slice(), false),
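The last two hunks move generic_type_of onto the shared union_size_and_align helper and keep the padding arithmetic in u64 (align_s). The shell they build — the discriminant, enough extra discriminant-sized ints to fill one alignment unit, then align_units alignment-sized ints — should total the union size rounded up to its alignment. Below is a standalone check of that arithmetic, with illustrative numbers only; it is not rustc code.

// Re-derives the size of the padded shell built in generic_type_of.
fn shell_size(size: u64, align: u64, discr_size: u64) -> u64 {
    assert_eq!(align % discr_size, 0); // same invariant the patch asserts
    let align_units = (size + align - 1) / align - 1; // as computed in the patch
    discr_size                                  // the discriminant itself
        + (align / discr_size - 1) * discr_size // pad it out to one alignment unit
        + align_units * align                   // alignment-sized filler for the rest
}

fn main() {
    // size 20, align 8, u32 discriminant: 24 bytes, i.e. 20 rounded up to 8.
    assert_eq!(shell_size(20, 8, 4), 24);
    // Already-aligned sizes are unchanged.
    assert_eq!(shell_size(32, 8, 4), 32);
}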