author     Emilio Cobos Álvarez <emilio@crisal.io>    2020-01-29 00:40:39 +0100
committer  Emilio Cobos Álvarez <emilio@crisal.io>    2020-01-29 02:27:24 +0100
commit     0153dfe230c63a2ece4df5d1c4ed8231d58d3ea6 (patch)
tree       ca7d9c4f89da0ae333befab5b6da9c3d0dd55ec8
parent     e0a0400e946a3786bfde0984386e9c235188043e (diff)
ir: Account for packedness when computing bitfield sizes.
Fixes #1716
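The fix concerns `__attribute__((packed))` structs: their bitfield allocation units must not be padded or aligned up to the width of their widest member. As a rough, hypothetical Rust analogue of what packing does to a layout (an illustration only, not bindgen code):

#[repr(C)]
struct Unpacked {
    a: u8,
    b: u16, // one padding byte is inserted before `b`
}

#[repr(C, packed)]
struct Packed {
    a: u8,
    b: u16, // no padding; the whole struct gets alignment 1
}

fn main() {
    assert_eq!(std::mem::size_of::<Unpacked>(), 4);
    assert_eq!(std::mem::align_of::<Unpacked>(), 2);
    assert_eq!(std::mem::size_of::<Packed>(), 3);
    assert_eq!(std::mem::align_of::<Packed>(), 1);
}

The diff below applies the same principle to the bitfield allocation units bindgen builds for packed C structs.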
-rw-r--r--  src/ir/comp.rs                                               |  84
-rw-r--r--  tests/expectations/tests/divide-by-zero-in-struct-layout.rs |   7
-rw-r--r--  tests/expectations/tests/packed-bitfield.rs                  | 179
-rw-r--r--  tests/headers/divide-by-zero-in-struct-layout.h              |   4
-rw-r--r--  tests/headers/packed-bitfield.h                              |   5
5 files changed, 239 insertions, 40 deletions
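The core of the change is in how a bitfield allocation unit's layout is computed when the unit is flushed: for a packed struct the unit is byte-aligned instead of being rounded up to the natural alignment of its widest member. Below is a minimal standalone sketch of that computation; `bytes_from_bits_pow2` and `align_to` are re-implemented here from their apparent behavior and merely stand in for bindgen's helpers of the same names, so treat their exact semantics as an assumption.

// Sketch of the unit-layout computation this patch changes. The helper
// functions approximate bindgen's `bytes_from_bits_pow2` and `align_to`.

/// Round a bit count up to a power-of-two number of bytes.
fn bytes_from_bits_pow2(bits: usize) -> usize {
    ((bits + 7) / 8).next_power_of_two()
}

/// Round `value` up to the next multiple of `align`.
fn align_to(value: usize, align: usize) -> usize {
    if align == 0 {
        return value;
    }
    (value + align - 1) / align * align
}

/// Returns (size_in_bytes, align_in_bytes) of a flushed bitfield unit.
fn unit_layout(
    unit_size_in_bits: usize,
    unit_align_in_bits: usize,
    packed: bool,
) -> (usize, usize) {
    // The patched decision: a packed struct forces the unit down to byte
    // alignment instead of using its natural alignment.
    let align = if packed {
        1
    } else {
        bytes_from_bits_pow2(unit_align_in_bits)
    };
    let size = align_to(unit_size_in_bits, align * 8) / 8;
    (size, align)
}

fn main() {
    // `struct Date { day:5; month:4; year:15; }` uses 24 bits of bitfields,
    // and its widest member (`short`) has a 16-bit natural alignment.
    assert_eq!(unit_layout(24, 16, false), (4, 2)); // rounded up to the short
    assert_eq!(unit_layout(24, 16, true), (3, 1)); // packed: exactly 3 bytes
}

With `packed == true` the unit for the new `Date` test below comes out as 3 bytes with alignment 1, which is exactly what the generated expectation asserts.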
diff --git a/src/ir/comp.rs b/src/ir/comp.rs
index a82fd756..cc58e910 100644
--- a/src/ir/comp.rs
+++ b/src/ir/comp.rs
@@ -496,6 +496,7 @@ impl FieldMethods for RawField {
 fn raw_fields_to_fields_and_bitfield_units<I>(
     ctx: &BindgenContext,
     raw_fields: I,
+    packed: bool,
 ) -> Result<Vec<Field>, ()>
 where
     I: IntoIterator<Item = RawField>,
@@ -533,6 +534,7 @@ where
                 &mut bitfield_unit_count,
                 &mut fields,
                 bitfields,
+                packed,
             )?;
         }
 
@@ -551,6 +553,7 @@ fn bitfields_to_allocation_units<E, I>(
     bitfield_unit_count: &mut usize,
     fields: &mut E,
     raw_bitfields: I,
+    packed: bool,
 ) -> Result<(), ()>
 where
     E: Extend<Field>,
@@ -575,17 +578,22 @@ where
         unit_size_in_bits: usize,
         unit_align_in_bits: usize,
         bitfields: Vec<Bitfield>,
+        packed: bool,
     ) where
         E: Extend<Field>,
     {
         *bitfield_unit_count += 1;
-        let align = bytes_from_bits_pow2(unit_align_in_bits);
+        let align = if packed {
+            1
+        } else {
+            bytes_from_bits_pow2(unit_align_in_bits)
+        };
         let size = align_to(unit_size_in_bits, align * 8) / 8;
         let layout = Layout::new(size, align);
         fields.extend(Some(Field::Bitfields(BitfieldUnit {
             nth: *bitfield_unit_count,
-            layout: layout,
-            bitfields: bitfields,
+            layout,
+            bitfields,
         })));
     }
 
@@ -607,34 +615,39 @@ where
         let bitfield_align = bitfield_layout.align;
 
         let mut offset = unit_size_in_bits;
-        if is_ms_struct {
-            if unit_size_in_bits != 0 &&
-                (bitfield_width == 0 ||
-                    bitfield_width > unfilled_bits_in_unit)
-            {
-                // We've reached the end of this allocation unit, so flush it
-                // and its bitfields.
-                unit_size_in_bits = align_to(unit_size_in_bits, unit_align * 8);
-                flush_allocation_unit(
-                    fields,
-                    bitfield_unit_count,
-                    unit_size_in_bits,
-                    unit_align,
-                    mem::replace(&mut bitfields_in_unit, vec![]),
-                );
+        if !packed {
+            if is_ms_struct {
+                if unit_size_in_bits != 0 &&
+                    (bitfield_width == 0 ||
+                        bitfield_width > unfilled_bits_in_unit)
+                {
+                    // We've reached the end of this allocation unit, so flush it
+                    // and its bitfields.
+                    unit_size_in_bits =
+                        align_to(unit_size_in_bits, unit_align * 8);
+                    flush_allocation_unit(
+                        fields,
+                        bitfield_unit_count,
+                        unit_size_in_bits,
+                        unit_align,
+                        mem::replace(&mut bitfields_in_unit, vec![]),
+                        packed,
+                    );
 
-                // Now we're working on a fresh bitfield allocation unit, so reset
-                // the current unit size and alignment.
-                offset = 0;
-                unit_align = 0;
-            }
-        } else {
-            if offset != 0 &&
-                (bitfield_width == 0 ||
-                    (offset & (bitfield_align * 8 - 1)) + bitfield_width >
-                        bitfield_size * 8)
-            {
-                offset = align_to(offset, bitfield_align * 8);
+                    // Now we're working on a fresh bitfield allocation unit, so reset
+                    // the current unit size and alignment.
+                    offset = 0;
+                    unit_align = 0;
+                }
+            } else {
+                if offset != 0 &&
+                    (bitfield_width == 0 ||
+                        (offset & (bitfield_align * 8 - 1)) +
+                            bitfield_width >
+                            bitfield_size * 8)
+                {
+                    offset = align_to(offset, bitfield_align * 8);
+                }
             }
         }
 
@@ -677,6 +690,7 @@ where
             unit_size_in_bits,
             unit_align,
             bitfields_in_unit,
+            packed,
         );
     }
 
@@ -717,7 +731,7 @@ impl CompFields {
         }
     }
 
-    fn compute_bitfield_units(&mut self, ctx: &BindgenContext) {
+    fn compute_bitfield_units(&mut self, ctx: &BindgenContext, packed: bool) {
         let raws = match *self {
             CompFields::BeforeComputingBitfieldUnits(ref mut raws) => {
                 mem::replace(raws, vec![])
@@ -727,7 +741,7 @@
             }
         };
 
-        let result = raw_fields_to_fields_and_bitfield_units(ctx, raws);
+        let result = raw_fields_to_fields_and_bitfield_units(ctx, raws, packed);
 
         match result {
             Ok(fields_and_units) => {
@@ -1126,7 +1140,7 @@ impl CompInfo {
     /// Do we see a virtual function during parsing?
     /// Get the has_own_virtual_method boolean.
     pub fn has_own_virtual_method(&self) -> bool {
-        return self.has_own_virtual_method;
+        self.has_own_virtual_method
     }
 
     /// Did we see a destructor when parsing this type?
@@ -1566,7 +1580,9 @@ impl CompInfo {
 
     /// Compute this compound structure's bitfield allocation units.
     pub fn compute_bitfield_units(&mut self, ctx: &BindgenContext) {
-        self.fields.compute_bitfield_units(ctx);
+        // TODO(emilio): If we could detect #pragma packed here we'd fix layout
+        // tests in divide-by-zero-in-struct-layout.rs
+        self.fields.compute_bitfield_units(ctx, self.packed_attr)
     }
 
     /// Assign for each anonymous field a generated name.
diff --git a/tests/expectations/tests/divide-by-zero-in-struct-layout.rs b/tests/expectations/tests/divide-by-zero-in-struct-layout.rs
index 77ccce3d..34a82ca0 100644
--- a/tests/expectations/tests/divide-by-zero-in-struct-layout.rs
+++ b/tests/expectations/tests/divide-by-zero-in-struct-layout.rs
@@ -114,15 +114,14 @@ impl WithBitfield {
 #[repr(C, packed)]
 #[derive(Debug, Default, Copy, Clone)]
 pub struct WithBitfieldAndAttrPacked {
-    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 0usize], u8>,
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize], u8>,
     pub a: ::std::os::raw::c_uint,
-    pub __bindgen_padding_0: u8,
 }
 impl WithBitfieldAndAttrPacked {
     #[inline]
-    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 0usize], u8> {
+    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 1usize], u8> {
         let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<
-            [u8; 0usize],
+            [u8; 1usize],
             u8,
         > = Default::default();
         __bindgen_bitfield_unit
diff --git a/tests/expectations/tests/packed-bitfield.rs b/tests/expectations/tests/packed-bitfield.rs
new file mode 100644
index 00000000..62608af8
--- /dev/null
+++ b/tests/expectations/tests/packed-bitfield.rs
@@ -0,0 +1,179 @@
+/* automatically generated by rust-bindgen */
+
+#![allow(
+    dead_code,
+    non_snake_case,
+    non_camel_case_types,
+    non_upper_case_globals
+)]
+
+#[repr(C)]
+#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub struct __BindgenBitfieldUnit<Storage, Align> {
+    storage: Storage,
+    align: [Align; 0],
+}
+impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align> {
+    #[inline]
+    pub const fn new(storage: Storage) -> Self {
+        Self { storage, align: [] }
+    }
+}
+impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align>
+where
+    Storage: AsRef<[u8]> + AsMut<[u8]>,
+{
+    #[inline]
+    pub fn get_bit(&self, index: usize) -> bool {
+        debug_assert!(index / 8 < self.storage.as_ref().len());
+        let byte_index = index / 8;
+        let byte = self.storage.as_ref()[byte_index];
+        let bit_index = if cfg!(target_endian = "big") {
+            7 - (index % 8)
+        } else {
+            index % 8
+        };
+        let mask = 1 << bit_index;
+        byte & mask == mask
+    }
+    #[inline]
+    pub fn set_bit(&mut self, index: usize, val: bool) {
+        debug_assert!(index / 8 < self.storage.as_ref().len());
+        let byte_index = index / 8;
+        let byte = &mut self.storage.as_mut()[byte_index];
+        let bit_index = if cfg!(target_endian = "big") {
+            7 - (index % 8)
+        } else {
+            index % 8
+        };
+        let mask = 1 << bit_index;
+        if val {
+            *byte |= mask;
+        } else {
+            *byte &= !mask;
+        }
+    }
+    #[inline]
+    pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
+        debug_assert!(bit_width <= 64);
+        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
+        debug_assert!(
+            (bit_offset + (bit_width as usize)) / 8 <=
+                self.storage.as_ref().len()
+        );
+        let mut val = 0;
+        for i in 0..(bit_width as usize) {
+            if self.get_bit(i + bit_offset) {
+                let index = if cfg!(target_endian = "big") {
+                    bit_width as usize - 1 - i
+                } else {
+                    i
+                };
+                val |= 1 << index;
+            }
+        }
+        val
+    }
+    #[inline]
+    pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
+        debug_assert!(bit_width <= 64);
+        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
+        debug_assert!(
+            (bit_offset + (bit_width as usize)) / 8 <=
+                self.storage.as_ref().len()
+        );
+        for i in 0..(bit_width as usize) {
+            let mask = 1 << i;
+            let val_bit_is_set = val & mask == mask;
+            let index = if cfg!(target_endian = "big") {
+                bit_width as usize - 1 - i
+            } else {
+                i
+            };
+            self.set_bit(index + bit_offset, val_bit_is_set);
+        }
+    }
+}
+#[repr(C, packed)]
+#[derive(Debug, Default, Copy, Clone)]
+pub struct Date {
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 3usize], u8>,
+}
+#[test]
+fn bindgen_test_layout_Date() {
+    assert_eq!(
+        ::std::mem::size_of::<Date>(),
+        3usize,
+        concat!("Size of: ", stringify!(Date))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<Date>(),
+        1usize,
+        concat!("Alignment of ", stringify!(Date))
+    );
+}
+impl Date {
+    #[inline]
+    pub fn day(&self) -> ::std::os::raw::c_uchar {
+        unsafe {
+            ::std::mem::transmute(self._bitfield_1.get(0usize, 5u8) as u8)
+        }
+    }
+    #[inline]
+    pub fn set_day(&mut self, val: ::std::os::raw::c_uchar) {
+        unsafe {
+            let val: u8 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 5u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn month(&self) -> ::std::os::raw::c_uchar {
+        unsafe {
+            ::std::mem::transmute(self._bitfield_1.get(5usize, 4u8) as u8)
+        }
+    }
+    #[inline]
+    pub fn set_month(&mut self, val: ::std::os::raw::c_uchar) {
+        unsafe {
+            let val: u8 = ::std::mem::transmute(val);
+            self._bitfield_1.set(5usize, 4u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn year(&self) -> ::std::os::raw::c_short {
+        unsafe {
+            ::std::mem::transmute(self._bitfield_1.get(9usize, 15u8) as u16)
+        }
+    }
+    #[inline]
+    pub fn set_year(&mut self, val: ::std::os::raw::c_short) {
+        unsafe {
+            let val: u16 = ::std::mem::transmute(val);
+            self._bitfield_1.set(9usize, 15u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn new_bitfield_1(
+        day: ::std::os::raw::c_uchar,
+        month: ::std::os::raw::c_uchar,
+        year: ::std::os::raw::c_short,
+    ) -> __BindgenBitfieldUnit<[u8; 3usize], u8> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<
+            [u8; 3usize],
+            u8,
+        > = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 5u8, {
+            let day: u8 = unsafe { ::std::mem::transmute(day) };
+            day as u64
+        });
+        __bindgen_bitfield_unit.set(5usize, 4u8, {
+            let month: u8 = unsafe { ::std::mem::transmute(month) };
+            month as u64
+        });
+        __bindgen_bitfield_unit.set(9usize, 15u8, {
+            let year: u16 = unsafe { ::std::mem::transmute(year) };
+            year as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
diff --git a/tests/headers/divide-by-zero-in-struct-layout.h b/tests/headers/divide-by-zero-in-struct-layout.h
index 470250d3..7c500cb0 100644
--- a/tests/headers/divide-by-zero-in-struct-layout.h
+++ b/tests/headers/divide-by-zero-in-struct-layout.h
@@ -1,7 +1,7 @@
 // bindgen-flags: --no-layout-tests
 //
-// Unfortunately, we aren't translating the second and third structs correctly
-// yet. But we definitely shouldn't divide-by-zero when we see it...
+// Unfortunately, we aren't translating the third struct correctly yet. But we
+// definitely shouldn't divide-by-zero when we see it...
 //
 // Once we fix #981 we should remove the `--no-layout-tests`.
 
diff --git a/tests/headers/packed-bitfield.h b/tests/headers/packed-bitfield.h
new file mode 100644
index 00000000..ef23bc6a
--- /dev/null
+++ b/tests/headers/packed-bitfield.h
@@ -0,0 +1,5 @@
+struct Date {
+  unsigned char day: 5;
+  unsigned char month: 4;
+  signed short year: 15;
+} __attribute__((packed));
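For completeness, here is a hypothetical round-trip check written as if it were appended to the generated expectation file above (so `Date` is in scope); the accessor names and the 3-byte/alignment-1 layout figures come from this commit, while the test itself is only an illustration.

// Hypothetical consumer of the generated `Date` bindings shown above.
#[test]
fn date_bitfields_round_trip() {
    let mut d = Date::default();
    d.set_day(29);
    d.set_month(1);
    d.set_year(2020);
    // Values round-trip through the shared 3-byte allocation unit.
    assert_eq!(d.day(), 29);
    assert_eq!(d.month(), 1);
    assert_eq!(d.year(), 2020);
    // With the fix, the packed struct is exactly 3 bytes with alignment 1.
    assert_eq!(::std::mem::size_of::<Date>(), 3);
    assert_eq!(::std::mem::align_of::<Date>(), 1);
}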