|
1 | 1 | // FIXME: This needs an audit for correctness and completeness. |
2 | 2 |
|
3 | 3 | use crate::abi::call::{ |
4 | | - ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, Reg, RegKind, Uniform, |
| 4 | + ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, Reg, Uniform, |
5 | 5 | }; |
6 | | -use crate::abi::{self, HasDataLayout, Size, TyAbiInterface}; |
| 6 | +use crate::abi::{self, HasDataLayout, Scalar, Size, TyAbiInterface, TyAndLayout}; |
| 7 | + |
/// Accumulator state threaded through the recursive scan of an aggregate's
/// fields while deciding how it is passed in registers for this target's
/// C ABI.
#[derive(Clone, Debug)]
pub struct Sdata {
    // Explicit register slots assigned so far; at most eight uniquely
    // addressable members (fixed-size array of 8).
    pub prefix: [Option<Reg>; 8],
    // Index of the next free slot in `prefix`.
    pub prefix_index: usize,
    // Byte offset just past the last register slot assigned so far.
    pub last_offset: Size,
    // Set once any f32/f64 field is seen; selects the CastTarget path in
    // `classify_arg`.
    pub has_float: bool,
    // Attribute applied to the whole argument (set to `InReg` when an f32
    // is placed in a float register).
    pub arg_attribute: ArgAttribute,
}
7 | 16 |
|
/// Folds one scalar field located at `offset` into the running register
/// assignment in `data`.
///
/// Non-float scalars are ignored here (they are covered by the 64-bit
/// integer-chunk fallback); f32/f64 fields get a float register, with
/// integer registers first filling any gap between `data.last_offset`
/// and `offset`.
fn arg_scalar<C>(cx: &C, scalar: &Scalar, offset: Size, mut data: Sdata) -> Sdata
where
    C: HasDataLayout,
{
    let dl = cx.data_layout();

    // Only floating-point scalars need explicit placement.
    if scalar.value != abi::F32 && scalar.value != abi::F64 {
        return data;
    }

    data.has_float = true;

    // If the slots assigned so far end at an offset that is not f64-aligned
    // and there is a gap before this field, pad with one 32-bit slot first.
    if !data.last_offset.is_aligned(dl.f64_align.abi) && data.last_offset < offset {
        if data.prefix_index == data.prefix.len() {
            // Out of register slots; leave the remainder to the fallback.
            return data;
        }
        data.prefix[data.prefix_index] = Some(Reg::i32());
        data.prefix_index += 1;
        data.last_offset = data.last_offset + Reg::i32().size;
    }

    // Fill each remaining whole 64-bit stretch of the gap with an integer
    // register, bounded by the number of free slots.
    for _ in 0..((offset - data.last_offset).bits() / 64)
        .min((data.prefix.len() - data.prefix_index) as u64)
    {
        data.prefix[data.prefix_index] = Some(Reg::i64());
        data.prefix_index += 1;
        data.last_offset = data.last_offset + Reg::i64().size;
    }

    // A sub-64-bit remainder of the gap gets one more 32-bit slot.
    if data.last_offset < offset {
        if data.prefix_index == data.prefix.len() {
            return data;
        }
        data.prefix[data.prefix_index] = Some(Reg::i32());
        data.prefix_index += 1;
        data.last_offset = data.last_offset + Reg::i32().size;
    }

    if data.prefix_index == data.prefix.len() {
        return data;
    }

    // Place the float itself; f32 additionally marks the argument `InReg`.
    if scalar.value == abi::F32 {
        data.arg_attribute = ArgAttribute::InReg;
        data.prefix[data.prefix_index] = Some(Reg::f32());
        data.last_offset = offset + Reg::f32().size;
    } else {
        data.prefix[data.prefix_index] = Some(Reg::f64());
        data.last_offset = offset + Reg::f64().size;
    }
    data.prefix_index += 1;
    return data;
}
28 | 70 |
|
29 | | -fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, in_registers_max: Size) |
| 71 | +fn arg_scalar_pair<C>( |
| 72 | + cx: &C, |
| 73 | + scalar1: &Scalar, |
| 74 | + scalar2: &Scalar, |
| 75 | + mut offset: Size, |
| 76 | + mut data: Sdata, |
| 77 | +) -> Sdata |
30 | 78 | where |
31 | | - Ty: TyAbiInterface<'a, C> + Copy, |
32 | 79 | C: HasDataLayout, |
33 | 80 | { |
34 | | - if !arg.layout.is_aggregate() { |
35 | | - arg.extend_integer_width_to(64); |
36 | | - return; |
| 81 | + data = arg_scalar(cx, &scalar1, offset, data); |
| 82 | + if scalar1.value == abi::F32 { |
| 83 | + offset += Reg::f32().size; |
| 84 | + } else if scalar2.value == abi::F64 { |
| 85 | + offset += Reg::f64().size; |
| 86 | + } else if let abi::Int(i, _signed) = scalar1.value { |
| 87 | + offset += i.size(); |
| 88 | + } else if scalar1.value == abi::Pointer { |
| 89 | + offset = offset + Reg::i64().size; |
37 | 90 | } |
38 | 91 |
|
39 | | - // This doesn't intentionally handle structures with floats which needs |
40 | | - // special care below. |
41 | | - if let Some(uniform) = is_homogeneous_aggregate(cx, arg) { |
42 | | - arg.cast_to(uniform); |
43 | | - return; |
| 92 | + if (offset.raw % 4) != 0 && (scalar2.value == abi::F32 || scalar2.value == abi::F64) { |
| 93 | + offset.raw += 4 - (offset.raw % 4); |
| 94 | + } |
| 95 | + data = arg_scalar(cx, &scalar2, offset, data); |
| 96 | + return data; |
| 97 | +} |
| 98 | + |
| 99 | +fn parse_structure<'a, Ty, C>( |
| 100 | + cx: &C, |
| 101 | + layout: TyAndLayout<'a, Ty>, |
| 102 | + mut data: Sdata, |
| 103 | + mut offset: Size, |
| 104 | +) -> Sdata |
| 105 | +where |
| 106 | + Ty: TyAbiInterface<'a, C> + Copy, |
| 107 | + C: HasDataLayout, |
| 108 | +{ |
| 109 | + if let abi::FieldsShape::Union(_) = layout.fields { |
| 110 | + return data; |
44 | 111 | } |
45 | 112 |
|
46 | | - if let abi::FieldsShape::Arbitrary { .. } = arg.layout.fields { |
47 | | - let dl = cx.data_layout(); |
48 | | - let size = arg.layout.size; |
49 | | - let mut prefix = [None; 8]; |
50 | | - let mut prefix_index = 0; |
51 | | - let mut last_offset = Size::ZERO; |
52 | | - let mut has_float = false; |
53 | | - let mut arg_attribute = ArgAttribute::default(); |
54 | | - |
55 | | - for i in 0..arg.layout.fields.count() { |
56 | | - let field = arg.layout.field(cx, i); |
57 | | - let offset = arg.layout.fields.offset(i); |
58 | | - |
59 | | - if let abi::Abi::Scalar(scalar) = &field.abi { |
60 | | - if scalar.value == abi::F32 || scalar.value == abi::F64 { |
61 | | - has_float = true; |
62 | | - |
63 | | - if !last_offset.is_aligned(dl.f64_align.abi) && last_offset < offset { |
64 | | - if prefix_index == prefix.len() { |
65 | | - break; |
66 | | - } |
67 | | - prefix[prefix_index] = Some(Reg::i32()); |
68 | | - prefix_index += 1; |
69 | | - last_offset = last_offset + Reg::i32().size; |
70 | | - } |
71 | | - |
72 | | - for _ in 0..((offset - last_offset).bits() / 64) |
73 | | - .min((prefix.len() - prefix_index) as u64) |
74 | | - { |
75 | | - prefix[prefix_index] = Some(Reg::i64()); |
76 | | - prefix_index += 1; |
77 | | - last_offset = last_offset + Reg::i64().size; |
78 | | - } |
79 | | - |
80 | | - if last_offset < offset { |
81 | | - if prefix_index == prefix.len() { |
82 | | - break; |
83 | | - } |
84 | | - prefix[prefix_index] = Some(Reg::i32()); |
85 | | - prefix_index += 1; |
86 | | - last_offset = last_offset + Reg::i32().size; |
87 | | - } |
88 | | - |
89 | | - if prefix_index == prefix.len() { |
90 | | - break; |
91 | | - } |
92 | | - |
93 | | - if scalar.value == abi::F32 { |
94 | | - arg_attribute = ArgAttribute::InReg; |
95 | | - prefix[prefix_index] = Some(Reg::f32()); |
96 | | - last_offset = offset + Reg::f32().size; |
97 | | - } else { |
98 | | - prefix[prefix_index] = Some(Reg::f64()); |
99 | | - last_offset = offset + Reg::f64().size; |
100 | | - } |
101 | | - prefix_index += 1; |
| 113 | + match layout.abi { |
| 114 | + abi::Abi::Scalar(scalar) => { |
| 115 | + data = arg_scalar(cx, &scalar, offset, data); |
| 116 | + } |
| 117 | + abi::Abi::Aggregate { .. } => { |
| 118 | + for i in 0..layout.fields.count().clone() { |
| 119 | + if offset < layout.fields.offset(i) { |
| 120 | + offset = layout.fields.offset(i); |
102 | 121 | } |
| 122 | + data = parse_structure(cx, layout.field(cx, i).clone(), data.clone(), offset); |
103 | 123 | } |
104 | 124 | } |
105 | | - |
106 | | - if has_float && arg.layout.size <= in_registers_max { |
107 | | - let mut rest_size = size - last_offset; |
108 | | - |
109 | | - if (rest_size.raw % 8) != 0 && prefix_index < prefix.len() { |
110 | | - prefix[prefix_index] = Some(Reg::i32()); |
111 | | - rest_size = rest_size - Reg::i32().size; |
| 125 | + _ => { |
| 126 | + if let abi::Abi::ScalarPair(scalar1, scalar2) = &layout.abi { |
| 127 | + data = arg_scalar_pair(cx, scalar1, scalar2, offset, data); |
112 | 128 | } |
113 | | - |
114 | | - arg.cast_to(CastTarget { |
115 | | - prefix, |
116 | | - rest: Uniform { unit: Reg::i64(), total: rest_size }, |
117 | | - attrs: ArgAttributes { |
118 | | - regular: arg_attribute, |
119 | | - arg_ext: ArgExtension::None, |
120 | | - pointee_size: Size::ZERO, |
121 | | - pointee_align: None, |
122 | | - }, |
123 | | - }); |
124 | | - return; |
125 | 129 | } |
126 | 130 | } |
127 | 131 |
|
| 132 | + return data; |
| 133 | +} |
| 134 | + |
/// Decides how one argument is passed: integer-extended directly, indirectly
/// by pointer, or cast to an explicit sequence of registers (the float path).
fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, in_registers_max: Size)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout,
{
    // Non-aggregates are passed directly; small integers are widened to 64 bits.
    if !arg.layout.is_aggregate() {
        arg.extend_integer_width_to(64);
        return;
    }

    // Aggregates larger than the register-passing limit go indirect.
    let total = arg.layout.size;
    if total > in_registers_max {
        arg.make_indirect();
        return;
    }

    match arg.layout.fields {
        abi::FieldsShape::Primitive => unreachable!(),
        abi::FieldsShape::Array { .. } => {
            // Arrays are passed indirectly
            arg.make_indirect();
            return;
        }
        abi::FieldsShape::Union(_) => {
            // Unions are always treated as a series of 64-bit integer chunks
            // (fall through to the final cast below).
        }
        abi::FieldsShape::Arbitrary { .. } => {
            // Structures with floating point numbers need special care.

            // Scan the structure for float fields, accumulating an explicit
            // register prefix starting from a fresh, zeroed state.
            let mut data = parse_structure(
                cx,
                arg.layout.clone(),
                Sdata {
                    prefix: [None; 8],
                    prefix_index: 0,
                    last_offset: Size::ZERO,
                    has_float: false,
                    arg_attribute: ArgAttribute::default(),
                },
                Size { raw: 0 },
            );

            if data.has_float {
                // Structure { float, int, int } doesn't like to be handled like
                // { float, long int }. Other way around it doesn't mind.
                if data.last_offset < arg.layout.size
                    && (data.last_offset.raw % 8) != 0
                    && data.prefix_index < data.prefix.len()
                {
                    data.prefix[data.prefix_index] = Some(Reg::i32());
                    data.prefix_index += 1;
                    data.last_offset += Reg::i32().size;
                }

                // Pad the trailing integer chunk down to a multiple of 8 bytes.
                // NOTE(review): `prefix_index` is not advanced here, unlike the
                // branch above — presumably because this is the final slot used;
                // confirm that is intended.
                let mut rest_size = arg.layout.size - data.last_offset;
                if (rest_size.raw % 8) != 0 && data.prefix_index < data.prefix.len() {
                    data.prefix[data.prefix_index] = Some(Reg::i32());
                    rest_size = rest_size - Reg::i32().size;
                }

                // Explicit register prefix, remainder as 64-bit integer chunks.
                arg.cast_to(CastTarget {
                    prefix: data.prefix,
                    rest: Uniform { unit: Reg::i64(), total: rest_size },
                    attrs: ArgAttributes {
                        regular: data.arg_attribute,
                        arg_ext: ArgExtension::None,
                        pointee_size: Size::ZERO,
                        pointee_align: None,
                    },
                });
                return;
            }
        }
    }

    // Fallback: pass the aggregate as a series of 64-bit integer chunks.
    arg.cast_to(Uniform { unit: Reg::i64(), total });
}
136 | 212 |
|
|
0 commit comments