12
12
#include <asm/processor.h>
13
13
#include <asm/ptrace.h>
14
14
#include <asm/csr.h>
15
+ #include <asm/entry-common.h>
15
16
16
17
#define INSN_MATCH_LB 0x3
17
18
#define INSN_MASK_LB 0x707f
151
152
#define PRECISION_S 0
152
153
#define PRECISION_D 1
153
154
154
- static inline u8 load_u8 (const u8 * addr )
155
+ #ifdef CONFIG_RISCV_M_MODE
156
+ static inline int load_u8 (struct pt_regs * regs , const u8 * addr , u8 * r_val )
155
157
{
156
158
u8 val ;
157
159
158
160
asm volatile ("lbu %0, %1" : "=&r" (val ) : "m" (* addr ));
161
+ * r_val = val ;
159
162
160
- return val ;
163
+ return 0 ;
161
164
}
162
165
163
- static inline void store_u8 (u8 * addr , u8 val )
166
+ static inline int store_u8 (struct pt_regs * regs , u8 * addr , u8 val )
164
167
{
165
168
asm volatile ("sb %0, %1\n" : : "r" (val ), "m" (* addr ));
169
+
170
+ return 0 ;
166
171
}
167
172
168
- static inline ulong get_insn (ulong mepc )
173
+ static inline int get_insn (struct pt_regs * regs , ulong mepc , ulong * r_insn )
169
174
{
170
175
register ulong __mepc asm ("a2" ) = mepc ;
171
176
ulong val , rvc_mask = 3 , tmp ;
@@ -194,9 +199,87 @@ static inline ulong get_insn(ulong mepc)
194
199
: [addr ] "r" (__mepc ), [rvc_mask ] "r" (rvc_mask ),
195
200
[xlen_minus_16 ] "i" (XLEN_MINUS_16 ));
196
201
197
- return val ;
202
+ * r_insn = val ;
203
+
204
+ return 0 ;
205
+ }
206
+ #else
207
+ static inline int load_u8 (struct pt_regs * regs , const u8 * addr , u8 * r_val )
208
+ {
209
+ if (user_mode (regs )) {
210
+ return __get_user (* r_val , addr );
211
+ } else {
212
+ * r_val = * addr ;
213
+ return 0 ;
214
+ }
198
215
}
199
216
217
+ static inline int store_u8 (struct pt_regs * regs , u8 * addr , u8 val )
218
+ {
219
+ if (user_mode (regs )) {
220
+ return __put_user (val , addr );
221
+ } else {
222
+ * addr = val ;
223
+ return 0 ;
224
+ }
225
+ }
226
+
227
/*
 * __read_insn() - fetch an instruction parcel from @insn_addr into @insn.
 *
 * Statement-expression macro (rather than a function) so that @insn can
 * be either a u16 or a u32 lvalue and __get_user() picks the right
 * access width from the type of @insn_addr.
 *
 * Evaluates to 0 on success, or the __get_user() error for user-mode
 * addresses.  Kernel addresses are read directly and cannot fail.
 */
#define __read_insn(regs, insn, insn_addr)		\
({							\
	int __ret;					\
							\
	if (user_mode(regs)) {				\
		__ret = __get_user(insn, insn_addr);	\
	} else {					\
		insn = *insn_addr;			\
		__ret = 0;				\
	}						\
							\
	__ret;						\
})
240
+
241
+ static inline int get_insn (struct pt_regs * regs , ulong epc , ulong * r_insn )
242
+ {
243
+ ulong insn = 0 ;
244
+
245
+ if (epc & 0x2 ) {
246
+ ulong tmp = 0 ;
247
+ u16 __user * insn_addr = (u16 __user * )epc ;
248
+
249
+ if (__read_insn (regs , insn , insn_addr ))
250
+ return - EFAULT ;
251
+ /* __get_user() uses regular "lw" which sign extend the loaded
252
+ * value make sure to clear higher order bits in case we "or" it
253
+ * below with the upper 16 bits half.
254
+ */
255
+ insn &= GENMASK (15 , 0 );
256
+ if ((insn & __INSN_LENGTH_MASK ) != __INSN_LENGTH_32 ) {
257
+ * r_insn = insn ;
258
+ return 0 ;
259
+ }
260
+ insn_addr ++ ;
261
+ if (__read_insn (regs , tmp , insn_addr ))
262
+ return - EFAULT ;
263
+ * r_insn = (tmp << 16 ) | insn ;
264
+
265
+ return 0 ;
266
+ } else {
267
+ u32 __user * insn_addr = (u32 __user * )epc ;
268
+
269
+ if (__read_insn (regs , insn , insn_addr ))
270
+ return - EFAULT ;
271
+ if ((insn & __INSN_LENGTH_MASK ) == __INSN_LENGTH_32 ) {
272
+ * r_insn = insn ;
273
+ return 0 ;
274
+ }
275
+ insn &= GENMASK (15 , 0 );
276
+ * r_insn = insn ;
277
+
278
+ return 0 ;
279
+ }
280
+ }
281
+ #endif
282
+
200
283
union reg_data {
201
284
u8 data_bytes [8 ];
202
285
ulong data_ulong ;
@@ -207,10 +290,13 @@ int handle_misaligned_load(struct pt_regs *regs)
207
290
{
208
291
union reg_data val ;
209
292
unsigned long epc = regs -> epc ;
210
- unsigned long insn = get_insn ( epc ) ;
211
- unsigned long addr = csr_read ( mtval ) ;
293
+ unsigned long insn ;
294
+ unsigned long addr = regs -> badaddr ;
212
295
int i , fp = 0 , shift = 0 , len = 0 ;
213
296
297
+ if (get_insn (regs , epc , & insn ))
298
+ return -1 ;
299
+
214
300
regs -> epc = 0 ;
215
301
216
302
if ((insn & INSN_MASK_LW ) == INSN_MATCH_LW ) {
@@ -274,8 +360,10 @@ int handle_misaligned_load(struct pt_regs *regs)
274
360
}
275
361
276
362
val .data_u64 = 0 ;
277
- for (i = 0 ; i < len ; i ++ )
278
- val .data_bytes [i ] = load_u8 ((void * )(addr + i ));
363
+ for (i = 0 ; i < len ; i ++ ) {
364
+ if (load_u8 (regs , (void * )(addr + i ), & val .data_bytes [i ]))
365
+ return -1 ;
366
+ }
279
367
280
368
if (fp )
281
369
return -1 ;
@@ -290,10 +378,13 @@ int handle_misaligned_store(struct pt_regs *regs)
290
378
{
291
379
union reg_data val ;
292
380
unsigned long epc = regs -> epc ;
293
- unsigned long insn = get_insn ( epc ) ;
294
- unsigned long addr = csr_read ( mtval ) ;
381
+ unsigned long insn ;
382
+ unsigned long addr = regs -> badaddr ;
295
383
int i , len = 0 ;
296
384
385
+ if (get_insn (regs , epc , & insn ))
386
+ return -1 ;
387
+
297
388
regs -> epc = 0 ;
298
389
299
390
val .data_ulong = GET_RS2 (insn , regs );
@@ -327,8 +418,10 @@ int handle_misaligned_store(struct pt_regs *regs)
327
418
return -1 ;
328
419
}
329
420
330
- for (i = 0 ; i < len ; i ++ )
331
- store_u8 ((void * )(addr + i ), val .data_bytes [i ]);
421
+ for (i = 0 ; i < len ; i ++ ) {
422
+ if (store_u8 (regs , (void * )(addr + i ), val .data_bytes [i ]))
423
+ return -1 ;
424
+ }
332
425
333
426
regs -> epc = epc + INSN_LEN (insn );
334
427
0 commit comments