summaryrefslogtreecommitdiff
path: root/runtime/interpreter/mterp/armng/object.S
blob: c56ec05b7d273e5231ddf767bc95e08f11f6739d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
%def op_check_cast():
   /* check-cast vAA, type@BBBB */
   // Throws if vAA's object is not an instance of the resolved class.
   // Fast-path which gets the class from thread-local cache.
   EXPORT_PC
   FETCH_FROM_THREAD_CACHE r1, 3f      // r1<- resolved class, or branch to slow path 3f
   cmp     rMR, #0                     // marking register set (GC is marking)?
   bne     4f                          // yes: mark the class reference first
1:
   lsr     r2, rINST, #8               // r2<- A
   GET_VREG r0, r2                     // r0<- vA (object)
   cmp     r0, #0
   beq     2f                          // null object: nothing to check
   bl      art_quick_check_instance_of // args: r0 = object, r1 = class; throws on failure
2:
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
3:
   // Slow path: ask the runtime to resolve the class.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   bl      nterp_get_class_or_allocate_object
   mov     r1, r0                      // fast path expects the class in r1
   b       1b
4:
   // Mark the class reference in r1 (reg01 variant) before using it.
   bl      art_quick_read_barrier_mark_reg01
   b       1b

%def op_instance_of():
   /* instance-of vA, vB, class@CCCC */
   // vA<- 1 if vB is an instance of the resolved class, 0 otherwise.
   // Fast-path which gets the class from thread-local cache.
   EXPORT_PC
   FETCH_FROM_THREAD_CACHE r1, 3f      // r1<- resolved class, or branch to slow path 3f
   cmp     rMR, #0                     // marking register set (GC is marking)?
   bne     4f                          // yes: mark the class reference first
1:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r0, r2                     // r0<- vB (object)
   cmp     r0, #0
   beq     2f                          // null object: r0 is already 0 (false result)
   bl      artInstanceOfFromCode       // args: r0 = object, r1 = class; result in r0
2:
   ubfx    r1, rINST, #8, #4           // r1<- A
   SET_VREG r0, r1                     // vA<- result
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
3:
   // Slow path: ask the runtime to resolve the class.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   bl      nterp_get_class_or_allocate_object
   mov     r1, r0                      // fast path expects the class in r1
   b       1b
4:
   // Mark the class reference in r1 (reg01 variant) before using it.
   bl      art_quick_read_barrier_mark_reg01
   b       1b

%def op_iget_boolean():
   // iget-boolean: 8-bit zero-extending load (ldrb).
%  op_iget(load="ldrb", wide="0", is_object="0")

%def op_iget_byte():
   // iget-byte: 8-bit sign-extending load (ldrsb).
%  op_iget(load="ldrsb", wide="0", is_object="0")

%def op_iget_char():
   // iget-char: 16-bit zero-extending load (ldrh).
%  op_iget(load="ldrh", wide="0", is_object="0")

%def op_iget_short():
   // iget-short: 16-bit sign-extending load (ldrsh).
%  op_iget(load="ldrsh", wide="0", is_object="0")

%def op_iget(load="ldr", wide="0", is_object="0"):
%  slow_path = add_helper(lambda: op_iget_slow_path(load, wide, is_object))
   // iget vA, vB, field@CCCC: load an instance field of vB into vA.
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}    // r0<- field offset, or branch to slow path
.L${opcode}_resume:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject        // object was null
   .if $wide
   add     r3, r3, r0                  // r3<- address of the 64-bit field
   ldrd    r0, r1, [r3]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r3, r0]
   cmp     rMR, #0                     // marking register set: loaded reference needs marking
   bne     .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
   .if $is_object
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the reference in r0
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_iget_slow_path(load, wide, is_object):
   // Slow path: resolve the field through the runtime. Non-volatile fields
   // resume on the fast path; volatile fields are handled here with a
   // trailing dmb for acquire semantics.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   cmp     r0, #0                      // negative result marks a volatile field
   bge     .L${opcode}_resume
   CLEAR_INSTANCE_VOLATILE_MARKER r0   // r0<- real field offset
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject        // object was null
   .if $wide
   add     ip, r3, r0                  // ip<- address of the 64-bit field
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish                         // barrier after the volatile load
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   dmb     ish                         // barrier after the volatile load
   .if $is_object
   cmp     rMR, #0                     // marking register set: loaded reference needs marking
   bne     .L${opcode}_read_barrier    // reuse the fast path's marking code
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iget_wide():
   // iget-wide: 64-bit load.
%  op_iget(load="ldr", wide="1", is_object="0")

%def op_iget_object():
   // iget-object: reference load with read-barrier support.
%  op_iget(load="ldr", wide="0", is_object="1")

%def op_iput_boolean():
   // iput-boolean: 8-bit store (strb); shares its slow path with iput-byte.
%  op_iput(store="strb", wide="0", is_object="0")

%def op_iput_byte():
   // iput-byte: 8-bit store (strb); shares its slow path with iput-boolean.
%  op_iput(store="strb", wide="0", is_object="0")

%def op_iput_char():
   // iput-char: 16-bit store (strh); shares its slow path with iput-short.
%  op_iput(store="strh", wide="0", is_object="0")

%def op_iput_short():
   // iput-short: 16-bit store (strh); shares its slow path with iput-char.
%  op_iput(store="strh", wide="0", is_object="0")

%def op_iput(store="str", wide="0", is_object="0"):
   // iput vA, vB, field@CCCC: store vA into an instance field of vB.
   // Share slow paths for boolean and byte (strb) and slow paths for char and short (strh).
   // It does not matter to which `.L${opcode}_resume` the slow path returns.
%  slow_path = "nterp_op_iput_helper_" + store + wide + is_object
%  add_helper(lambda: op_iput_slow_path(store, wide, is_object), slow_path)
   .if !$wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}    // r0<- field offset, or branch to slow path
.L${opcode}_resume:
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   .if $wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   VREG_INDEX_TO_ADDR r4, r4
   GET_VREG_WIDE_BY_ADDR r2, r3, r4      // r2-r3 <- fp[A], the value to store
   add     r1, r1, r0                  // r1<- address of the 64-bit field
   strd    r2, r3, [r1]
   .else
   $store  r4, [r1, r0]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_slow_path(store, wide, is_object):
   // Slow path: resolve the field through the runtime. Non-volatile fields
   // resume on the fast path; volatile fields are handled here with dmb
   // barriers around the store.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   .if $is_object
   mov     r3, r4                      // r3<- the reference value; reloaded below in case it moved
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   .if $is_object
   // Reload the value as it may have moved.
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   cmp     r0, #0                      // negative result marks a volatile field
   bge     .L${opcode}_resume
   CLEAR_INSTANCE_VOLATILE_MARKER r0   // r0<- real field offset
   .if $wide
   lsr     r4, rINST, #12              // r4<- B
   ubfx    r1, rINST, #8, #4           // r1<- A
   GET_VREG r4, r4                     // vB (object we're operating on)
   cmp     r4, #0
   beq     common_errNullObject
   VREG_INDEX_TO_ADDR r1, r1
   GET_VREG_WIDE_BY_ADDR r2, r3, r1    // r2-r3 <- fp[A], the value to store
   add     ip, r4, r0                  // ip<- address of the 64-bit field
   dmb     ish                         // barrier before the volatile store
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish                         // barrier after the volatile store
   .else
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   dmb     ish                         // barrier before the volatile store
   $store  r4, [r1, r0]
   dmb     ish                         // barrier after the volatile store
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_slow_path_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_wide():
   // iput-wide: 64-bit store.
%  op_iput(store="str", wide="1", is_object="0")

%def op_iput_object():
   // iput-object: reference store with write-barrier support.
%  op_iput(store="str", wide="0", is_object="1")

%def op_sget_boolean():
   // sget-boolean: 8-bit zero-extending load (ldrb).
%  op_sget(load="ldrb", wide="0", is_object="0")

%def op_sget_byte():
   // sget-byte: 8-bit sign-extending load (ldrsb).
%  op_sget(load="ldrsb", wide="0", is_object="0")

%def op_sget_char():
   // sget-char: 16-bit zero-extending load (ldrh).
%  op_sget(load="ldrh", wide="0", is_object="0")

%def op_sget_short():
   // sget-short: 16-bit sign-extending load (ldrsh).
%  op_sget(load="ldrsh", wide="0", is_object="0")

%def op_sget(load="ldr", wide="0", is_object="0"):
%  slow_path = add_helper(lambda: op_sget_slow_path(load, wide, is_object))
   // sget vAA, field@BBBB: load a static field into vAA.
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}    // r0<- ArtField*, or branch to slow path
.L${opcode}_resume:
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]  // r1<- field offset
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]  // r0<- declaring class, which holds the storage
   cmp     rMR, #0                     // marking register set: the class reference needs marking
   bne     .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   add     r0, r0, r1                  // r0<- address of the 64-bit field
   ldrd    r0, r1, [r0]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r0, r1]
   // No need to check the marking register, we know it's not set here.
.L${opcode}_after_reference_load:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the declaring class in r0
   .if $is_object
   ldr     r0, [r0, r1]                // load the reference from the marked class
.L${opcode}_mark_after_load:
   // Here, we know the marking register is set.
   bl      art_quick_read_barrier_mark_reg00   // mark the loaded reference as well
   b       .L${opcode}_after_reference_load
   .else
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_sget_slow_path(load="ldr", wide="0", is_object="0"):
   // Slow path: resolve the field through the runtime. Non-volatile fields
   // resume on the fast path; volatile fields are handled here with a
   // trailing dmb for acquire semantics.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_static_field
   tst     r0, #1                      // low bit of the returned ArtField* marks a volatile field
   beq     .L${opcode}_resume
   CLEAR_STATIC_VOLATILE_MARKER r0     // r0<- real ArtField*
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]  // r1<- field offset
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]  // r0<- declaring class, which holds the storage
   cmp     rMR, #0                     // marking register set: the class reference needs marking
   bne     .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   add     ip, r0, r1                  // ip<- address of the 64-bit field
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish                         // barrier after the volatile load
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   dmb     ish                         // barrier after the volatile load
   .if $is_object
   cmp     rMR, #0                     // marking register set: loaded reference needs marking
   bne     .L${opcode}_mark_after_load // reuse the fast path's reference-marking code
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the declaring class in r0
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sget_wide():
   // sget-wide: 64-bit load.
%  op_sget(load="ldr", wide="1", is_object="0")

%def op_sget_object():
   // sget-object: reference load with read-barrier support.
%  op_sget(load="ldr", wide="0", is_object="1")

%def op_sput_boolean():
   // sput-boolean: 8-bit store (strb); shares its slow path with sput-byte.
%  op_sput(store="strb", wide="0", is_object="0")

%def op_sput_byte():
   // sput-byte: 8-bit store (strb); shares its slow path with sput-boolean.
%  op_sput(store="strb", wide="0", is_object="0")

%def op_sput_char():
   // sput-char: 16-bit store (strh); shares its slow path with sput-short.
%  op_sput(store="strh", wide="0", is_object="0")

%def op_sput_short():
   // sput-short: 16-bit store (strh); shares its slow path with sput-char.
%  op_sput(store="strh", wide="0", is_object="0")

%def op_sput(store="str", wide="0", is_object="0"):
   // sput vAA, field@BBBB: store vAA into a static field.
   // Share slow paths for boolean and byte (strb) and slow paths for char and short (strh).
   // It does not matter to which `.L${opcode}_resume` the slow path returns.
%  slow_path = "nterp_op_sput_helper_" + store + wide + is_object
%  add_helper(lambda: op_sput_slow_path(store, wide, is_object), slow_path)
   .if !$wide
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}    // r0<- ArtField*, or branch to slow path
.L${opcode}_resume:
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]  // r1<- field offset
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]  // r0<- declaring class, which holds the storage
   cmp     rMR, #0                     // marking register set: the class reference needs marking
   bne     .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2-r3 <- fp[A], the value to store
   add     r0, r0, r1                  // r0<- address of the 64-bit field
   strd    r2, r3, [r0]
   .else
   $store  r4, [r0, r1]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the declaring class in r0
   b       .L${opcode}_resume_after_read_barrier

%def op_sput_slow_path(store, wide, is_object):
   // Slow path: resolve the field through the runtime. Non-volatile fields
   // resume on the fast path; volatile fields are handled here with dmb
   // barriers around the store.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   .if $is_object
   mov     r3, r4                      // r3<- the reference value; reloaded below in case it moved
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_static_field
   .if $is_object
   // Reload the value as it may have moved.
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   tst     r0, #1                      // low bit of the returned ArtField* marks a volatile field
   beq     .L${opcode}_resume
   CLEAR_STATIC_VOLATILE_MARKER r0     // r0<- real ArtField*
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]  // r1<- field offset
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]  // r0<- declaring class, which holds the storage
   cmp     rMR, #0                     // marking register set: the class reference needs marking
   bne     .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2-r3 <- fp[A], the value to store
   add     ip, r0, r1                  // ip<- address of the 64-bit field
   dmb     ish                         // barrier before the volatile store
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish                         // barrier after the volatile store
   .else
   dmb     ish                         // barrier before the volatile store
   $store  r4, [r0, r1]
   dmb     ish                         // barrier after the volatile store
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_slow_path_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the declaring class in r0
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sput_wide():
   // sput-wide: 64-bit store.
%  op_sput(store="str", wide="1", is_object="0")

%def op_sput_object():
   // sput-object: reference store with write-barrier support.
%  op_sput(store="str", wide="0", is_object="1")

%def op_new_instance():
   /* new-instance vAA, type@BBBB */
   // The routine is too big to fit in a handler, so jump to it.
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, 2f      // r0<- resolved class, or branch to slow path 2f
   cmp     rMR, #0                     // marking register set: the class reference needs marking
   bne     3f
4:
   // Allocate through the thread-local allocation entrypoint; r0 = class.
   ldr     lr, [rSELF, #THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET]
   blx     lr                          // r0<- new object
1:
   lsr     r1, rINST, #8                    // r1 <- A
   SET_VREG_OBJECT r0, r1               // fp[A] <- value
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Slow path: the runtime resolves the class and allocates the object
   // itself, so skip the allocation entrypoint and store r0 directly.
   mov     r0, rSELF
   ldr     r1, [sp]                    // r1<- first stack slot; presumably the caller ArtMethod* — confirm
   mov     r2, rPC
   bl      nterp_get_class_or_allocate_object
   b       1b
3:
   // Mark the class reference in r0 before allocating.
   bl      art_quick_read_barrier_mark_reg00
   b       4b