path: root/runtime/interpreter/mterp/armng/control_flow.S
%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform.
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
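    /*
     * For example (format 22t), "if-eq v2, v3, +CCCC" has a first code unit
     * of 0x3232 (B=3 in bits 15:12, A=2 in bits 11:8, opcode 0x32 in bits
     * 7:0), so the lsr/ubfx below yield r1 = 3 and r0 = 2; the second code
     * unit holds the signed branch offset CCCC.
     */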
    mov     r1, rINST, lsr #12          @ r1<- B
    ubfx    r0, rINST, #8, #4           @ r0<- A
    GET_VREG r3, r1                     @ r3<- vB
    GET_VREG r0, r0                     @ r0<- vA
    cmp     r0, r3                      @ compare (vA, vB)
    b${condition} 1f                    @ branch taken: reload the offset below
    FETCH_ADVANCE_INST 2                @ not taken: skip the 2-code-unit instruction
    GET_INST_OPCODE ip                  // extract opcode from rINST
    GOTO_OPCODE ip                      // jump to next instruction
1:
    FETCH_S rINST, 1                    // rINST<- branch offset, in code units
    BRANCH

%def zcmp(condition=""):
    /*
     * Generic one-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform.
     *
     * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
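    /*
     * For example (format 21t), "if-gez v5, +BBBB" has a first code unit of
     * 0x053B (AA=5 in bits 15:8, opcode 0x3B in bits 7:0), so the lsr below
     * yields r0 = 5; the second code unit holds the signed branch offset.
     */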
    mov     r0, rINST, lsr #8           @ r0<- AA
    GET_VREG r0, r0                     @ r0<- vAA
    cmp     r0, #0                      // compare (vAA, 0)
    b${condition} 1f                    // branch taken: reload the offset below
    FETCH_ADVANCE_INST 2                // not taken: skip the 2-code-unit instruction
    GET_INST_OPCODE ip                  // extract opcode from rINST
    GOTO_OPCODE ip                      // jump to next instruction
1:
    FETCH_S rINST, 1                    // rINST<- branch offset, in code units
    BRANCH

%def op_goto():
/*
 * Unconditional branch, 8-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto +AA */
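    /*
     * For example, "goto -3" is the single code unit 0xFD28 (AA=0xFD,
     * opcode 0x28): the sbfx below leaves rINST = -3, and BRANCH doubles
     * that to move rPC back 6 bytes.
     */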
    sbfx    rINST, rINST, #8, #8           // rINST<- ssssssAA (sign-extended)
    BRANCH

%def op_goto_16():
/*
 * Unconditional branch, 16-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/16 +AAAA */
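    /*
     * For example, a goto/16 with offset -0x100 carries 0xFF00 in its
     * second code unit; FETCH_S sign-extends it to -256 code units and
     * BRANCH doubles that to a -512 byte hop.
     */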
    FETCH_S rINST, 1                    // rINST<- ssssAAAA (sign-extended)
    BRANCH

%def op_goto_32():
/*
 * Unconditional branch, 32-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 *
 * Because we need the sign flag set for the offset, we assemble it
 * below with a flag-setting orrs rather than a plain orr.
 */
    /* goto/32 +AAAAAAAA */
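    /*
     * For example, a +0x12345 code-unit offset is stored low half first:
     * aaaa = 0x2345 and AAAA = 0x0001, so the orrs below reassembles
     * rINST = 0x00012345.
     */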
    FETCH r0, 1                         // r0<- aaaa (lo)
    FETCH r1, 2                         // r1<- AAAA (hi)
    orrs    rINST, r0, r1, lsl #16      // rINST<- AAAAaaaa (sets flags)
    BRANCH

%def op_if_eq():
%  bincmp(condition="eq")

%def op_if_eqz():
%  zcmp(condition="eq")

%def op_if_ge():
%  bincmp(condition="ge")

%def op_if_gez():
%  zcmp(condition="ge")

%def op_if_gt():
%  bincmp(condition="gt")

%def op_if_gtz():
%  zcmp(condition="gt")

%def op_if_le():
%  bincmp(condition="le")

%def op_if_lez():
%  zcmp(condition="le")

%def op_if_lt():
%  bincmp(condition="lt")

%def op_if_ltz():
%  zcmp(condition="lt")

%def op_if_ne():
%  bincmp(condition="ne")

%def op_if_nez():
%  zcmp(condition="ne")

%def op_packed_switch(func="NterpDoPackedSwitch"):
/*
 * Handle a packed-switch or sparse-switch instruction.  In both cases
 * we decode it and hand it off to a helper function.
 *
 * We don't really expect backward branches in a switch statement, but
 * they're perfectly legal, and BRANCH treats them like any other branch.
 *
 * for: packed-switch, sparse-switch
 */
    /* op vAA, +BBBB */
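    /*
     * The helper receives its arguments in the usual AAPCS registers, set
     * up below: r0 = address of the switch payload (rPC + BBBBbbbb*2) and
     * r1 = the value being switched on (vAA); it returns the branch offset,
     * in code units, in r0.
     */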
    FETCH r0, 1                         @ r0<- bbbb (lo)
    FETCH r1, 2                         @ r1<- BBBB (hi)
    mov     r3, rINST, lsr #8           @ r3<- AA
    orr     r0, r0, r1, lsl #16         @ r0<- BBBBbbbb
    GET_VREG r1, r3                     @ r1<- vAA
    add     r0, rPC, r0, lsl #1         @ r0<- PC + BBBBbbbb*2
    bl      $func                       @ r0<- code-unit branch offset
    mov     rINST, r0                   @ rINST<- branch offset, in code units
    BRANCH

%def op_sparse_switch():
%  op_packed_switch(func="NterpDoSparseSwitch")

/*
 * Return from the current method: void, a 32-bit value, a wide (64-bit)
 * value, or an object reference, depending on the template parameters.
 */
%def op_return(is_object="0", is_void="0", is_wide="0"):
    .if $is_void
      // Thread fence for constructor: make the object's field writes
      // visible before its reference can be published to other threads.
      dmb ishst
    .else
      mov     r2, rINST, lsr #8           @ r2<- AA
      .if $is_wide
        VREG_INDEX_TO_ADDR r2, r2
        GET_VREG_WIDE_BY_ADDR r0, r1, r2 // r0,r1 <- vAA
        // In case we're going back to compiled code, put the
        // result also in d0.
        vmov d0, r0, r1
      .else
        GET_VREG r0, r2                     // r0<- vAA
        .if !$is_object
        // In case we're going back to compiled code, put the
        // result also in s0.
        vmov s0, r0
        .endif
      .endif
    .endif
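    // Tear down the nterp frame: the word just below rREFS holds the SP
    // value from right after the callee saves were pushed (as the
    // .cfi_def_cfa below records); restore it, then pop the callee saves,
    // loading the saved lr directly into pc to return.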
    .cfi_remember_state
    ldr ip, [rREFS, #-4]
    mov sp, ip
    .cfi_def_cfa sp, CALLEE_SAVES_SIZE
    RESTORE_ALL_CALLEE_SAVES lr_to_pc=1
    .cfi_restore_state

%def op_return_object():
%  op_return(is_object="1", is_void="0", is_wide="0")

%def op_return_void():
%  op_return(is_object="0", is_void="1", is_wide="0")

%def op_return_wide():
%  op_return(is_object="0", is_void="0", is_wide="1")

%def op_throw():
  EXPORT_PC                            @ record the dex PC for stack walking
  mov     r2, rINST, lsr #8            @ r2<- AA
  GET_VREG r0, r2                      @ r0<- vAA (exception object)
  mov     r1, rSELF                    @ r1<- self (Thread*)
  bl      art_quick_deliver_exception  @ deliver the exception; does not return
  bkpt    0                            @ unreachable