1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "asm_support_x86.S"
18#include "interpreter/cfi_asm_support.h"
19
20#include "arch/quick_alloc_entrypoints.S"
21
22// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
23
24    /*
25     * Macro that sets up the callee save frame to conform with
26     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves)
27     */
28MACRO2(SETUP_SAVE_ALL_CALLEE_SAVES_FRAME, got_reg, temp_reg)
29    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
30    PUSH esi
31    PUSH ebp
32    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
33    CFI_ADJUST_CFA_OFFSET(12)
34    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
35    // Load Runtime::instance_ from GOT.
36    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
37    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
38    // Push save all callee-save method.
39    pushl RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(REG_VAR(temp_reg))
40    CFI_ADJUST_CFA_OFFSET(4)
41    // Store esp as the top quick frame.
42    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
43    // Ugly compile-time check, but we only have the preprocessor.
44    // Last +4: implicit return address pushed on stack when caller made call.
45#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 3*4 + 16 + 4)
46#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(X86) size not as expected."
47#endif
48END_MACRO
49
50    /*
51     * Macro that sets up the callee save frame to conform with
52     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly)
53     */
54MACRO2(SETUP_SAVE_REFS_ONLY_FRAME, got_reg, temp_reg)
55    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
56    PUSH esi
57    PUSH ebp
58    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
59    CFI_ADJUST_CFA_OFFSET(12)
60    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
61    // Load Runtime::instance_ from GOT.
62    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
63    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
64    // Push save all callee-save method.
65    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
66    CFI_ADJUST_CFA_OFFSET(4)
67    // Store esp as the top quick frame.
68    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
69
70    // Ugly compile-time check, but we only have the preprocessor.
71    // Last +4: implicit return address pushed on stack when caller made call.
72#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
73#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
74#endif
75END_MACRO
76
77    /*
78     * Macro that sets up the callee save frame to conform with
79     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly)
80     * and preserves the value of got_reg at entry.
81     */
82MACRO2(SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_GOT_REG, got_reg, temp_reg)
83    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
84    PUSH esi
85    PUSH ebp
86    PUSH RAW_VAR(got_reg)  // Save got_reg
87    subl MACRO_LITERAL(8), %esp  // Grow stack by 2 words.
88    CFI_ADJUST_CFA_OFFSET(8)
89
90    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
91    // Load Runtime::instance_ from GOT.
92    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
93    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
94    // Push save all callee-save method.
95    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
96    CFI_ADJUST_CFA_OFFSET(4)
97    // Store esp as the top quick frame.
98    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
99    // Restore got_reg.
100    movl 12(%esp), REG_VAR(got_reg)
101    CFI_RESTORE(RAW_VAR(got_reg))
102
103    // Ugly compile-time check, but we only have the preprocessor.
104    // Last +4: implicit return address pushed on stack when caller made call.
105#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
106#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
107#endif
108END_MACRO
109
    /*
     * Macro that tears down the kSaveRefsOnly callee save frame set up above,
     * leaving only the caller's return address on the stack.
     */
MACRO0(RESTORE_SAVE_REFS_ONLY_FRAME)
    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values (method* + padding).
    CFI_ADJUST_CFA_OFFSET(-16)
    POP ebp  // Restore callee saves (ebx is saved/restored by the upcall)
    POP esi
    POP edi
END_MACRO
117
118    /*
119     * Macro that sets up the callee save frame to conform with
120     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs)
121     */
122MACRO2(SETUP_SAVE_REFS_AND_ARGS_FRAME, got_reg, temp_reg)
123    SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
124
125    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
126    // Load Runtime::instance_ from GOT.
127    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
128    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
129    // Push save all callee-save method.
130    pushl RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET(REG_VAR(temp_reg))
131    CFI_ADJUST_CFA_OFFSET(4)
132    // Store esp as the stop quick frame.
133    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
134END_MACRO
135
136    /*
137     * Macro that sets up the callee save frame to conform with
138     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs) where the method is passed in EAX.
139     */
140MACRO0(SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX)
141    SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
142
143    pushl %eax  // Store the ArtMethod reference at the bottom of the stack.
144    CFI_ADJUST_CFA_OFFSET(4)
145    // Store esp as the stop quick frame.
146    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
147END_MACRO
148
// Tear down the kSaveRefsAndArgs frame, restore all argument registers and
// callee saves, then tail-jump to a target routine.
// Inputs:  EDI contains pointer to code.
// Notes: Need to pop EAX too (restores Method*)
MACRO0(RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP)
    POP eax  // Restore Method*

    // Restore FPRs (the four XMM argument registers spilled by the setup macro).
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3

    addl MACRO_LITERAL(32), %esp  // Remove FPRs.
    CFI_ADJUST_CFA_OFFSET(-32)

    POP ecx  // Restore args except eax
    POP edx
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    // Restore EDI and place code pointer as only value on stack, so that
    // the `ret` below transfers control to the target code.
    xchgl 0(%esp),%edi
    ret
END_MACRO
172
173    /*
174     * Macro that sets up the callee save frame to conform with
175     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
176     * when EDI and ESI are already saved.
177     */
178MACRO3(SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED, got_reg, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
179    // Save core registers from highest to lowest to agree with core spills bitmap.
180    // EDI and ESI, or at least placeholders for them, are already on the stack.
181    PUSH ebp
182    PUSH ebx
183    PUSH edx
184    PUSH ecx
185    PUSH eax
186    // Create space for FPR registers and stack alignment padding.
187    subl MACRO_LITERAL(12 + 8 * 8), %esp
188    CFI_ADJUST_CFA_OFFSET(12 + 8 * 8)
189    // Save FPRs.
190    movsd %xmm0, 12(%esp)
191    movsd %xmm1, 20(%esp)
192    movsd %xmm2, 28(%esp)
193    movsd %xmm3, 36(%esp)
194    movsd %xmm4, 44(%esp)
195    movsd %xmm5, 52(%esp)
196    movsd %xmm6, 60(%esp)
197    movsd %xmm7, 68(%esp)
198
199    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
200    // Load Runtime::instance_ from GOT.
201    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
202    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
203    // Push save everything callee-save method.
204    pushl \runtime_method_offset(REG_VAR(temp_reg))
205    CFI_ADJUST_CFA_OFFSET(4)
206    // Store esp as the stop quick frame.
207    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
208
209    // Ugly compile-time check, but we only have the preprocessor.
210    // Last +4: implicit return address pushed on stack when caller made call.
211#if (FRAME_SIZE_SAVE_EVERYTHING != 7*4 + 8*8 + 12 + 4 + 4)
212#error "FRAME_SIZE_SAVE_EVERYTHING(X86) size not as expected."
213#endif
214END_MACRO
215
216    /*
217     * Macro that sets up the callee save frame to conform with
218     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
219     * when EDI is already saved.
220     */
221MACRO3(SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED, got_reg, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
222    // Save core registers from highest to lowest to agree with core spills bitmap.
223    // EDI, or at least a placeholder for it, is already on the stack.
224    PUSH esi
225    SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED RAW_VAR(got_reg), RAW_VAR(temp_reg), \runtime_method_offset
226END_MACRO
227
228    /*
229     * Macro that sets up the callee save frame to conform with
230     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
231     */
232MACRO3(SETUP_SAVE_EVERYTHING_FRAME, got_reg, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
233    PUSH edi
234    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED RAW_VAR(got_reg), RAW_VAR(temp_reg), \runtime_method_offset
235END_MACRO
236
    /*
     * Restore the FPRs spilled by SETUP_SAVE_EVERYTHING_FRAME.
     * Offsets are +4 relative to the spill offsets because the method* is
     * still on the stack below them.
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_FRPS)
    // Restore FPRs. Method and padding is still on the stack.
    movsd 16(%esp), %xmm0
    movsd 24(%esp), %xmm1
    movsd 32(%esp), %xmm2
    movsd 40(%esp), %xmm3
    movsd 48(%esp), %xmm4
    movsd 56(%esp), %xmm5
    movsd 64(%esp), %xmm6
    movsd 72(%esp), %xmm7
END_MACRO
248
    /*
     * Pop the core registers spilled by SETUP_SAVE_EVERYTHING_FRAME, in
     * reverse spill order, leaving EAX's slot to the caller of this macro.
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX)
    // Restore core registers (except eax).
    POP ecx
    POP edx
    POP ebx
    POP ebp
    POP esi
    POP edi
END_MACRO
258
    /*
     * Fully tear down the kSaveEverything frame, restoring every register
     * including EAX.
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME)
    RESTORE_SAVE_EVERYTHING_FRAME_FRPS

    // Remove save everything callee save method, stack alignment padding and FPRs.
    addl MACRO_LITERAL(16 + 8 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(-(16 + 8 * 8))

    POP eax
    RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX
END_MACRO
269
    /*
     * Tear down the kSaveEverything frame but keep the current value of EAX
     * (e.g. a return value), discarding the spilled EAX slot.
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX)
    RESTORE_SAVE_EVERYTHING_FRAME_FRPS

    // Remove save everything callee save method, stack alignment padding and FPRs, skip EAX.
    addl MACRO_LITERAL(16 + 8 * 8 + 4), %esp
    CFI_ADJUST_CFA_OFFSET(-(16 + 8 * 8 + 4))

    RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX
END_MACRO
279
280    /*
281     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
282     * exception is Thread::Current()->exception_.
283     */
284MACRO0(DELIVER_PENDING_EXCEPTION)
285    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save callee saves for throw
286    DELIVER_PENDING_EXCEPTION_FRAME_READY
287END_MACRO
288
    /*
     * Generate a no-argument throw entrypoint c_name that saves all callee
     * saves and calls cxx_name(Thread*). The callee never returns.
     */
MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp               // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call CALLVAR(cxx_name)                     // cxx_name(Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
301
    /*
     * Generate a no-argument throw entrypoint c_name that saves everything
     * (all GPRs and FPRs) and calls cxx_name(Thread*). The callee never returns.
     */
MACRO2(NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx       // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp               // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call CALLVAR(cxx_name)                     // cxx_name(Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
314
    /*
     * Generate a one-argument throw entrypoint c_name that saves all callee
     * saves and calls cxx_name(arg1, Thread*) with arg1 taken from EAX.
     * The callee never returns.
     */
MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                   // pass arg1
    call CALLVAR(cxx_name)                     // cxx_name(arg1, Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
328
    /*
     * Generate a two-argument throw entrypoint c_name that saves everything
     * and calls cxx_name(arg1, arg2, Thread*) with args taken from EAX and ECX.
     * The callee never returns.
     */
MACRO2(TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx       // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                                   // alignment padding (value is unused)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                   // pass arg2
    PUSH eax                                   // pass arg1
    call CALLVAR(cxx_name)                     // cxx_name(arg1, arg2, Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
342
343    /*
344     * Called by managed code to create and deliver a NullPointerException.
345     */
346NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
347
348    /*
349     * Call installed by a signal handler to create and deliver a NullPointerException.
350     */
351DEFINE_FUNCTION_CUSTOM_CFA art_quick_throw_null_pointer_exception_from_signal, 2 * __SIZEOF_POINTER__
352    // Fault address and return address were saved by the fault handler.
353    // Save all registers as basis for long jump context; EDI will replace fault address later.
354    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED ebx, ebx
355    // Retrieve fault address and save EDI.
356    movl (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp), %eax
357    movl %edi, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp)
358    CFI_REL_OFFSET(%edi, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__))
359    // Outgoing argument set up
360    subl MACRO_LITERAL(8), %esp                           // alignment padding
361    CFI_ADJUST_CFA_OFFSET(8)
362    pushl %fs:THREAD_SELF_OFFSET                          // pass Thread::Current()
363    CFI_ADJUST_CFA_OFFSET(4)
364    PUSH eax                                              // pass arg1
365    call SYMBOL(artThrowNullPointerExceptionFromSignal)   // (addr, self)
366    UNREACHABLE
367END_FUNCTION art_quick_throw_null_pointer_exception
368
369    /*
370     * Called by managed code to create and deliver an ArithmeticException.
371     */
372NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_div_zero, artThrowDivZeroFromCode
373
374    /*
375     * Called by managed code to create and deliver a StackOverflowError.
376     */
377NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
378
379    /*
380     * Called by managed code, saves callee saves and then calls artThrowException
381     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
382     */
383ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
384
385    /*
386     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
387     * index, arg2 holds limit.
388     */
389TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
390
391    /*
392     * Called by managed code to create and deliver a StringIndexOutOfBoundsException
393     * as if thrown from a call to String.charAt(). Arg1 holds index, arg2 holds limit.
394     */
395TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_string_bounds, artThrowStringBoundsFromCode
396
397    /*
398     * All generated callsites for interface invokes and invocation slow paths will load arguments
399     * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
400     * the method_idx.  This wrapper will save arg1-arg3 and call the appropriate C helper.
401     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1.
402     *
403     * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
404     * of the target Method* in r0 and method->code_ in r1.
405     *
406     * If unsuccessful, the helper will return null/null and there will be a pending exception in the
407     * thread and we branch to another stub to deliver it.
408     *
409     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
410     * pointing back to the original caller.
411     */
412MACRO1(INVOKE_TRAMPOLINE_BODY, cxx_name)
413    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, ebx
414    movl %esp, %edx  // remember SP
415
416    // Outgoing argument set up
417    PUSH edx                      // pass SP
418    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
419    CFI_ADJUST_CFA_OFFSET(4)
420    PUSH ecx                      // pass arg2
421    PUSH eax                      // pass arg1
422    call CALLVAR(cxx_name)        // cxx_name(arg1, arg2, Thread*, SP)
423    movl %edx, %edi               // save code pointer in EDI
424    addl MACRO_LITERAL(20), %esp  // Pop arguments skip eax
425    CFI_ADJUST_CFA_OFFSET(-20)
426
427    // Restore FPRs.
428    movsd 0(%esp), %xmm0
429    movsd 8(%esp), %xmm1
430    movsd 16(%esp), %xmm2
431    movsd 24(%esp), %xmm3
432
433    // Remove space for FPR args.
434    addl MACRO_LITERAL(4 * 8), %esp
435    CFI_ADJUST_CFA_OFFSET(-4 * 8)
436
437    POP ecx  // Restore args except eax
438    POP edx
439    POP ebx
440    POP ebp  // Restore callee saves
441    POP esi
442    // Swap EDI callee save with code pointer.
443    xchgl %edi, (%esp)
444    testl %eax, %eax              // Branch forward if exception pending.
445    jz    1f
446    // Tail call to intended method.
447    ret
4481:
449    addl MACRO_LITERAL(4), %esp   // Pop code pointer off stack
450    CFI_ADJUST_CFA_OFFSET(-4)
451    DELIVER_PENDING_EXCEPTION
452END_MACRO
    /*
     * Generate an invoke trampoline entrypoint c_name backed by the C helper
     * cxx_name, using INVOKE_TRAMPOLINE_BODY above.
     */
MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    INVOKE_TRAMPOLINE_BODY RAW_VAR(cxx_name)
    END_FUNCTION VAR(c_name)
END_MACRO
458
// Trampolines for the five invoke kinds with access checks.
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
465
466    /*
467     * Helper for quick invocation stub to set up XMM registers.
468     * Increments shorty and arg_array and clobbers temp_char.
469     * Branches to finished if it encounters the end of the shorty.
470     */
471MACRO5(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, shorty, arg_array, temp_char, finished)
4721: // LOOP
473    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
474    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
475    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
476    je VAR(finished)                               //   goto finished
477    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
478    je 2f                                          //   goto FOUND_DOUBLE
479    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
480    je 3f                                          //   goto FOUND_FLOAT
481    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
482    //  Handle extra space in arg array taken by a long.
483    cmpb MACRO_LITERAL(74), REG_VAR(temp_char)     // if (temp_char != 'J')
484    jne 1b                                         //   goto LOOP
485    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
486    jmp 1b                                         // goto LOOP
4872:  // FOUND_DOUBLE
488    movsd (REG_VAR(arg_array)), REG_VAR(xmm_reg)
489    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
490    jmp 4f
4913:  // FOUND_FLOAT
492    movss (REG_VAR(arg_array)), REG_VAR(xmm_reg)
493    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
4944:
495END_MACRO
496
497    /*
498     * Helper for quick invocation stub to set up GPR registers.
499     * Increments shorty and arg_array, and returns the current short character in
500     * temp_char. Branches to finished if it encounters the end of the shorty.
501     */
502MACRO4(SKIP_OVER_FLOATS, shorty, arg_array, temp_char, finished)
5031: // LOOP:
504    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
505    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
506    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
507    je VAR(finished)                               //   goto finished
508    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
509    je 3f                                          //   goto SKIP_FLOAT
510    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
511    je 4f                                          //   goto SKIP_DOUBLE
512    jmp 5f                                         // goto end
5133:  // SKIP_FLOAT
514    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
515    jmp 1b                                         // goto LOOP
5164:  // SKIP_DOUBLE
517    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
518    jmp 1b                                         // goto LOOP
5195:
520END_MACRO
521
522  /*
523     * Quick invocation stub (non-static).
524     * On entry:
525     *   [sp] = return address
526     *   [sp + 4] = method pointer
527     *   [sp + 8] = argument array or null for no argument methods
528     *   [sp + 12] = size of argument array in bytes
529     *   [sp + 16] = (managed) thread pointer
530     *   [sp + 20] = JValue* result
531     *   [sp + 24] = shorty
532     */
533DEFINE_FUNCTION art_quick_invoke_stub
534    // Save the non-volatiles.
535    PUSH ebp                      // save ebp
536    PUSH ebx                      // save ebx
537    PUSH esi                      // save esi
538    PUSH edi                      // save edi
539    // Set up argument XMM registers.
540    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
541    addl LITERAL(1), %esi
542    mov 8+16(%esp), %edi          // EDI := arg_array + 4 ; ie skip this pointer.
543    addl LITERAL(4), %edi
544    // Clobbers ESI, EDI, EAX.
545    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished
546    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished
547    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished
548    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished
549    .balign 16
550.Lxmm_setup_finished:
551    mov %esp, %ebp                // copy value of stack pointer into base pointer
552    CFI_DEF_CFA_REGISTER(ebp)
553    mov 28(%ebp), %ebx            // get arg array size
554    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
555    addl LITERAL(36), %ebx
556    // align frame size to 16 bytes
557    andl LITERAL(0xFFFFFFF0), %ebx
558    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
559    subl %ebx, %esp               // reserve stack space for argument array
560
561    movl LITERAL(0), (%esp)       // store null for method*
562
563    // Copy arg array into stack.
564    movl 28(%ebp), %ecx           // ECX = size of args
565    movl 24(%ebp), %esi           // ESI = argument array
566    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
567    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
568
569    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
570    addl LITERAL(1), %esi
571    mov 24(%ebp), %edi            // EDI := arg_array
572    mov 0(%edi), %ecx             // ECX := this pointer
573    addl LITERAL(4), %edi         // EDI := arg_array + 4 ; ie skip this pointer.
574
575    // Enumerate the possible cases for loading GPRS.
576    // edx (and maybe ebx):
577    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
578    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
579    je .LfirstLong
580    // Must be an integer value.
581    movl (%edi), %edx
582    addl LITERAL(4), %edi         // arg_array++
583
584    // Now check ebx
585    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
586    // Must be first word of a long, or an integer. First word of long doesn't
587    // go into EBX, but can be loaded there anyways, as it is harmless.
588    movl (%edi), %ebx
589    jmp .Lgpr_setup_finished
590.LfirstLong:
591    movl (%edi), %edx
592    movl 4(%edi), %ebx
593    // Nothing left to load.
594.Lgpr_setup_finished:
595    mov 20(%ebp), %eax            // move method pointer into eax
596    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
597    mov %ebp, %esp                // restore stack pointer
598    CFI_DEF_CFA_REGISTER(esp)
599    POP edi                       // pop edi
600    POP esi                       // pop esi
601    POP ebx                       // pop ebx
602    POP ebp                       // pop ebp
603    mov 20(%esp), %ecx            // get result pointer
604    mov %eax, (%ecx)              // store the result assuming its a long, int or Object*
605    mov %edx, 4(%ecx)             // store the other half of the result
606    mov 24(%esp), %edx            // get the shorty
607    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
608    je .Lreturn_double_quick
609    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
610    je .Lreturn_float_quick
611    ret
612.Lreturn_double_quick:
613    movsd %xmm0, (%ecx)           // store the floating point result
614    ret
615.Lreturn_float_quick:
616    movss %xmm0, (%ecx)           // store the floating point result
617    ret
618END_FUNCTION art_quick_invoke_stub
619
620  /*
621     * Quick invocation stub (static).
622     * On entry:
623     *   [sp] = return address
624     *   [sp + 4] = method pointer
625     *   [sp + 8] = argument array or null for no argument methods
626     *   [sp + 12] = size of argument array in bytes
627     *   [sp + 16] = (managed) thread pointer
628     *   [sp + 20] = JValue* result
629     *   [sp + 24] = shorty
630     */
631DEFINE_FUNCTION art_quick_invoke_static_stub
632    // Save the non-volatiles.
633    PUSH ebp                      // save ebp
634    PUSH ebx                      // save ebx
635    PUSH esi                      // save esi
636    PUSH edi                      // save edi
637    // Set up argument XMM registers.
638    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
639    addl LITERAL(1), %esi
640    mov 8+16(%esp), %edi          // EDI := arg_array
641    // Clobbers ESI, EDI, EAX.
642    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished2
643    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished2
644    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished2
645    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished2
646    .balign 16
647.Lxmm_setup_finished2:
648    mov %esp, %ebp                // copy value of stack pointer into base pointer
649    CFI_DEF_CFA_REGISTER(ebp)
650    mov 28(%ebp), %ebx            // get arg array size
651    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
652    addl LITERAL(36), %ebx
653    // align frame size to 16 bytes
654    andl LITERAL(0xFFFFFFF0), %ebx
655    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
656    subl %ebx, %esp               // reserve stack space for argument array
657
658    movl LITERAL(0), (%esp)       // store null for method*
659
660    // Copy arg array into stack.
661    movl 28(%ebp), %ecx           // ECX = size of args
662    movl 24(%ebp), %esi           // ESI = argument array
663    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
664    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
665
666    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
667    addl LITERAL(1), %esi
668    mov 24(%ebp), %edi            // EDI := arg_array
669
670    // Enumerate the possible cases for loading GPRS.
671    // ecx (and maybe edx)
672    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
673    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
674    je .LfirstLong2
675    // Must be an integer value.  Load into ECX.
676    movl (%edi), %ecx
677    addl LITERAL(4), %edi         // arg_array++
678
679    // Now check edx (and maybe ebx).
680    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
681    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
682    je .LSecondLong2
683    // Must be an integer.  Load into EDX.
684    movl (%edi), %edx
685    addl LITERAL(4), %edi         // arg_array++
686
687    // Is there anything for ebx?
688    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
689    // Must be first word of a long, or an integer. First word of long doesn't
690    // go into EBX, but can be loaded there anyways, as it is harmless.
691    movl (%edi), %ebx
692    jmp .Lgpr_setup_finished2
693.LSecondLong2:
694    // EDX:EBX is long.  That is all.
695    movl (%edi), %edx
696    movl 4(%edi), %ebx
697    jmp .Lgpr_setup_finished2
698.LfirstLong2:
699    // ECX:EDX is a long
700    movl (%edi), %ecx
701    movl 4(%edi), %edx
702    addl LITERAL(8), %edi         // arg_array += 2
703
704    // Anything for EBX?
705    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
706    // Must be first word of a long, or an integer. First word of long doesn't
707    // go into EBX, but can be loaded there anyways, as it is harmless.
708    movl (%edi), %ebx
709    jmp .Lgpr_setup_finished2
710    // Nothing left to load.
711.Lgpr_setup_finished2:
712    mov 20(%ebp), %eax            // move method pointer into eax
713    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
714    mov %ebp, %esp                // restore stack pointer
715    CFI_DEF_CFA_REGISTER(esp)
716    POP edi                       // pop edi
717    POP esi                       // pop esi
718    POP ebx                       // pop ebx
719    POP ebp                       // pop ebp
720    mov 20(%esp), %ecx            // get result pointer
721    mov %eax, (%ecx)              // store the result assuming its a long, int or Object*
722    mov %edx, 4(%ecx)             // store the other half of the result
723    mov 24(%esp), %edx            // get the shorty
724    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
725    je .Lreturn_double_quick2
726    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
727    je .Lreturn_float_quick2
728    ret
729.Lreturn_double_quick2:
730    movsd %xmm0, (%ecx)           // store the floating point result
731    ret
732.Lreturn_float_quick2:
733    movss %xmm0, (%ecx)           // store the floating point result
734    ret
735END_FUNCTION art_quick_invoke_static_stub
736
    /*
     * Macro for a one-argument runtime downcall: saves the kSaveRefsOnly
     * callee-save frame, calls cxx_name(arg1, Thread*) with arg1 taken from
     * EAX, then lets return_macro decide between returning and delivering a
     * pending exception.
     */
MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx         // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                  // push padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments (8 padding + 4 Thread* + 4 arg1)
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
753
    /*
     * Macro for a two-argument runtime downcall: saves the kSaveRefsOnly
     * callee-save frame, calls cxx_name(arg1, arg2, Thread*) with arg1/arg2
     * taken from EAX/ECX, then dispatches via return_macro.
     */
MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx         // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                     // push padding (value is irrelevant; eax is re-pushed as arg1 below)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
770
    /*
     * Macro for a three-argument runtime downcall: saves the kSaveRefsOnly
     * callee-save frame, calls cxx_name(arg1, arg2, arg3, Thread*) with
     * args taken from EAX/ECX/EDX, then dispatches via return_macro.
     * Four words are pushed in total, so no extra padding is needed.
     */
MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx         // save ref containing registers for GC
    // Outgoing argument set up
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                                     // pass arg3
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
787
    /*
     * Macro for a four-argument runtime downcall: saves the kSaveRefsOnly
     * callee-save frame, calls cxx_name(arg1, arg2, arg3, arg4, Thread*)
     * with args in EAX/ECX/EDX/EBX.  Uses the PRESERVE_GOT_REG frame setup
     * since EBX (the GOT register) also carries arg4 here.
     */
MACRO3(FOUR_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_GOT_REG ebx, ebx  // save ref containing registers for GC

    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ebx                                     // pass arg4
    PUSH edx                                     // pass arg3
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, arg4, Thread*)
    addl MACRO_LITERAL(32), %esp                 // pop arguments (12 padding + 4 Thread* + 16 args)
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
808
    /*
     * Macro for a one-reference-argument runtime downcall.  Identical stack
     * shape to ONE_ARG_DOWNCALL: saves the kSaveRefsOnly frame and calls
     * cxx_name(arg1, Thread*) with arg1 from EAX.
     */
MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx               // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                       // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
825
    /*
     * Macro for a two-reference-argument runtime downcall.  Same stack shape
     * as TWO_ARG_DOWNCALL: saves the kSaveRefsOnly frame and calls
     * cxx_name(arg1, arg2, Thread*) with args from EAX/ECX.
     */
MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx               // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                          // alignment padding (eax is re-pushed as arg1 below)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                          // pass arg2
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
842
843MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
844    DEFINE_FUNCTION VAR(c_name)
845    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx               // save ref containing registers for GC
846    // Outgoing argument set up
847    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
848    CFI_ADJUST_CFA_OFFSET(4)
849    PUSH edx                                          // pass arg3
850    PUSH ecx                                          // pass arg2
851    PUSH eax                                          // pass arg1
852    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, arg3, Thread*)
853    addl LITERAL(16), %esp                            // pop arguments
854    CFI_ADJUST_CFA_OFFSET(-32)
855    RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
856    CALL_MACRO(return_macro)                          // return or deliver exception
857    END_FUNCTION VAR(c_name)
858END_MACRO
859
860    /*
861     * Macro for resolution and initialization of indexed DEX file
862     * constants such as classes and strings.
863     */
864MACRO3(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
865    DEFINE_FUNCTION VAR(c_name)
866    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx, \runtime_method_offset  // save ref containing registers for GC
867    // Outgoing argument set up
868    subl MACRO_LITERAL(8), %esp                       // push padding
869    CFI_ADJUST_CFA_OFFSET(8)
870    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
871    CFI_ADJUST_CFA_OFFSET(4)
872    PUSH eax                                          // pass the index of the constant as arg1
873    call CALLVAR(cxx_name)                            // cxx_name(arg1, Thread*)
874    addl MACRO_LITERAL(16), %esp                      // pop arguments
875    CFI_ADJUST_CFA_OFFSET(-16)
876    testl %eax, %eax                                  // If result is null, deliver the OOME.
877    jz 1f
878    CFI_REMEMBER_STATE
879    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX            // restore frame up to return address
880    ret                                               // return
881    CFI_RESTORE_STATE_AND_DEF_CFA(esp, FRAME_SIZE_SAVE_EVERYTHING)
8821:
883    DELIVER_PENDING_EXCEPTION_FRAME_READY
884    END_FUNCTION VAR(c_name)
885END_MACRO
886
    /*
     * Variant of ONE_ARG_SAVE_EVERYTHING_DOWNCALL that uses the
     * for-clinit runtime method offset (used by class initialization
     * entrypoints such as initialize_static_storage / resolve_type).
     */
MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT, c_name, cxx_name)
    ONE_ARG_SAVE_EVERYTHING_DOWNCALL \c_name, \cxx_name, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
END_MACRO
890
    /*
     * Return macro: return normally if the call produced a non-null result
     * in EAX, otherwise deliver the pending exception.
     */
MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
    testl %eax, %eax               // eax == 0 ?
    jz  1f                         // if eax == 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
898
    /*
     * Return macro: return normally if EAX is zero (success code from the
     * runtime call), otherwise deliver the pending exception.
     */
MACRO0(RETURN_IF_EAX_ZERO)
    testl %eax, %eax               // eax == 0 ?
    jnz  1f                        // if eax != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
906
    /*
     * Return macro: return normally if the current thread has no pending
     * exception (checked via the thread-local exception field), otherwise
     * deliver it.
     */
MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
    jne 1f                                            // if exception field != 0 goto 1
    ret                                               // return
1:                                                    // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
914
// Generate the allocation entrypoints for each allocator.
// The generic generated versions are used for all allocators except the ones
// below that have hand-written x86 fast paths later in this file.
GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS

// Comment out allocators that have x86 specific asm.
// Region TLAB:
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_region_tlab, RegionTLAB)
// Normal TLAB:
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_tlab, TLAB)
945
// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc).
//
// Fast path: pops a slot off the thread-local rosalloc run for the object's
// size bracket and pushes the new object on the thread-local allocation
// stack.  Falls back to cxx_name(klass, Thread*) on any check failure.
//
// Fixes vs previous revision:
//  - The slow path adjusted ESP by 8 without a matching CFI_ADJUST_CFA_OFFSET,
//    breaking stack unwinding across the runtime call.
//  - The slow path hard-coded artAllocObjectFromCodeResolvedRosAlloc instead
//    of CALLVAR(cxx_name), so the `initialized` instantiation called the
//    wrong runtime function.
//  - Comments referred to EDI where the code uses ECX.
MACRO2(ART_QUICK_ALLOC_OBJECT_ROSALLOC, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    // Fast path rosalloc allocation.
    // eax: type/return value
    // ecx, ebx, edx: free
    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
                                                        // Check if the thread local allocation
                                                        // stack has room
    movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %ecx
    cmpl THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%ebx), %ecx
    jae  .Lslow_path\c_name

    movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%eax), %ecx  // Load the object size (ecx)
                                                        // Check if the size is for a thread
                                                        // local allocation. Also does the
                                                        // finalizable and initialization check.
    cmpl LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %ecx
    ja   .Lslow_path\c_name
    shrl LITERAL(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), %ecx // Calculate the rosalloc bracket index
                                                            // from object size.
                                                        // Load thread local rosalloc run (ebx)
                                                        // Subtract __SIZEOF_POINTER__ to subtract
                                                        // one from ecx as there is no 0 byte run
                                                        // and the size is already aligned.
    movl (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)(%ebx, %ecx, __SIZEOF_POINTER__), %ebx
                                                        // Load free_list head (ecx),
                                                        // this will be the return value.
    movl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx), %ecx
    jecxz   .Lslow_path\c_name                          // Empty free list -> slow path.
                                                        // Point of no slow path. Won't go to
                                                        // the slow path from here on.
                                                        // Load the next pointer of the head
                                                        // and update head of free list with
                                                        // next pointer
    movl ROSALLOC_SLOT_NEXT_OFFSET(%ecx), %edx
    movl %edx, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx)
                                                        // Decrement size of free list by 1
    decl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)(%ebx)
                                                        // Store the class pointer in the
                                                        // header. This also overwrites the
                                                        // next pointer. The offsets are
                                                        // asserted to match.
#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
#error "Class pointer needs to overwrite next pointer."
#endif
    POISON_HEAP_REF eax
    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%ecx)
    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
                                                        // Push the new object onto the thread
                                                        // local allocation stack and
                                                        // increment the thread local
                                                        // allocation stack top.
    movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %eax
    movl %ecx, (%eax)
    addl LITERAL(COMPRESSED_REFERENCE_SIZE), %eax
    movl %eax, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx)
                                                        // No fence needed for x86.
    movl %ecx, %eax                                     // Move object to return register
    ret
.Lslow_path\c_name:
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx          // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                        // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                     // pass the class (arg0)
    call CALLVAR(cxx_name)                       // cxx_name(arg0, Thread*)
    addl LITERAL(16), %esp                       // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER      // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
1020
// Instantiate the rosalloc entrypoints for the resolved and initialized cases.
ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc
ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc
1023
// The common fast path code for art_quick_alloc_object_resolved/initialized_tlab
// and art_quick_alloc_object_resolved/initialized_region_tlab.
//
// EAX: type/return_value
// Expects the caller to have pushed EDI (it is popped here on success before ret).
// Clobbers EBX, ECX, EDX.  Falls through to slowPathLabel if the object does
// not fit in the remaining TLAB space.
MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH, slowPathLabel)
    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
    movl THREAD_LOCAL_END_OFFSET(%ebx), %edi            // Load thread_local_end.
    subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi            // Compute the remaining buffer size.
    movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%eax), %ecx  // Load the object size.
    cmpl %edi, %ecx                                     // Check if it fits.
    ja   VAR(slowPathLabel)
    movl THREAD_LOCAL_POS_OFFSET(%ebx), %edx            // Load thread_local_pos
                                                        // as allocated object.
    addl %edx, %ecx                                     // Add the object size.
    movl %ecx, THREAD_LOCAL_POS_OFFSET(%ebx)            // Update thread_local_pos.
    incl THREAD_LOCAL_OBJECTS_OFFSET(%ebx)              // Increase thread_local_objects.
                                                        // Store the class pointer in the header.
                                                        // No fence needed for x86.
    POISON_HEAP_REF eax
    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%edx)
    movl %edx, %eax                                     // Return the new object.
    POP edi                                             // Restore caller-pushed edi.
    ret                                                 // Fast path succeeded.
END_MACRO
1048
// The common slow path code for art_quick_alloc_object_resolved/initialized_tlab
// and art_quick_alloc_object_resolved/initialized_region_tlab.
// Pops the caller-pushed EDI, then does a standard one-arg downcall to
// cxx_name(klass, Thread*) under the kSaveRefsOnly frame.
MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH, cxx_name)
    POP edi                                             // Undo the fast-path's saved edi.
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx                 // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                               // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                        // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                            // pass the class (arg0)
    call CALLVAR(cxx_name)                              // cxx_name(arg0, Thread*)
    addl LITERAL(16), %esp                              // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                        // restore frame up to return address
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER             // return or deliver exception
END_MACRO
1066
    /*
     * Glue macro: defines a TLAB allocation entrypoint by pushing EDI
     * (scratch used by the fast path) and chaining the shared fast path
     * with the shared slow path.
     */
MACRO2(ART_QUICK_ALLOC_OBJECT_TLAB, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    // Fast path tlab allocation.
    // EAX: type
    // EBX, ECX, EDX: free.
    PUSH edi                                            // Saved here; popped in fast/slow path.
    ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH .Lslow_path\c_name
.Lslow_path\c_name:
    ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH RAW_VAR(cxx_name)
    END_FUNCTION VAR(c_name)
END_MACRO
1078
// Instantiate the TLAB and region-TLAB object allocation entrypoints.
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_tlab, artAllocObjectFromCodeResolvedTLAB
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_tlab, artAllocObjectFromCodeInitializedTLAB
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_region_tlab, artAllocObjectFromCodeResolvedRegionTLAB
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_region_tlab, artAllocObjectFromCodeInitializedRegionTLAB
1083
// The fast path code for art_quick_alloc_array_region_tlab.
// Inputs: EAX: the class, ECX: int32_t component_count, EDX: total_size
// Free temp: EBX
// Output: EAX: return value.
// NOTE(review): also uses EDI as a temp; the caller (GENERATE_ALLOC_ARRAY_TLAB)
// pushes EDI beforehand and this macro pops it on the success path.
MACRO1(ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE, slowPathLabel)
    mov %fs:THREAD_SELF_OFFSET, %ebx                          // ebx = thread
    // Mask out the unaligned part to make sure we are 8 byte aligned.
    andl LITERAL(OBJECT_ALIGNMENT_MASK_TOGGLED), %edx
    movl THREAD_LOCAL_END_OFFSET(%ebx), %edi
    subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi                  // edi = remaining TLAB space.
    cmpl %edi, %edx                                           // Check if it fits.
    ja   RAW_VAR(slowPathLabel)
    movl THREAD_LOCAL_POS_OFFSET(%ebx), %edi                  // edi = the new array object.
    addl %edi, %edx                                            // Add the object size.
    movl %edx, THREAD_LOCAL_POS_OFFSET(%ebx)                   // Update thread_local_pos_
    addl LITERAL(1), THREAD_LOCAL_OBJECTS_OFFSET(%ebx)         // Increase thread_local_objects.
                                                               // Store the class pointer in the
                                                               // header.
                                                               // No fence needed for x86.
    POISON_HEAP_REF eax
    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%edi)
    movl %ecx, MIRROR_ARRAY_LENGTH_OFFSET(%edi)                // Store the array length.
    movl %edi, %eax
    POP edi                                                    // Restore caller-pushed edi.
    ret                                                        // Fast path succeeded.
END_MACRO
1110
    /*
     * Computes the allocation size (EDX) for an array of unknown component
     * size: reads the component size shift from the class's primitive type
     * field.  Guarded by int3 — this variant is not expected to be reached
     * at runtime (kept for reference only).
     */
MACRO1(COMPUTE_ARRAY_SIZE_UNKNOWN, slow_path)
    // We should never enter here. Code is provided for reference.
    int3
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8), %ecx
    ja RAW_VAR(slow_path)
    PUSH ecx
    movl %ecx, %edx
    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%eax), %ecx        // Load component type.
    UNPOISON_HEAP_REF ecx
    movl MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET(%ecx), %ecx // Load primitive type.
    shr MACRO_LITERAL(PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT), %ecx        // Get component size shift.
    sall %cl, %edx                                              // Calculate array count shifted.
    // Add array header + alignment rounding.
    add MACRO_LITERAL(MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK), %edx
    // Add 4 extra bytes if we are doing a long array.
    add MACRO_LITERAL(1), %ecx
    and MACRO_LITERAL(4), %ecx
#if MIRROR_WIDE_ARRAY_DATA_OFFSET != MIRROR_INT_ARRAY_DATA_OFFSET + 4
#error Long array data offset must be 4 greater than int array data offset.
#endif
    addl %ecx, %edx
    POP ecx
END_MACRO
1136
    /*
     * Computes the allocation size (EDX) for a byte-sized-component array:
     * count + header + alignment rounding.  Jumps to slow_path for large
     * objects and negative counts (unsigned compare).
     */
MACRO1(COMPUTE_ARRAY_SIZE_8, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL(MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)(%ecx), %edx
END_MACRO
1146
    /*
     * Computes the allocation size (EDX) for a 2-byte-component array:
     * (count + scaled header/rounding) << 1.  Jumps to slow_path for large
     * objects and negative counts (unsigned compare).
     */
MACRO1(COMPUTE_ARRAY_SIZE_16, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 2), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal ((MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 2)(%ecx), %edx
    sall MACRO_LITERAL(1), %edx
END_MACRO
1157
    /*
     * Computes the allocation size (EDX) for a 4-byte-component array:
     * (count + scaled header/rounding) << 2.  Jumps to slow_path for large
     * objects and negative counts (unsigned compare).
     */
MACRO1(COMPUTE_ARRAY_SIZE_32, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 4), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal ((MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 4)(%ecx), %edx
    sall MACRO_LITERAL(2), %edx
END_MACRO
1168
    /*
     * Computes the allocation size (EDX) for an 8-byte-component array:
     * (count + scaled wide header/rounding) << 3.  Jumps to slow_path for
     * large objects and negative counts (unsigned compare).
     */
MACRO1(COMPUTE_ARRAY_SIZE_64, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal ((MIRROR_WIDE_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 8)(%ecx), %edx
    sall MACRO_LITERAL(3), %edx
END_MACRO
1179
    /*
     * Defines a TLAB array allocation entrypoint: size_setup computes the
     * total size into EDX, the shared fast path bump-allocates, and the
     * slow path falls back to cxx_name(klass, component_count, Thread*)
     * under the kSaveRefsOnly frame.
     */
MACRO3(GENERATE_ALLOC_ARRAY_TLAB, c_entrypoint, cxx_name, size_setup)
    DEFINE_FUNCTION VAR(c_entrypoint)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    PUSH edi                                                   // Scratch for the fast path.
    CALL_MACRO(size_setup) .Lslow_path\c_entrypoint
    ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE .Lslow_path\c_entrypoint
.Lslow_path\c_entrypoint:
    POP edi                                                    // Undo the fast path's saved edi.
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx                        // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                                   // alignment padding
    pushl %fs:THREAD_SELF_OFFSET                               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                                   // pass component_count (arg1)
    PUSH eax                                                   // pass klass (arg0)
    call CALLVAR(cxx_name)                                     // cxx_name(arg0, arg1, Thread*)
    addl LITERAL(16), %esp                                     // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                               // restore frame up to return address
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER                    // return or deliver exception
    END_FUNCTION VAR(c_entrypoint)
END_MACRO
1202
1203
// Instantiate the TLAB and region-TLAB array allocation entrypoints for each
// component size, then the resolve/initialize entrypoints and the
// handle-fill-data helper.
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_8
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_16
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_64

GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_8
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_16
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64

ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_resolve_type, artResolveTypeFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_type_and_verify_access, artResolveTypeAndVerifyAccessFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_handle, artResolveMethodHandleFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_type, artResolveMethodTypeFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode

TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
1224
    /*
     * Entry point for locking an object (monitor-enter).
     * EAX: the object to lock; null or any contended/inflated/overflow case
     * falls back to artLockObjectFromCode.  The fast path acquires or
     * recursively increments a thin lock via `lock cmpxchg` on the object's
     * lock word, preserving the read barrier / gc state bits.
     */
DEFINE_FUNCTION art_quick_lock_object
    testl %eax, %eax                      // null check object/eax
    jz   .Lslow_lock
.Lretry_lock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
    test LITERAL(LOCK_WORD_STATE_MASK_SHIFTED), %ecx  // test the 2 high bits.
    jne  .Lslow_lock                      // slow path if either of the two high bits are set.
    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx  // zero the gc bits.
    test %ecx, %ecx
    jnz  .Lalready_thin                   // lock word contains a thin lock
    // unlocked case - edx: original lock word, eax: obj.
    movl %eax, %ecx                       // remember object in case of retry
    movl %edx, %eax                       // eax: lock word zero except for read barrier bits.
    movl %fs:THREAD_ID_OFFSET, %edx       // load thread id.
    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
    ret
.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), eax: obj.
    movl %fs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
    cmpw %cx, %dx                         // do we hold the lock already?
    jne  .Lslow_lock
    movl %edx, %ecx                       // copy the lock word to check count overflow.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx  // zero the read barrier bits.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count for overflow check.
    test LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED), %ecx  // overflowed if the first gc state bit is set.
    jne  .Lslow_lock                      // count overflowed so go slow
    movl %eax, %ecx                       // save obj to use eax for cmpxchg.
    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
    // update lockword, cmpxchg necessary for read barrier bits.
    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
    ret
.Llock_cmpxchg_fail:
    movl  %ecx, %eax                      // restore eax
    jmp  .Lretry_lock
.Lslow_lock:
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object
1277
    /*
     * Object-lock entrypoint used when the compiler does not inline the
     * fast path: unconditionally calls the artLockObjectFromCode runtime
     * slow path under a SaveRefsOnly frame.
     * In:  eax = object to lock.
     * Out: eax = 0 on success (RETURN_IF_EAX_ZERO delivers the pending
     *      exception otherwise).
     */
DEFINE_FUNCTION art_quick_lock_object_no_inline
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments (8 padding + Thread* + object)
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object_no_inline
1292
1293
    /*
     * Object-unlock entrypoint with an inlined thin-lock fast path.
     * In:  eax = object to unlock (null goes to the slow path).
     * Fast path: if the lock word is thin and owned by the current thread,
     * either clear it (count reaching zero) or decrement the recursion
     * count; fat locks (monitors) and mismatched owners fall back to
     * artUnlockObjectFromCode.
     * Out: eax = 0 on success; slow path delivers the pending exception
     *      otherwise.
     */
DEFINE_FUNCTION art_quick_unlock_object
    testl %eax, %eax                      // null check object/eax
    jz   .Lslow_unlock
.Lretry_unlock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
    movl %fs:THREAD_ID_OFFSET, %edx       // edx := thread id
    test LITERAL(LOCK_WORD_STATE_MASK_SHIFTED), %ecx
    jnz  .Lslow_unlock                    // lock word contains a monitor
    cmpw %cx, %dx                         // does the thread id match?
    jne  .Lslow_unlock
    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %edx  // zero the gc bits.
    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx
    jae  .Lrecursive_thin_unlock
    // Count would become zero: release the lock, keeping only the gc bits.
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %eax, %edx                       // edx: obj
    movl %ecx, %eax                       // eax: old lock word.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED), %ecx  // ecx: new lock word zero except original rb bits.
#ifndef USE_READ_BARRIER
    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // no rb bits to race with: plain store suffices.
#else
    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
#endif
    ret
.Lrecursive_thin_unlock:  // ecx: original lock word, eax: obj
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %eax, %edx                       // edx: obj
    movl %ecx, %eax                       // eax: old lock word.
    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // ecx: new lock word with decremented count.
#ifndef USE_READ_BARRIER
    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
#else
    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
#endif
    ret
.Lunlock_cmpxchg_fail:  // edx: obj
    movl %edx, %eax                       // restore eax
    jmp  .Lretry_unlock
.Lslow_unlock:
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object
1348
    /*
     * Object-unlock entrypoint used when the compiler does not inline the
     * fast path: unconditionally calls artUnlockObjectFromCode under a
     * SaveRefsOnly frame.
     * In:  eax = object to unlock.
     * Out: eax = 0 on success; pending exception delivered otherwise.
     */
DEFINE_FUNCTION art_quick_unlock_object_no_inline
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments (8 padding + Thread* + object)
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object_no_inline
1363
    /*
     * instanceof entrypoint: forwards eax/ecx to artInstanceOfFromCode and
     * returns its result in eax. No runtime frame is set up; only the
     * 16-byte call alignment is maintained via the extra eax push.
     * NOTE(review): the per-push comments below (arg1 "checked class",
     * arg2 "obj->klass") read swapped relative to the callee signature
     * (Object* obj, Class* ref_klass) — confirm against the call sites.
     */
DEFINE_FUNCTION art_quick_instance_of
    PUSH eax                              // alignment padding
    PUSH ecx                              // pass arg2 - obj->klass
    PUSH eax                              // pass arg1 - checked class
    call SYMBOL(artInstanceOfFromCode)    // (Object* obj, Class* ref_klass)
    addl LITERAL(12), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_instance_of
1373
    /*
     * checkcast entrypoint.
     * In:  eax = object, ecx = target class (null target means the
     *      bitstring type check already failed — throw immediately).
     * Returns normally when the object is assignable to the target class;
     * otherwise builds a SaveAllCalleeSaves frame and calls
     * artThrowClassCastExceptionForObject, which does not return.
     */
DEFINE_FUNCTION art_quick_check_instance_of
    // Type check using the bit string passes null as the target class. In that case just throw.
    testl %ecx, %ecx
    jz .Lthrow_class_cast_exception_for_bitstring_check

    PUSH eax                              // alignment padding
    PUSH ecx                              // pass arg2 - checked class
    PUSH eax                              // pass arg1 - obj
    call SYMBOL(artInstanceOfFromCode)    // (Object* obj, Class* ref_klass)
    testl %eax, %eax
    jz .Lthrow_class_cast_exception       // jump forward if not assignable
    addl LITERAL(12), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
    CFI_ADJUST_CFA_OFFSET(12)             // Reset unwind info so following code unwinds.

.Lthrow_class_cast_exception:
    POP eax                               // pop arguments (also restores obj into eax)
    POP ecx                               // restore checked class into ecx
    addl LITERAL(4), %esp                 // drop the alignment padding
    CFI_ADJUST_CFA_OFFSET(-4)

.Lthrow_class_cast_exception_for_bitstring_check:
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                              // alignment padding
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                              // pass arg2
    PUSH eax                              // pass arg1
    call SYMBOL(artThrowClassCastExceptionForObject)  // (Object* src, Class* dest, Thread*)
    UNREACHABLE
END_FUNCTION art_quick_check_instance_of
1407
1408// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
// Used after READ_BARRIER so the register that received the loaded reference
// (dest_reg) is not clobbered by the restore; its saved slot is simply skipped.
MACRO2(POP_REG_NE, reg, exclude_reg)
    .ifc RAW_VAR(reg), RAW_VAR(exclude_reg)
      addl MACRO_LITERAL(4), %esp         // skip the slot, keep CFI in sync
      CFI_ADJUST_CFA_OFFSET(-4)
    .else
      POP RAW_VAR(reg)
    .endif
END_MACRO
1417
1418    /*
1419     * Macro to insert read barrier, only used in art_quick_aput_obj.
1420     * obj_reg and dest_reg are registers, offset is a defined literal such as
1421     * MIRROR_OBJECT_CLASS_OFFSET.
1422     * pop_eax is a boolean flag, indicating if eax is popped after the call.
1423     * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
1424     */
MACRO4(READ_BARRIER, obj_reg, offset, dest_reg, pop_eax)
#ifdef USE_READ_BARRIER
    // Slow-path read barrier: spill the caller's registers, call
    // artReadBarrierSlow(ref, obj_reg, offset), then restore everything
    // except dest_reg (which now holds the to-space reference).
    PUSH eax                        // save registers used in art_quick_aput_obj
    PUSH ebx
    PUSH edx
    PUSH ecx
    // Outgoing argument set up
    pushl MACRO_LITERAL((RAW_VAR(offset)))  // pass offset, double parentheses are necessary
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH RAW_VAR(obj_reg)           // pass obj_reg
    PUSH eax                        // pass ref, just pass eax for now since parameter ref is unused
    call SYMBOL(artReadBarrierSlow) // artReadBarrierSlow(ref, obj_reg, offset)
    // No need to unpoison return value in eax, artReadBarrierSlow() would do the unpoisoning.
    .ifnc RAW_VAR(dest_reg), eax
      movl %eax, REG_VAR(dest_reg)  // save loaded ref in dest_reg
    .endif
    addl MACRO_LITERAL(12), %esp    // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    POP_REG_NE ecx, RAW_VAR(dest_reg) // Restore args except dest_reg
    POP_REG_NE edx, RAW_VAR(dest_reg)
    POP_REG_NE ebx, RAW_VAR(dest_reg)
    .ifc RAW_VAR(pop_eax), true
      POP_REG_NE eax, RAW_VAR(dest_reg)
    .endif
#else
    // No read barrier configured: a plain (unpoisoned) heap load suffices.
    movl RAW_VAR(offset)(REG_VAR(obj_reg)), REG_VAR(dest_reg)
    UNPOISON_HEAP_REF RAW_VAR(dest_reg)
#endif  // USE_READ_BARRIER
END_MACRO
1454
    /*
     * Object array store (aput-object) with type check.
     * In:  eax = array, ecx = index, edx = value to store.
     * Fast path: value's class equals the array's component type — store
     * and mark the card. Otherwise call artIsAssignableFromCode; on
     * failure, throw ArrayStoreException via a SaveAllCalleeSaves frame.
     * Null stores skip both the check and the card mark.
     */
DEFINE_FUNCTION art_quick_aput_obj
    test %edx, %edx              // store of null
    jz .Ldo_aput_null
    READ_BARRIER eax, MIRROR_OBJECT_CLASS_OFFSET, ebx, true   // ebx := array->klass
    READ_BARRIER ebx, MIRROR_CLASS_COMPONENT_TYPE_OFFSET, ebx, true  // ebx := component type
    // value's type == array's component type - trivial assignability
#if defined(USE_READ_BARRIER)
    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, false  // eax := value->klass
    cmpl %eax, %ebx
    POP eax                      // restore eax from the push in the beginning of READ_BARRIER macro
    // This asymmetric push/pop saves a push of eax and maintains stack alignment.
#elif defined(USE_HEAP_POISONING)
    PUSH eax                     // save eax
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
    UNPOISON_HEAP_REF eax
    cmpl %eax, %ebx
    POP eax                      // restore eax
#else
    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx
#endif
    jne .Lcheck_assignability
.Ldo_aput:
    POISON_HEAP_REF edx
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
    // Mark the card for the array (dirty value is the low byte of the
    // card table base, per ART's card-marking convention).
    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
    shrl LITERAL(CARD_TABLE_CARD_SHIFT), %eax
    movb %dl, (%edx, %eax)
    ret
.Ldo_aput_null:
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
    ret
.Lcheck_assignability:
    PUSH eax                      // save arguments
    PUSH ecx
    PUSH edx
#if defined(USE_READ_BARRIER)
    subl LITERAL(4), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, true
    subl LITERAL(4), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                      // pass arg2 - type of the value to be stored
#elif defined(USE_HEAP_POISONING)
    subl LITERAL(8), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
    UNPOISON_HEAP_REF eax
    PUSH eax                      // pass arg2 - type of the value to be stored
#else
    subl LITERAL(8), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl MIRROR_OBJECT_CLASS_OFFSET(%edx)  // pass arg2 - type of the value to be stored
    CFI_ADJUST_CFA_OFFSET(4)
#endif
    PUSH ebx                      // pass arg1 - component type of the array
    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    testl %eax, %eax
    jz   .Lthrow_array_store_exception
    POP  edx                      // restore saved arguments and redo the store
    POP  ecx
    POP  eax
    POISON_HEAP_REF edx
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)  // do the aput
    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
    shrl LITERAL(CARD_TABLE_CARD_SHIFT), %eax
    movb %dl, (%edx, %eax)
    ret
    CFI_ADJUST_CFA_OFFSET(12)     // 3 POP after the jz for unwinding.
.Lthrow_array_store_exception:
    POP  edx
    POP  ecx
    POP  eax
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                      // alignment padding
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                      // pass arg2 - value
    PUSH eax                      // pass arg1 - array
    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
    UNREACHABLE
END_FUNCTION art_quick_aput_obj
1539
    /*
     * memcpy entrypoint: forwards the managed-code args (eax = dst,
     * ecx = src, edx = size, per the push order below) to libc memcpy
     * through the PLT. EBX is clobbered to set up GOT access.
     */
DEFINE_FUNCTION art_quick_memcpy
    SETUP_GOT_NOSAVE ebx          // clobbers EBX
    PUSH edx                      // pass arg3
    PUSH ecx                      // pass arg2
    PUSH eax                      // pass arg1
    call PLT_SYMBOL(memcpy)       // (void*, const void*, size_t)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_memcpy
1550
    /*
     * Suspend-check entrypoint: saves every register (SaveEverything
     * frame) so the GC can inspect/relocate all values, then calls
     * artTestSuspendFromCode(Thread*) and returns with all registers
     * restored.
     */
DEFINE_FUNCTION art_quick_test_suspend
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx, RUNTIME_SAVE_EVERYTHING_FOR_SUSPEND_CHECK_METHOD_OFFSET  // save everything for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                      // push padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artTestSuspendFromCode)               // (Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_EVERYTHING_FRAME                     // restore frame up to return address
    ret                                               // return
END_FUNCTION art_quick_test_suspend
1564
    /*
     * double-to-long conversion: spills xmm0 (the double argument) to the
     * stack and calls the C helper art_d2l. Result returned per the
     * 32-bit convention (edx:eax for a 64-bit value).
     */
DEFINE_FUNCTION art_quick_d2l
    subl LITERAL(12), %esp        // alignment padding, room for argument
    CFI_ADJUST_CFA_OFFSET(12)
    movsd %xmm0, 0(%esp)          // arg a
    call SYMBOL(art_d2l)          // (jdouble a)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_d2l
1574
    /*
     * float-to-long conversion: spills xmm0 (the float argument) to the
     * stack and calls the C helper art_f2l. Result returned per the
     * 32-bit convention (edx:eax for a 64-bit value).
     */
DEFINE_FUNCTION art_quick_f2l
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    movss %xmm0, 0(%esp)          // arg a
    call SYMBOL(art_f2l)          // (jfloat a)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_f2l
1584
    /*
     * 64-bit signed division: a (ecx:eax) / b (ebx:edx), delegated to the
     * C helper artLdiv. The single addl pops both the padding and the
     * four pushed argument words (12 + 16 = 28).
     */
DEFINE_FUNCTION art_quick_ldiv
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    PUSH ebx                      // pass arg4 b.hi
    PUSH edx                      // pass arg3 b.lo
    PUSH ecx                      // pass arg2 a.hi
    PUSH eax                      // pass arg1 a.lo
    call SYMBOL(artLdiv)          // (jlong a, jlong b)
    addl LITERAL(28), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-28)
    ret
END_FUNCTION art_quick_ldiv
1597
    /*
     * 64-bit signed remainder: a (ecx:eax) % b (ebx:edx), delegated to
     * the C helper artLmod. Mirrors art_quick_ldiv's argument layout.
     */
DEFINE_FUNCTION art_quick_lmod
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    PUSH ebx                      // pass arg4 b.hi
    PUSH edx                      // pass arg3 b.lo
    PUSH ecx                      // pass arg2 a.hi
    PUSH eax                      // pass arg1 a.lo
    call SYMBOL(artLmod)          // (jlong a, jlong b)
    addl LITERAL(28), %esp        // pop arguments (12 padding + 16 args)
    CFI_ADJUST_CFA_OFFSET(-28)
    ret
END_FUNCTION art_quick_lmod
1610
    /*
     * 64-bit multiply via 32x32 partial products:
     * result.lo = a.lo * b.lo (low half);
     * result.hi = high(a.lo * b.lo) + a.lo * b.hi + a.hi * b.lo.
     * In:  a = ecx:eax, b = ebx:edx.  Out: product in edx:eax.
     */
DEFINE_FUNCTION art_quick_lmul
    imul %eax, %ebx               // ebx = a.lo(eax) * b.hi(ebx)
    imul %edx, %ecx               // ecx = b.lo(edx) * a.hi(ecx)
    mul  %edx                     // edx:eax = a.lo(eax) * b.lo(edx)
    add  %ebx, %ecx
    add  %ecx, %edx               // edx += (a.lo * b.hi) + (b.lo * a.hi)
    ret
END_FUNCTION art_quick_lmul
1619
    /*
     * 64-bit shift left. In: value = ecx:eax (hi:lo), shift = edx.
     * shld/shl only use cl mod 32, so for shifts >= 32 (bit 5 of cl set)
     * the low word moves to the high word and the low word is zeroed.
     * Out: result in edx:eax.
     */
DEFINE_FUNCTION art_quick_lshl
    // ecx:eax << edx
    xchg %edx, %ecx               // cl := shift amount, edx := hi word
    shld %cl,%eax,%edx            // hi = (hi << cl) | (lo >> (32 - cl))
    shl  %cl,%eax                 // lo <<= cl
    test LITERAL(32), %cl         // shift >= 32?
    jz  1f
    mov %eax, %edx                // hi := lo
    xor %eax, %eax                // lo := 0
1:
    ret
END_FUNCTION art_quick_lshl
1632
    /*
     * 64-bit arithmetic shift right. In: value = ecx:eax (hi:lo),
     * shift = edx. For shifts >= 32 the high word moves to the low word
     * and the high word is filled with the sign bit (sar by 31).
     * Out: result in edx:eax.
     */
DEFINE_FUNCTION art_quick_lshr
    // ecx:eax >> edx
    xchg %edx, %ecx               // cl := shift amount, edx := hi word
    shrd %cl,%edx,%eax            // lo = (lo >> cl) | (hi << (32 - cl))
    sar  %cl,%edx                 // hi >>= cl (arithmetic)
    test LITERAL(32),%cl          // shift >= 32?
    jz  1f
    mov %edx, %eax                // lo := hi
    sar LITERAL(31), %edx         // hi := sign extension
1:
    ret
END_FUNCTION art_quick_lshr
1645
    /*
     * 64-bit logical (unsigned) shift right. In: value = ecx:eax (hi:lo),
     * shift = edx. For shifts >= 32 the high word moves to the low word
     * and the high word is zeroed. Out: result in edx:eax.
     */
DEFINE_FUNCTION art_quick_lushr
    // ecx:eax >>> edx
    xchg %edx, %ecx               // cl := shift amount, edx := hi word
    shrd %cl,%edx,%eax            // lo = (lo >> cl) | (hi << (32 - cl))
    shr  %cl,%edx                 // hi >>= cl (logical)
    test LITERAL(32),%cl          // shift >= 32?
    jz  1f
    mov %edx, %eax                // lo := hi
    xor %edx, %edx                // hi := 0
1:
    ret
END_FUNCTION art_quick_lushr
1658
1659// Note: Functions `art{Get,Set}<Kind>{Static,Instance}FromCompiledCode` are
1660// defined with a macro in runtime/entrypoints/quick/quick_field_entrypoints.cc.
1661
// Static field getters: one argument (field index), value returned (or
// pending exception delivered) by RETURN_OR_DELIVER_PENDING_EXCEPTION.
ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION

// Instance field getters: two arguments (field index, object).
TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION

// Static field setters (<= 32-bit and object): success is eax == 0.
TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCompiledCode, RETURN_IF_EAX_ZERO

// Three-argument setters; 64-bit instance set needs a hand-written stub
// (art_quick_set64_instance below) because it takes four words of args.
THREE_ARG_REF_DOWNCALL art_quick_set64_static, artSet64StaticFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_EAX_ZERO
1688
// Call artSet64InstanceFromCompiledCode with 4 word size arguments:
// eax = field_idx, ecx = object, edx:ebx = new 64-bit value.
DEFINE_FUNCTION art_quick_set64_instance
    movd %ebx, %xmm0              // stash ebx: the frame-setup macro below clobbers it
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx  // save ref containing registers for GC
    movd %xmm0, %ebx              // recover the high half of new_val
    // Outgoing argument set up
    subl LITERAL(12), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ebx                      // pass high half of new_val
    PUSH edx                      // pass low half of new_val
    PUSH ecx                      // pass object
    PUSH eax                      // pass field_idx
    call SYMBOL(artSet64InstanceFromCompiledCode)  // (field_idx, Object*, new_val, Thread*)
    addl LITERAL(32), %esp        // pop arguments (12 padding + Thread* + 4 args)
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_SAVE_REFS_ONLY_FRAME  // restore frame up to return address
    RETURN_IF_EAX_ZERO            // return or deliver exception
END_FUNCTION art_quick_set64_instance
1709
    /*
     * Proxy method invocation handler: builds a SaveRefsAndArgs frame
     * (the proxy ArtMethod* is already in eax) and calls
     * artQuickProxyInvokeHandler(proxy method, receiver, Thread*, SP).
     * The 64-bit result (edx:eax) is duplicated into xmm0 because quick
     * code may expect a floating-point return there.
     */
DEFINE_FUNCTION art_quick_proxy_invoke_handler
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX
    PUSH esp                      // pass SP
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                      // pass receiver
    PUSH eax                      // pass proxy method
    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
    movd %eax, %xmm0              // place return value also into floating point return value
    movd %edx, %xmm1
    punpckldq %xmm1, %xmm0        // xmm0 = edx:eax as one 64-bit value
    addl LITERAL(16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY), %esp
    CFI_ADJUST_CFA_OFFSET(-(16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY))
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
END_FUNCTION art_quick_proxy_invoke_handler
1726
1727    /*
1728     * Called to resolve an imt conflict.
1729     * eax is the conflict ArtMethod.
1730     * xmm7 is a hidden argument that holds the target interface method's dex method index.
1731     *
1732     * Note that this stub writes to eax.
1733     * Because of lack of free registers, it also saves and restores edi.
1734     */
DEFINE_FUNCTION art_quick_imt_conflict_trampoline
    // See the header comment above: eax = conflict ArtMethod,
    // xmm7 = target interface method's dex method index.
    PUSH EDI
    PUSH ESI
    PUSH EDX
    movl 16(%esp), %edi         // Load referrer (just above the 3 pushes + return addr).
    movd %xmm7, %esi            // Get target method index stored in xmm7, remember it in ESI.
    // If the method is obsolete, just go through the dex cache miss slow path.
    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
    testl LITERAL(ACC_OBSOLETE_METHOD), ART_METHOD_ACCESS_FLAGS_OFFSET(%edi)
    jnz .Limt_conflict_trampoline_dex_cache_miss
    movl ART_METHOD_DECLARING_CLASS_OFFSET(%edi), %edi // Load declaring class (no read barrier).
    movl MIRROR_CLASS_DEX_CACHE_OFFSET(%edi), %edi     // Load the DexCache (without read barrier).
    UNPOISON_HEAP_REF edi
    movl MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET(%edi), %edi  // Load the resolved methods.
    pushl ART_METHOD_JNI_OFFSET_32(%eax)  // Push ImtConflictTable.
    CFI_ADJUST_CFA_OFFSET(4)
    movl %esi, %eax             // Copy the method index from ESI.
    andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax  // Calculate DexCache method slot index.
    leal 0(%edi, %eax, 2 * __SIZEOF_POINTER__), %edi  // Load DexCache method slot address.
    // Atomically load the 8-byte {method, index} pair: with EDX:EAX equal
    // to ECX:EBX, LOCK CMPXCHG8B never modifies memory and acts as a
    // relaxed 64-bit atomic load into EDX:EAX.
    mov %ecx, %edx              // Make EDX:EAX == ECX:EBX so that LOCK CMPXCHG8B makes no changes.
    mov %ebx, %eax              // (The actual value does not matter.)
    lock cmpxchg8b (%edi)       // Relaxed atomic load EDX:EAX from the dex cache slot.
    popl %edi                   // Pop ImtConflictTable.
    CFI_ADJUST_CFA_OFFSET(-4)
    cmp %edx, %esi              // Compare method index to see if we had a DexCache method hit.
    jne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
    // EDI walks {interface method, implementation} pairs; EAX is the
    // resolved interface method we search for.
    cmpl %eax, 0(%edi)
    jne .Limt_table_next_entry
    // We successfully hit an entry in the table. Load the target method
    // and jump to it.
    movl __SIZEOF_POINTER__(%edi), %eax
    CFI_REMEMBER_STATE
    POP EDX
    POP ESI
    POP EDI
    jmp *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
    CFI_RESTORE_STATE_AND_DEF_CFA(esp, 16)
.Limt_table_next_entry:
    // If the entry is null, the interface method is not in the ImtConflictTable.
    cmpl LITERAL(0), 0(%edi)
    jz .Lconflict_trampoline
    // Iterate over the entries of the ImtConflictTable.
    addl LITERAL(2 * __SIZEOF_POINTER__), %edi
    jmp .Limt_table_iterate
.Lconflict_trampoline:
    // Call the runtime stub to populate the ImtConflictTable and jump to the
    // resolved method.
    CFI_REMEMBER_STATE
    POP EDX
    POP ESI
    POP EDI
    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
    CFI_RESTORE_STATE_AND_DEF_CFA(esp, 16)
.Limt_conflict_trampoline_dex_cache_miss:
    // We're not creating a proper runtime method frame here,
    // artLookupResolvedMethod() is not allowed to walk the stack.

    // Save core register args; EDX is already saved.
    PUSH ebx
    PUSH ecx

    // Save FPR args.
    subl MACRO_LITERAL(32), %esp
    CFI_ADJUST_CFA_OFFSET(32)
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    pushl 32+8+16(%esp)         // Pass referrer (32 FPR spill + 2 GPR pushes + original 16).
    CFI_ADJUST_CFA_OFFSET(4)
    pushl %esi                  // Pass method index.
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artLookupResolvedMethod)  // (uint32_t method_index, ArtMethod* referrer)
    addl LITERAL(8), %esp       // Pop arguments.
    CFI_ADJUST_CFA_OFFSET(-8)

    // Restore FPR args.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3
    addl MACRO_LITERAL(32), %esp
    CFI_ADJUST_CFA_OFFSET(-32)

    // Restore core register args.
    POP ecx
    POP ebx

    cmp LITERAL(0), %eax        // If the method wasn't resolved,
    je .Lconflict_trampoline    //   skip the lookup and go to artInvokeInterfaceTrampoline().
    jmp .Limt_table_iterate
END_FUNCTION art_quick_imt_conflict_trampoline
1829
    /*
     * Method resolution trampoline: saves refs+args, calls
     * artQuickResolutionTrampoline(called, receiver, Thread*, SP) which
     * returns the resolved code pointer (or null with a pending
     * exception), then tail-calls into the resolved code.
     */
DEFINE_FUNCTION art_quick_resolution_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, ebx
    movl %esp, %edi
    PUSH EDI                      // pass SP. do not just PUSH ESP; that messes up unwinding
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                      // pass receiver
    PUSH eax                      // pass method
    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
    movl %eax, %edi               // remember code pointer in EDI
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    test %eax, %eax               // if code pointer is null goto deliver pending exception
    jz 1f
    RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP
1:
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_resolution_trampoline
1849
    /*
     * Generic JNI trampoline: bridges a managed call into an arbitrary
     * native method without compiled JNI stubs.
     *  1. Save refs+args; anchor the frame in EBP (CFA switches to EBP,
     *     so subsequent ESP adjustments need no CFI updates).
     *  2. Reserve a large scratch area and call
     *     artQuickGenericJniTrampoline to marshal arguments and obtain
     *     the native code pointer.
     *  3. Call the native code, then artQuickGenericJniEndTrampoline to
     *     unmarshal the result and release JNI state.
     *  4. Return edx:eax (also mirrored in xmm0), or deliver a pending
     *     exception.
     */
DEFINE_FUNCTION art_quick_generic_jni_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX
    movl %esp, %ebp               // save SP at callee-save frame
    CFI_DEF_CFA_REGISTER(ebp)
    subl LITERAL(5120), %esp      // reserved area (no CFI adjust needed: CFA is EBP)
    // prepare for artQuickGenericJniTrampoline call
    // (Thread*, managed_sp, reserved_area)
    //   (esp)    4(esp)        8(esp)  <= C calling convention
    //  fs:...      ebp           esp   <= where they are

    movl %esp, %eax
    subl LITERAL(4), %esp         // Padding for 16B alignment.
    pushl %eax                    // Pass reserved area.
    pushl %ebp                    // Pass managed frame SP.
    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    //     eax: pointer to native code, 0 on error.
    //     The bottom of the reserved area contains values for arg registers,
    //     hidden arg register and SP for out args for the call.

    // Check for error (class init check or locking for synchronized native method can throw).
    test %eax, %eax
    jz .Lexception_in_native

    // On x86 there are no registers passed, so no native call args to pop here.

    // Save code pointer in EDX.
    movl %eax, %edx
    // Load hidden arg (EAX) for @CriticalNative.
    movl 16(%esp), %eax
    // Load SP for out args, releasing unneeded reserved area.
    movl 20(%esp), %esp

    // Native call.
    call *%edx

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, result, result_f)
    //  (esp)    4(esp)  12(esp)    <= C calling convention
    //  fs:...  eax:edx   fp0      <= where they are

    subl LITERAL(20), %esp        // Padding & pass float result.
    fstpl (%esp)                  // Pop x87 fp result into the argument slot.
    pushl %edx                    // Pass int result.
    pushl %eax
    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
    call SYMBOL(artQuickGenericJniEndTrampoline)

    // Pending exceptions possible.
    mov %fs:THREAD_EXCEPTION_OFFSET, %ebx
    testl %ebx, %ebx
    jnz .Lexception_in_native

    // Tear down the alloca.
    movl %ebp, %esp
    CFI_DEF_CFA_REGISTER(esp)


    // Tear down the callee-save frame.
    // Remove space for FPR args and EAX
    addl LITERAL(4 + 4 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(-(4 + 4 * 8))

    POP ecx
    addl LITERAL(4), %esp         // Avoid edx, as it may be part of the result.
    CFI_ADJUST_CFA_OFFSET(-4)
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    POP edi
    // Quick expects the return value to be in xmm0.
    movd %eax, %xmm0
    movd %edx, %xmm1
    punpckldq %xmm1, %xmm0
    ret
.Lexception_in_native:
    pushl %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    addl LITERAL(-1), (%esp)  // Remove the GenericJNI tag.
    movl (%esp), %esp
    // Do a call to push a new save-all frame required by the runtime.
    call .Lexception_call
.Lexception_call:
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_generic_jni_trampoline
1938
    /*
     * Bridge from quick (compiled) code into the interpreter: saves
     * refs+args, calls artQuickToInterpreterBridge(method, Thread*, SP),
     * and returns the 64-bit result in edx:eax (mirrored in xmm0).
     * The frame is torn down manually rather than via the restore macro.
     */
DEFINE_FUNCTION art_quick_to_interpreter_bridge
    SETUP_SAVE_REFS_AND_ARGS_FRAME  ebx, ebx  // save frame
    mov %esp, %edx                // remember SP
    PUSH eax                      // alignment padding
    PUSH edx                      // pass SP
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                      // pass  method
    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)

    // Return eax:edx in xmm0 also.
    movd %eax, %xmm0
    movd %edx, %xmm1
    punpckldq %xmm1, %xmm0

    addl LITERAL(48), %esp        // Remove FPRs and EAX, ECX, EDX, EBX.
    CFI_ADJUST_CFA_OFFSET(-48)

    POP ebp                       // Restore callee saves
    POP esi
    POP edi

    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
END_FUNCTION art_quick_to_interpreter_bridge
1965
1966    /*
1967     * Called by managed code, saves callee saves and then calls artInvokeObsoleteMethod
1968     */
1969ONE_ARG_RUNTIME_EXCEPTION art_invoke_obsolete_method_stub, artInvokeObsoleteMethod
1970
1971    /*
1972     * Routine that intercepts method calls and returns.
1973     */
1974DEFINE_FUNCTION art_quick_instrumentation_entry
1975    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, edx
1976    PUSH eax                      // Save eax which will be clobbered by the callee-save method.
1977    subl LITERAL(16), %esp        // Align stack (12 bytes) and reserve space for the SP argument
1978    CFI_ADJUST_CFA_OFFSET(16)     // (4 bytes). We lack the scratch registers to calculate the SP
1979                                  // right now, so we will just fill it in later.
1980    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1981    CFI_ADJUST_CFA_OFFSET(4)
1982    PUSH ecx                      // Pass receiver.
1983    PUSH eax                      // Pass Method*.
1984    leal 32(%esp), %eax           // Put original SP into eax
1985    movl %eax, 12(%esp)           // set SP
1986    call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, SP)
1987
1988    addl LITERAL(28), %esp        // Pop arguments upto saved Method*.
1989    CFI_ADJUST_CFA_OFFSET(-28)
1990
1991    testl %eax, %eax
1992    jz 1f                         // Test for null return (indicating exception) and handle it.
1993
1994    movl 60(%esp), %edi           // Restore edi.
1995    movl %eax, 60(%esp)           // Place code* over edi, just under return pc.
1996    movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
1997    // Place instrumentation exit as return pc. ebx holds the GOT computed on entry.
1998    movl %ebx, 64(%esp)
1999    movl 0(%esp), %eax           // Restore eax.
2000    // Restore FPRs (extra 4 bytes of offset due to EAX push at top).
2001    movsd 8(%esp), %xmm0
2002    movsd 16(%esp), %xmm1
2003    movsd 24(%esp), %xmm2
2004    movsd 32(%esp), %xmm3
2005
2006    // Restore GPRs.
2007    movl 40(%esp), %ecx           // Restore ecx.
2008    movl 44(%esp), %edx           // Restore edx.
2009    movl 48(%esp), %ebx           // Restore ebx.
2010    movl 52(%esp), %ebp           // Restore ebp.
2011    movl 56(%esp), %esi           // Restore esi.
2012    addl LITERAL(60), %esp        // Wind stack back upto code*.
2013    CFI_ADJUST_CFA_OFFSET(-60)
2014    ret                           // Call method (and pop).
20151:
2016    // Make caller handle exception
2017    addl LITERAL(4), %esp
2018    CFI_ADJUST_CFA_OFFSET(-4)
2019    RESTORE_SAVE_REFS_AND_ARGS_FRAME
2020    DELIVER_PENDING_EXCEPTION
2021END_FUNCTION art_quick_instrumentation_entry
2022
// Landing pad for returns from instrumented methods (planted as the return pc
// by art_quick_instrumentation_entry). Reports the return value to
// artInstrumentationMethodExitFromCode, which hands back the real return pc
// (eax) and a deoptimization pc (edx); eax == 0 means deliver an exception.
DEFINE_FUNCTION_CUSTOM_CFA art_quick_instrumentation_exit, 0
    pushl LITERAL(0)              // Push a fake return PC as there will be none on the stack.
    CFI_ADJUST_CFA_OFFSET(4)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx

    movl %esp, %ecx               // Remember SP
    subl LITERAL(8), %esp         // Align stack.
    CFI_ADJUST_CFA_OFFSET(8)
    PUSH edx                      // Save gpr return value. edx and eax need to be together,
                                  // which isn't the case in kSaveEverything frame.
    PUSH eax
    leal 32(%esp), %eax           // Get pointer to fpr_result, in kSaveEverything frame
    movl %esp, %edx               // Get pointer to gpr_result
    PUSH eax                      // Pass fpr_result
    PUSH edx                      // Pass gpr_result
    PUSH ecx                      // Pass SP
    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current.
    CFI_ADJUST_CFA_OFFSET(4)

    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result*, fpr_result*)
    // Return result could have been changed if it's a reference.
    // Copy the (possibly updated) low word back into the saved frame; the
    // 80+32 offset is tied to the kSaveEverything layout plus our pushes.
    movl 16(%esp), %ecx
    movl %ecx, (80+32)(%esp)
    addl LITERAL(32), %esp        // Pop arguments and gpr_result.
    CFI_ADJUST_CFA_OFFSET(-32)

    testl %eax, %eax              // Check if we returned error.
    jz .Ldo_deliver_instrumentation_exception
    testl %edx, %edx
    jnz .Ldeoptimize
    // Normal return.
    movl %eax, FRAME_SIZE_SAVE_EVERYTHING-4(%esp)   // Set return pc.
    RESTORE_SAVE_EVERYTHING_FRAME
    ret
.Ldeoptimize:
    mov %edx, (FRAME_SIZE_SAVE_EVERYTHING-4)(%esp)  // Set return pc.
    RESTORE_SAVE_EVERYTHING_FRAME
    jmp SYMBOL(art_quick_deoptimize)
.Ldo_deliver_instrumentation_exception:
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END_FUNCTION art_quick_instrumentation_exit
2064
2065    /*
2066     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
2067     * will long jump to the upcall with a special exception of -1.
2068     */
2069DEFINE_FUNCTION art_quick_deoptimize
2070    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
2071    subl LITERAL(12), %esp        // Align stack.
2072    CFI_ADJUST_CFA_OFFSET(12)
2073    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
2074    CFI_ADJUST_CFA_OFFSET(4)
2075    call SYMBOL(artDeoptimize)    // (Thread*)
2076    UNREACHABLE
2077END_FUNCTION art_quick_deoptimize
2078
2079    /*
2080     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
2081     * will long jump to the interpreter bridge.
2082     */
2083DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
2084    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
2085    subl LITERAL(8), %esp                      // Align stack.
2086    CFI_ADJUST_CFA_OFFSET(8)
2087    pushl %fs:THREAD_SELF_OFFSET                // Pass Thread::Current().
2088    CFI_ADJUST_CFA_OFFSET(4)
2089    PUSH eax
2090    call SYMBOL(artDeoptimizeFromCompiledCode)  // (DeoptimizationKind, Thread*)
2091    UNREACHABLE
2092END_FUNCTION art_quick_deoptimize_from_compiled_code
2093
2094    /*
2095     * String's compareTo.
2096     *
2097     * On entry:
2098     *    eax:   this string object (known non-null)
2099     *    ecx:   comp string object (known non-null)
2100     */
2101DEFINE_FUNCTION art_quick_string_compareto
2102    PUSH esi                      // push callee save reg
2103    PUSH edi                      // push callee save reg
2104    mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx
2105    mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx
2106    lea MIRROR_STRING_VALUE_OFFSET(%eax), %esi
2107    lea MIRROR_STRING_VALUE_OFFSET(%ecx), %edi
2108#if (STRING_COMPRESSION_FEATURE)
2109    /* Differ cases */
2110    shrl    LITERAL(1), %edx
2111    jnc     .Lstring_compareto_this_is_compressed
2112    shrl    LITERAL(1), %ebx
2113    jnc     .Lstring_compareto_that_is_compressed
2114    jmp     .Lstring_compareto_both_not_compressed
2115.Lstring_compareto_this_is_compressed:
2116    shrl    LITERAL(1), %ebx
2117    jnc     .Lstring_compareto_both_compressed
2118    /* If (this->IsCompressed() && that->IsCompressed() == false) */
2119    mov     %edx, %eax
2120    subl    %ebx, %eax
2121    mov     %edx, %ecx
2122    cmovg   %ebx, %ecx
2123    /* Going into loop to compare each character */
2124    jecxz   .Lstring_compareto_keep_length            // check loop counter (if 0, don't compare)
2125.Lstring_compareto_loop_comparison_this_compressed:
2126    movzbl  (%esi), %edx                              // move *(this_cur_char) byte to long
2127    movzwl  (%edi), %ebx                              // move *(that_cur_char) word to long
2128    addl    LITERAL(1), %esi                          // ++this_cur_char (8-bit)
2129    addl    LITERAL(2), %edi                          // ++that_cur_char (16-bit)
2130    subl    %ebx, %edx
2131    loope   .Lstring_compareto_loop_comparison_this_compressed
2132    cmovne  %edx, %eax                        // return eax = *(this_cur_char) - *(that_cur_char)
2133    jmp     .Lstring_compareto_return
2134.Lstring_compareto_that_is_compressed:
2135    mov     %edx, %eax
2136    subl    %ebx, %eax
2137    mov     %edx, %ecx
2138    cmovg   %ebx, %ecx
2139    /* If (this->IsCompressed() == false && that->IsCompressed()) */
2140    jecxz   .Lstring_compareto_keep_length            // check loop counter, if 0, don't compare
2141.Lstring_compareto_loop_comparison_that_compressed:
2142    movzwl  (%esi), %edx                              // move *(this_cur_char) word to long
2143    movzbl  (%edi), %ebx                              // move *(that_cur_char) byte to long
2144    addl    LITERAL(2), %esi                          // ++this_cur_char (16-bit)
2145    addl    LITERAL(1), %edi                          // ++that_cur_char (8-bit)
2146    subl    %ebx, %edx
2147    loope   .Lstring_compareto_loop_comparison_that_compressed
2148    cmovne  %edx, %eax
2149    jmp     .Lstring_compareto_return         // return eax = *(this_cur_char) - *(that_cur_char)
2150.Lstring_compareto_both_compressed:
2151    /* Calculate min length and count diff */
2152    mov     %edx, %ecx
2153    mov     %edx, %eax
2154    subl    %ebx, %eax
2155    cmovg   %ebx, %ecx
2156    jecxz   .Lstring_compareto_keep_length
2157    repe    cmpsb
2158    je      .Lstring_compareto_keep_length
2159    movzbl  -1(%esi), %eax        // get last compared char from this string (8-bit)
2160    movzbl  -1(%edi), %ecx        // get last compared char from comp string (8-bit)
2161    jmp     .Lstring_compareto_count_difference
2162#endif // STRING_COMPRESSION_FEATURE
2163.Lstring_compareto_both_not_compressed:
2164    /* Calculate min length and count diff */
2165    mov     %edx, %ecx
2166    mov     %edx, %eax
2167    subl    %ebx, %eax
2168    cmovg   %ebx, %ecx
2169    /*
2170     * At this point we have:
2171     *   eax: value to return if first part of strings are equal
2172     *   ecx: minimum among the lengths of the two strings
2173     *   esi: pointer to this string data
2174     *   edi: pointer to comp string data
2175     */
2176    jecxz .Lstring_compareto_keep_length
2177    repe  cmpsw                   // find nonmatching chars in [%esi] and [%edi], up to length %ecx
2178    je    .Lstring_compareto_keep_length
2179    movzwl  -2(%esi), %eax        // get last compared char from this string (16-bit)
2180    movzwl  -2(%edi), %ecx        // get last compared char from comp string (16-bit)
2181.Lstring_compareto_count_difference:
2182    subl    %ecx, %eax
2183.Lstring_compareto_keep_length:
2184.Lstring_compareto_return:
2185    POP edi                       // pop callee save reg
2186    POP esi                       // pop callee save reg
2187    ret
2188END_FUNCTION art_quick_string_compareto
2189
// Slow path for StringBuilder.append: on entry EAX holds the format word and
// the argument words sit above the return address on the caller's stack.
// Returns the new String in EAX, or null with a pending exception.
DEFINE_FUNCTION art_quick_string_builder_append
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx       // save ref containing registers for GC
    // Outgoing argument set up
    leal FRAME_SIZE_SAVE_REFS_ONLY + __SIZEOF_POINTER__(%esp), %edi  // prepare args
    push %eax                                 // push padding
    CFI_ADJUST_CFA_OFFSET(4)
    pushl %fs:THREAD_SELF_OFFSET              // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    push %edi                                 // pass args
    CFI_ADJUST_CFA_OFFSET(4)
    push %eax                                 // pass format
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artStringBuilderAppend)       // (uint32_t, const uint32_t*, Thread*)
    addl MACRO_LITERAL(16), %esp              // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME              // restore frame up to return address
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER   // return or deliver exception
END_FUNCTION art_quick_string_builder_append
2208
// Create a function `name` calling the ReadBarrier::Mark routine,
// getting its argument and returning its result through register
// `reg`, saving and restoring all caller-save registers.
//
// If `reg` is different from `eax`, the generated function follows a
// non-standard runtime calling convention:
// - register `reg` is used to pass the (sole) argument of this function
//   (instead of EAX);
// - register `reg` is used to return the result of this function
//   (instead of EAX);
// - EAX is treated like a normal (non-argument) caller-save register;
// - everything else is the same as in the standard runtime calling
//   convention (e.g. standard callee-save registers are preserved).
MACRO2(READ_BARRIER_MARK_REG, name, reg)
    DEFINE_FUNCTION VAR(name)
    // Null check so that we can load the lock word.
    test REG_VAR(reg), REG_VAR(reg)
    jz .Lret_rb_\name
.Lnot_null_\name:
    // Check the mark bit, if it is 1 return.
    testl LITERAL(LOCK_WORD_MARK_BIT_MASK_SHIFTED), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(reg))
    jz .Lslow_rb_\name
    ret
.Lslow_rb_\name:
    PUSH eax                     // Preserve eax; also holds the lock word below.
    mov MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(reg)), %eax
    // Adding the overflow constant makes CF distinguish the forwarding-address
    // lock-word state from all others with a single add.
    add LITERAL(LOCK_WORD_STATE_FORWARDING_ADDRESS_OVERFLOW), %eax
    // Jump if overflow, the only case where it overflows should be the forwarding address one.
    // Taken ~25% of the time.
    jnae .Lret_forwarding_address\name

    // Save all potentially live caller-save core registers.
    mov 0(%esp), %eax            // Reload original eax (clobbered by the add above).
    PUSH ecx
    PUSH edx
    PUSH ebx
    // 8-byte align the stack to improve (8-byte) XMM register saving and restoring.
    // and create space for caller-save floating-point registers.
    subl MACRO_LITERAL(4 + 8 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(4 + 8 * 8)
    // Save all potentially live caller-save floating-point registers.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)
    movsd %xmm4, 32(%esp)
    movsd %xmm5, 40(%esp)
    movsd %xmm6, 48(%esp)
    movsd %xmm7, 56(%esp)

    subl LITERAL(4), %esp            // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH RAW_VAR(reg)                // pass arg1 - obj from `reg`
    call SYMBOL(artReadBarrierMark)  // artReadBarrierMark(obj)
    .ifnc RAW_VAR(reg), eax
      movl %eax, REG_VAR(reg)        // return result into `reg`
    .endif
    addl LITERAL(8), %esp            // pop argument and remove padding
    CFI_ADJUST_CFA_OFFSET(-8)

    // Restore floating-point registers.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3
    movsd 32(%esp), %xmm4
    movsd 40(%esp), %xmm5
    movsd 48(%esp), %xmm6
    movsd 56(%esp), %xmm7
    // Remove floating-point registers and padding.
    addl MACRO_LITERAL(8 * 8 + 4), %esp
    CFI_ADJUST_CFA_OFFSET(-(8 * 8 + 4))
    // Restore core regs, except `reg`, as it is used to return the
    // result of this function (simply remove it from the stack instead).
    POP_REG_NE ebx, RAW_VAR(reg)
    POP_REG_NE edx, RAW_VAR(reg)
    POP_REG_NE ecx, RAW_VAR(reg)
    POP_REG_NE eax, RAW_VAR(reg)
.Lret_rb_\name:
    ret
.Lret_forwarding_address\name:
    // The overflow cleared the top bits.
    // Shift the remaining bits back into place to recover the address.
    sall LITERAL(LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT), %eax
    mov %eax, REG_VAR(reg)
    POP_REG_NE eax, RAW_VAR(reg)
    ret
    END_FUNCTION VAR(name)
END_MACRO
2297
// Instantiate one mark stub per register that can hold a reference; the stub
// number matches the x86 register encoding of the argument/result register.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg00, eax
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, ecx
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, edx
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, ebx
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, ebp
// Note: There is no art_quick_read_barrier_mark_reg04, as register 4 (ESP)
// cannot be used to pass arguments.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, esi
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, edi
2307
// Read-barrier slow path for a field/array load.
// On entry: EAX = ref, ECX = holder object, EDX = field offset.
// Result comes back in EAX per the standard calling convention.
DEFINE_FUNCTION art_quick_read_barrier_slow
    PUSH edx                         // pass arg3 - offset
    PUSH ecx                         // pass arg2 - obj
    PUSH eax                         // pass arg1 - ref
    call SYMBOL(artReadBarrierSlow)  // artReadBarrierSlow(ref, obj, offset)
    addl LITERAL(12), %esp           // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_read_barrier_slow
2317
// Read-barrier slow path for a GC root load. On entry EAX = root; the
// (possibly updated) reference comes back in EAX.
DEFINE_FUNCTION art_quick_read_barrier_for_root_slow
    subl LITERAL(8), %esp                   // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    PUSH eax                                // pass arg1 - root
    call SYMBOL(artReadBarrierForRootSlow)  // artReadBarrierForRootSlow(root)
    addl LITERAL(12), %esp                  // pop argument and remove padding
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_read_barrier_for_root_slow
2327
2328  /*
2329     * On stack replacement stub.
2330     * On entry:
2331     *   [sp] = return address
2332     *   [sp + 4] = stack to copy
2333     *   [sp + 8] = size of stack
2334     *   [sp + 12] = pc to call
2335     *   [sp + 16] = JValue* result
2336     *   [sp + 20] = shorty
2337     *   [sp + 24] = thread
2338     */
2339DEFINE_FUNCTION art_quick_osr_stub
2340    // Save native callee saves.
2341    PUSH ebp
2342    PUSH ebx
2343    PUSH esi
2344    PUSH edi
2345    SAVE_SIZE=20                   // 4 registers and the return address
2346    mov 4+16(%esp), %esi           // ESI = argument array
2347    mov 8+16(%esp), %ecx           // ECX = size of args
2348    mov 12+16(%esp), %ebx          // EBX = pc to call
2349    mov %esp, %ebp                 // Save stack pointer
2350    CFI_DEF_CFA(ebp, SAVE_SIZE)    // CFA = ebp + SAVE_SIZE
2351    CFI_REMEMBER_STATE
2352    andl LITERAL(0xFFFFFFF0), %esp // Align stack
2353    pushl %ebp                     // Save old stack pointer
2354    subl LITERAL(12), %esp         // Align stack
2355    movl LITERAL(0), (%esp)        // Store null for ArtMethod* slot
2356    // ebp isn't properly spilled in the osr method, so we need use DWARF expression.
2357    // NB: the CFI must be before the call since this is the address gdb will lookup.
2358    // NB: gdb expects that cfa_expression returns the CFA value (not address to it).
2359    CFI_ESCAPE(                    /* cfa = [sp + 12] + SAVE_SIZE */ \
2360      0x0f, 6,                     /* DW_CFA_def_cfa_expression(len) */ \
2361      0x92, 4, 12,                 /* DW_OP_bregx(reg,offset) */ \
2362      0x06,                        /* DW_OP_deref */ \
2363      0x23, SAVE_SIZE)             /* DW_OP_plus_uconst(val) */
2364    call .Losr_entry
2365    mov 12(%esp), %esp             // Restore stack pointer.
2366    CFI_DEF_CFA(esp, SAVE_SIZE)    // CFA = esp + SAVE_SIZE
2367
2368    // Restore callee saves.
2369    POP edi
2370    POP esi
2371    POP ebx
2372    POP ebp
2373    mov 16(%esp), %ecx            // Get JValue result
2374    mov %eax, (%ecx)              // Store the result.
2375    mov %edx, 4(%ecx)             // Store the other half of the result.
2376    ret
2377.Losr_entry:
2378    CFI_RESTORE_STATE_AND_DEF_CFA(ebp, SAVE_SIZE)  // CFA = ebp + SAVE_SIZE
2379    subl LITERAL(4), %ecx         // Given stack size contains pushed frame pointer, substract it.
2380    subl %ecx, %esp
2381    mov %esp, %edi                // EDI = beginning of stack
2382    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
2383    jmp *%ebx
2384END_FUNCTION art_quick_osr_stub
2385
// Trampoline for MethodHandle.invoke/invokeExact. Hands the receiver to
// artInvokePolymorphic and routes the 64-bit result back through the saved
// register context so the restore macro returns it in EAX:EDX (and XMM0).
DEFINE_FUNCTION art_quick_invoke_polymorphic
                                                   // On entry: EAX := unused, ECX := receiver
    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, ebx        // Save frame.
    mov %esp, %edx                                 // Remember SP
    sub LITERAL(4), %esp                           // Alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    push %edx                                      // Push SP
    CFI_ADJUST_CFA_OFFSET(4)
    pushl %fs:THREAD_SELF_OFFSET                   // Push Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    push %ecx                                      // Push receiver (method handle)
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artInvokePolymorphic)              // invoke with (receiver, thread, SP)
    addl LITERAL(16), %esp                         // Pop arguments.
    CFI_ADJUST_CFA_OFFSET(-16)
    mov %eax, 4(%esp)                              // Result is in EAX:EDX. Copy to saved FP state.
    mov %edx, 8(%esp)
    mov %edx, 40(%esp)                             // Copy EDX to saved context
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    RETURN_OR_DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_invoke_polymorphic
2407
// Trampoline for invoke-custom (call sites). Mirrors
// art_quick_invoke_polymorphic but passes the call_site_index from EAX.
DEFINE_FUNCTION art_quick_invoke_custom
    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, ebx        // Save frame.
                                                   // EAX := call_site_index
    mov %esp, %ecx                                 // Remember SP.
    subl LITERAL(4), %esp                          // Alignment padding.
    CFI_ADJUST_CFA_OFFSET(4)
    push %ecx                                      // pass SP
    CFI_ADJUST_CFA_OFFSET(4)
    pushl %fs:THREAD_SELF_OFFSET                   // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    push %eax                                      // pass call_site_index
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artInvokeCustom)                   // artInvokeCustom(call_site_index, Thread*, SP)
    addl LITERAL(16), %esp                         // Pop arguments.
    CFI_ADJUST_CFA_OFFSET(-16)
    mov %eax, 4(%esp)                              // Result is in EAX:EDX. Copy to saved FP state.
    mov %edx, 8(%esp)
    mov %edx, 40(%esp)                             // Copy EDX to saved context
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    RETURN_OR_DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_invoke_custom
2429
// Wrap ExecuteSwitchImpl in assembly method which specifies DEX PC for unwinding.
//  Argument 0: ESP+4: The context pointer for ExecuteSwitchImpl.
//  Argument 1: ESP+8: Pointer to the templated ExecuteSwitchImpl to call.
//  Argument 2: ESP+12: The value of DEX PC (memory address of the methods bytecode).
// EBX (callee-saved) holds the DEX PC across the call so the CFI expression
// emitted below can recover it during unwinding.
DEFINE_FUNCTION ExecuteSwitchImplAsm
    PUSH ebx                 // Spill EBX; Increments ESP, so arg0 is at ESP+8 now.
    mov 12(%esp), %eax       // EAX = C++ templated interpreter function
    mov 16(%esp), %ebx       // EBX = DEX PC (callee save register)
    mov 8(%esp), %ecx        // ECX = Context argument for the function
    CFI_DEFINE_DEX_PC_WITH_OFFSET(0 /* EAX */, 3 /* EBX */, 0)

    sub LITERAL(4), %esp     // Alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    push %ecx                // Push argument
    CFI_ADJUST_CFA_OFFSET(4)
    call *%eax               // Call the wrapped function
    addl LITERAL(8), %esp
    CFI_ADJUST_CFA_OFFSET(-8)

    POP ebx                  // Restore EBX
    ret
END_FUNCTION ExecuteSwitchImplAsm
2452
// On entry: eax is the class, ebp is the inline cache.
// Records `class` into the first empty slot of the 5-entry inline cache using
// lock cmpxchg; returns immediately without updating while GC is marking.
// Slot 5 is stored unconditionally: the cache is treated as megamorphic.
// Preserves all registers (eax is restored from ecx at the end).
DEFINE_FUNCTION art_quick_update_inline_cache
#if (INLINE_CACHE_SIZE != 5)
#error "INLINE_CACHE_SIZE not as expected."
#endif
    // Don't update the cache if we are marking.
    cmpl LITERAL(0), %fs:THREAD_IS_GC_MARKING_OFFSET
    jnz .Lret
    PUSH ecx
    movl %eax, %ecx // eax will be used for cmpxchg
.Lentry1:
    // Slot 0: hit, claim-if-empty (cmpxchg expects eax == old value == 0),
    // or fall through to the next slot.
    movl INLINE_CACHE_CLASSES_OFFSET(%ebp), %eax
    cmpl %ecx, %eax
    je .Ldone
    cmpl LITERAL(0), %eax
    jne .Lentry2
    lock cmpxchg %ecx, INLINE_CACHE_CLASSES_OFFSET(%ebp)
    jz .Ldone
    jmp .Lentry1              // Lost the race; re-examine the slot.
.Lentry2:
    movl (INLINE_CACHE_CLASSES_OFFSET+4)(%ebp), %eax
    cmpl %ecx, %eax
    je .Ldone
    cmpl LITERAL(0), %eax
    jne .Lentry3
    lock cmpxchg %ecx, (INLINE_CACHE_CLASSES_OFFSET+4)(%ebp)
    jz .Ldone
    jmp .Lentry2
.Lentry3:
    movl (INLINE_CACHE_CLASSES_OFFSET+8)(%ebp), %eax
    cmpl %ecx, %eax
    je .Ldone
    cmpl LITERAL(0), %eax
    jne .Lentry4
    lock cmpxchg %ecx, (INLINE_CACHE_CLASSES_OFFSET+8)(%ebp)
    jz .Ldone
    jmp .Lentry3
.Lentry4:
    movl (INLINE_CACHE_CLASSES_OFFSET+12)(%ebp), %eax
    cmpl %ecx, %eax
    je .Ldone
    cmpl LITERAL(0), %eax
    jne .Lentry5
    lock cmpxchg %ecx, (INLINE_CACHE_CLASSES_OFFSET+12)(%ebp)
    jz .Ldone
    jmp .Lentry4
.Lentry5:
    // Unconditionally store, the cache is megamorphic.
    movl %ecx, (INLINE_CACHE_CLASSES_OFFSET+16)(%ebp)
.Ldone:
    // Restore registers
    movl %ecx, %eax
    POP ecx
.Lret:
    ret
END_FUNCTION art_quick_update_inline_cache
2509
    // TODO: implement these!
    // Placeholder entrypoint; the UNIMPLEMENTED macro presumably traps or
    // aborts if reached -- confirm against asm_support_x86.S.
UNIMPLEMENTED art_quick_memcmp16
2512
// On entry, the method is at the bottom of the stack.
// JIT hook: saves everything, asks artCompileOptimized to (re)compile the
// ArtMethod, then resumes execution unchanged.
DEFINE_FUNCTION art_quick_compile_optimized
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
    mov FRAME_SIZE_SAVE_EVERYTHING(%esp), %eax // Fetch ArtMethod
    sub LITERAL(8), %esp   		       // Alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    pushl %eax                                 // pass ArtMethod*
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artCompileOptimized)           // (ArtMethod*, Thread*)
    addl LITERAL(16), %esp                     // Pop arguments.
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_EVERYTHING_FRAME
    ret
END_FUNCTION art_quick_compile_optimized
2529