Searched refs:w1 (Results 1 – 11 of 11) sorted by relevance
18   GET_VREG w1, w3 // w1<- vCC (requested index)
21   add x0, x0, w1, uxtw #$shift // w0<- arrayObj + index*width
22   cmp w1, w3 // compare unsigned index, length
50   GET_VREG w1, w3 // w1<- vCC (requested index)
55   cbnz w1, MterpException
75   GET_VREG w1, w3 // w1<- vCC (requested index)
78   add x0, x0, w1, uxtw #3 // w0<- arrayObj + index*width
79   cmp w1, w3 // compare unsigned index, length
104  GET_VREG w1, w3 // w1<- vCC (requested index)
107  add x0, x0, w1, uxtw #$shift // w0<- arrayObj + index*width
[all …]

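These hits are from the interpreter's array get/put handlers: w1 holds the requested index, which is compared unsigned against the array length (so a negative index fails the same single check) before the element address is formed as arrayObj + index*width. A minimal C sketch of that pattern, with illustrative names (element_addr, array_base, elem_size are not from the source):

    #include <stddef.h>
    #include <stdint.h>

    /* Sketch only: one unsigned compare rejects both negative and too-large
     * indices, mirroring "cmp w1, w3" on the unsigned index. */
    static void *element_addr(uint8_t *array_base, uint32_t length,
                              uint32_t index, size_t elem_size) {
        if (index >= length) {
            return NULL;                                /* caller raises the range error */
        }
        return array_base + (size_t)index * elem_size;  /* arrayObj + index*width */
    }
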
9    lsr w1, wINST, #8 // w1<- AA
29   FETCH w1, 2 // w1<- BBBB (high
31   orr w0, w0, w1, lsl #16 // w0<- BBBBbbbb
47   sbfx w1, wINST, #12, #4 // w1<- sssssssB
51   SET_VREG w1, w0 // fp[A]<- w1
81   lsr w1, wINST, #8 // w1<- AA
95   FETCH w1, 2 // w1<- BBBB (low middle)
101  orr w0, w0, w1, lsl #16 // w0<- BBBBbbbb
130  lsr w1, wINST, #8 // w1<- AA
133  SET_VREG_WIDE x0, w1
[all …]

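These matches are in the const handlers: a 32-bit literal is assembled from two 16-bit code units with "orr w0, w0, w1, lsl #16", and what looks like the 4-bit signed literal case ("w1<- sssssssB") is pulled out of the instruction word with "sbfx w1, wINST, #12, #4". A hedged C sketch of both steps (the function and parameter names are made up for illustration):

    #include <stdint.h>

    /* Sketch: join two 16-bit code units into one 32-bit literal,
     * as "orr w0, w0, w1, lsl #16" does. */
    static uint32_t combine_units(uint16_t lo, uint16_t hi) {
        return (uint32_t)lo | ((uint32_t)hi << 16);
    }

    /* Sketch: sign-extend the 4-bit literal held in bits 12..15 of the
     * instruction word, the job of "sbfx w1, wINST, #12, #4". */
    static int32_t signed_nibble(uint32_t inst) {
        int32_t v = (int32_t)((inst >> 12) & 0xF);  /* extract the nibble */
        return (v ^ 8) - 8;                         /* sign-extend from 4 bits */
    }
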
22   GET_VREG w1, w3 // w1<- vCC
25   cbz w1, common_errDivideByZero // is second operand zero?
53   GET_VREG w1, w3 // w1<- vB
56   cbz w1, common_errDivideByZero
80   FETCH_S w1, 1 // w1<- ssssCCCC (sign-extended)
85   cbz w1, common_errDivideByZero
120  cbz w1, common_errDivideByZero
147  and w1, w0, #255 // w1<- BB
149  GET_VREG_WIDE $r1, w1 // w1<- vBB
176  lsr w1, wINST, #12 // w1<- B
[all …]

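These are the divide/remainder handlers: the divisor ends up in w1, whether read from a register or fetched as a sign-extended ssssCCCC literal, and a zero divisor branches to common_errDivideByZero, since AArch64 sdiv returns 0 on division by zero instead of trapping. A small C sketch of the guard (div_int and its signature are illustrative only):

    #include <stdint.h>

    /* Sketch: explicit zero check before dividing, mirroring
     * "cbz w1, common_errDivideByZero"; the hardware divide would
     * silently produce 0 instead of raising anything. */
    static int div_int(int32_t dividend, int32_t divisor, int32_t *result) {
        if (divisor == 0) {
            return -1;                 /* caller throws the arithmetic error */
        }
        *result = (int32_t)((int64_t)dividend / divisor);  /* also sidesteps INT32_MIN / -1 */
        return 0;
    }
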
10   lsr w1, w0, #8 // r2<- CC
12   GET_VREG s1, w1
15   lsr w1, wINST, #8 // r1<- AA
18   SET_VREG_FLOAT s0, w1
29   and w1, w0, #255 // w1<- BB
31   GET_VREG_DOUBLE $r1, w1 // w1<- vBB
62   lsr w1, wINST, #12 // w1<- B
64   GET_VREG_DOUBLE $r1, w1 // x1<- vB
266  and w1, w0, #255 // w1<- BB
268  GET_VREG_DOUBLE d0, w1 // d0<- vBB
[all …]

9    lsr w1, wINST, #12 // w1<- B
11   GET_VREG w3, w1 // w3<- vB
79   FETCH w1, 2 // w1<- AAAA (hi)
80   orr wINST, w0, w1, lsl #16 // wINST<- AAAAaaaa
134  GET_VREG w1, w3 // w1<- vAA
220  GET_VREG w1, w2 // r1<- vAA (exception object)
221  cbz w1, common_errNullObject

24   lsr w1, wINST, #8 // w1<- AA
25   VREG_INDEX_TO_ADDR x1, w1 // w1<- &object
56   ldr w1, [xSELF, #THREAD_IS_GC_MARKING_OFFSET]
57   cbnz w1, .L_${opcode}_mark // GC is active.
108  FETCH w1, 1 // w1<- field byte offset
125  FETCH w1, 1 // w1<- field byte offset
168  lsr w1, wINST, #12 // w1<- B
169  VREG_INDEX_TO_ADDR x1, w1 // w1<- &object
221  FETCH w1, 1 // w1<- field byte offset

565 ldr w1, [xFP, #OFF_FP_DEX_PC]

510   ldr w1, [x9],#4 // Load "this" parameter, and increment arg pointer.
696   LOADREG x8 4 w1 .LfillRegisters2
804   sub w1, w1, #8
815   cbz w1, .Losr_loop_exit
816   sub w1, w1, #4
892   ldr w1, [xSELF, #THREAD_ID_OFFSET]
898   eor w3, w2, w1 // Prepare the value to store if unlocked
938   ldr w1, [xSELF, #THREAD_ID_OFFSET]
948   eor w3, w2, w1 // Prepare the value to store if simply locked
1447  ldr w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
[all …]

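The locking hits in this file read the current thread id from xSELF and xor it against the object's lock word ("eor w3, w2, w1") to prepare the thin-lock value to store, per the comments on lines 898 and 948. A very rough C sketch of that idea (names invented; the real lock word also carries state, count, and read-barrier bits):

    #include <stdint.h>

    /* Sketch: if the lock word records no owner, XOR-ing in the thread id yields
     * the candidate word to store when locking; on unlock, the same XOR cancels
     * the owner id again, leaving (mostly) zero only if this thread holds the lock. */
    static uint32_t prepare_thin_lock_word(uint32_t lock_word, uint32_t thread_id) {
        return lock_word ^ thread_id;    /* mirrors "eor w3, w2, w1" */
    }
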
31   int w1; field in Base
41   return String.format("w0: %d, w1: %d, w2: %d, w3: %d", w0, w1, w2, w3); in baseString()
103  b.w1 = 2; in exercise()

143 // 0x00000030: str w1, [sp, #200]
637 EXPECT_TRUE(vixl::aarch64::w1.Is(Arm64Assembler::reg_w(W1))); in TEST()