Lines Matching refs:temp0
1182 const vixl32::Register temp0 = RegisterFrom(locations->GetTemp(0)); in VisitStringCompareTo() local
1217 __ Lsr(temp0, temp3, 1u); in VisitStringCompareTo()
1221 __ Ldr(temp0, MemOperand(str, count_offset)); in VisitStringCompareTo()
1225 __ Subs(out, temp0, temp1); in VisitStringCompareTo()
1234 __ mov(gt, temp0, temp1); in VisitStringCompareTo()
1240 __ CompareAndBranchIfZero(temp0, &end, mirror::kUseStringCompression); in VisitStringCompareTo()
1256 __ add(ne, temp0, temp0, temp0); in VisitStringCompareTo()
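
The matches above are from the String.compareTo() intrinsic in ART's ARM (VIXL32) code generator. temp0 first holds a string's count field and then the minimum of the two lengths: the Lsr by 1 extracts the character count because, when mirror::kUseStringCompression is enabled, bit 0 of the count field is a compression flag, and the conditional add(ne, temp0, temp0, temp0) doubles a char count into a byte count for uncompressed data. Below is a minimal C++ model of that bookkeeping, assuming the count-field layout count = (length << 1) | flag with flag 0 = compressed (8-bit chars) and flag 1 = uncompressed (16-bit chars); the helper names are illustrative, not ART's.

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Illustrative model only, not ART code. Assumed layout:
    //   count = (length << 1) | flag,
    //   flag 0 = compressed (8-bit chars), flag 1 = uncompressed (16-bit).
    constexpr bool kUseStringCompression = true;

    // Mirrors `Lsr(temp0, temp3, 1u)`: the character count is the count
    // field shifted right by one when compression is enabled.
    uint32_t LengthFromCount(uint32_t count) {
      return kUseStringCompression ? (count >> 1) : count;
    }

    // Bit 0 of the count field is the compression flag; 0 means compressed.
    bool IsCompressed(uint32_t count) {
      return kUseStringCompression && (count & 1u) == 0u;
    }

    int main() {
      uint32_t lhs_count = (5u << 1) | 1u;  // uncompressed, 5 chars
      uint32_t rhs_count = (3u << 1) | 0u;  // compressed, 3 chars

      // `Subs(out, temp0, temp1)` plus the conditional `mov(gt, ...)`:
      // out defaults to the length difference, temp0 becomes the min length.
      int32_t out = static_cast<int32_t>(LengthFromCount(lhs_count)) -
                    static_cast<int32_t>(LengthFromCount(rhs_count));
      uint32_t min_len = std::min(LengthFromCount(lhs_count),
                                  LengthFromCount(rhs_count));

      assert(out == 2);
      assert(min_len == 3u);
      assert(!IsCompressed(lhs_count));
      assert(IsCompressed(rhs_count));
      return 0;
    }

This also explains the CompareAndBranchIfZero on temp0: once temp0 holds the minimum length (or byte count), a zero value means there is nothing left to compare character by character and the length difference already in out is the answer.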
1279 const vixl32::Register temp0 = RegisterFrom(locations->GetTemp(0)); in GenerateStringCompareToLoop() local
1320 __ Subs(temp0, temp0, (mirror::kUseStringCompression ? 8 : 4)); in GenerateStringCompareToLoop()
1326 __ Subs(temp0, temp0, 4); // 4 bytes previously compared. in GenerateStringCompareToLoop()
1331 __ Sub(temp0, temp0, 2); in GenerateStringCompareToLoop()
1354 __ Cmp(temp0, Operand(temp1, vixl32::LSR, (mirror::kUseStringCompression ? 3 : 4))); in GenerateStringCompareToLoop()
1394 __ Add(temp0, temp0, temp0); // Unlike LSL, this ADD is always 16-bit. in GenerateStringCompareToLoop()
1407 __ Sbc(temp0, temp0, 0); // Complete the move of the compression flag. in GenerateStringCompareToLoop()
1423 __ Subs(temp0, temp0, 2); in GenerateStringCompareToLoop()
1433 __ Lsrs(temp0, temp0, 1u); in GenerateStringCompareToLoop()
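
In GenerateStringCompareToLoop() the same temp0 is reused as the loop counter: with mirror::kUseStringCompression it holds the remaining byte count, hence the Subs of 8 per iteration (two 32-bit words are compared each pass), otherwise the remaining 16-bit char count and a Subs of 4. The Lsrs by 1 shifts the low bit into the carry flag, which the Sbc then consumes to finish moving the compression flag, and the 16-bit-encodable Add(temp0, temp0, temp0) scales a char count back up to bytes. The sketch below is a minimal C++ model of that chunked-comparison bookkeeping under those assumptions; EqualOverPrefix is an illustrative name, not an ART function, and it approximates the Subs/B(hi) loop control of the emitted code.

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Illustrative model only, not ART code. temp0's role is played by
    // `remaining`; each iteration compares two 32-bit words (8 bytes)
    // per string, matching `Subs(temp0, temp0, 8)` on the compressed
    // (byte-counted) path.
    bool EqualOverPrefix(const uint8_t* lhs, const uint8_t* rhs,
                         uint32_t byte_count) {
      uint32_t remaining = byte_count;  // plays the role of temp0
      size_t offset = 0;                // plays the role of temp1
      while (remaining >= 8) {
        uint32_t a[2], b[2];
        std::memcpy(a, lhs + offset, sizeof(a));
        std::memcpy(b, rhs + offset, sizeof(b));
        if (a[0] != b[0] || a[1] != b[1]) {
          return false;  // the real code branches to a find_char_diff path
        }
        offset += 8;
        remaining -= 8;  // Subs(temp0, temp0, 8); B(hi, &loop)
      }
      // The real code handles the tail character by character
      // (the `Subs(temp0, temp0, 2)` / `Lsrs(temp0, temp0, 1u)` matches).
      for (; remaining != 0; --remaining, ++offset) {
        if (lhs[offset] != rhs[offset]) return false;
      }
      return true;
    }

    int main() {
      const char a[] = "hello, world";
      const char b[] = "hello, wOrld";
      const uint8_t* pa = reinterpret_cast<const uint8_t*>(a);
      const uint8_t* pb = reinterpret_cast<const uint8_t*>(b);
      assert(EqualOverPrefix(pa, pa, 12));
      assert(!EqualOverPrefix(pa, pb, 12));
      return 0;
    }

The Cmp of temp0 against Operand(temp1, LSR, 3 or 4) fits the same picture: temp1 holds the bit position of the first differing bit within the mismatching words, and shifting right by 3 (bits to bytes) or 4 (bits to 16-bit chars) converts it to an index that can be checked against the remaining count, so a difference that falls beyond the shorter string's data is ignored in favor of the length difference.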