/art/compiler/optimizing/ |
D | data_type.cc | 38 static_assert(arraysize(kTypeNames) == static_cast<size_t>(Type::kLast) + 1, in PrettyDescriptor()
    41 CHECK_LE(uint_type, static_cast<uint32_t>(Type::kLast)); in PrettyDescriptor()
    47 if (uint_type <= static_cast<uint32_t>(DataType::Type::kLast)) { in operator <<()
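
The data_type.cc hits show the main reason ART enums end in a kLast alias: a parallel name table can be size-checked at compile time and indices bounds-checked before lookup. A minimal, self-contained sketch of that idiom (the Type values and names below are an abbreviated, illustrative list, not ART's full DataType::Type):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <iterator>

    enum class Type : uint32_t {
      kBool,     // Implicitly 0; values must stay contiguous from 0 ...
      kInt32,
      kFloat64,
      kVoid,
      kLast = kVoid  // ... so that kLast aliases the highest value.
    };

    // One name per enumerator, in declaration order.
    static const char* const kTypeNames[] = {"Bool", "Int32", "Float64", "Void"};

    // Fails to compile if an enumerator is added without a matching name.
    static_assert(std::size(kTypeNames) == static_cast<size_t>(Type::kLast) + 1,
                  "kTypeNames is out of sync with Type");

    const char* PrettyName(Type type) {
      uint32_t raw = static_cast<uint32_t>(type);
      assert(raw <= static_cast<uint32_t>(Type::kLast));  // Bounds check before the lookup.
      return kTypeNames[raw];
    }

    int main() {
      std::cout << PrettyName(Type::kFloat64) << '\n';  // Prints "Float64".
    }
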
|
D | intrinsic_objects.h | 42 kLast = kIntegerValueOfArray enumerator
    73 MinimumBitsToStore(static_cast<uint32_t>(PatchType::kLast));
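
Several hits in this listing (intrinsic_objects.h here, and nodes_vector.h, subtype_check_bits_and_status.h, compiled_method.h further down) feed kLast into ART's MinimumBitsToStore() helper to decide how wide a packed bit field must be. A sketch of the idea with a simplified constexpr stand-in for that helper; the PatchType enumerator other than kIntegerValueOfArray is assumed for illustration:

    #include <cstddef>
    #include <cstdint>

    // Simplified stand-in for ART's MinimumBitsToStore(): the number of bits
    // needed to represent every value in [0, value].
    constexpr size_t MinimumBitsToStore(size_t value) {
      size_t bits = 0;
      while (value != 0) {
        ++bits;
        value >>= 1;
      }
      return bits;
    }

    enum class PatchType : uint32_t {
      kIntegerValueOfObject,  // Assumed enumerator, for illustration.
      kIntegerValueOfArray,
      kLast = kIntegerValueOfArray
    };

    // Wide enough to encode any PatchType; used when packing a patch descriptor.
    static constexpr size_t kPatchTypeBits =
        MinimumBitsToStore(static_cast<size_t>(PatchType::kLast));
    static_assert(kPatchTypeBits == 1, "two enumerators fit in one bit");
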
|
D | optimization.h | 103 kLast = kNone enumerator
|
D | code_generator_arm_vixl.h | 770 kLast = kUnsafeCas enumerator
    776 kLast = kNarrow enumerator
    782 MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierKind::kLast));
    794 MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierWidth::kLast));
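
The ARM code generators (this file and code_generator_arm64.h below) go one step further: two kLast-derived widths size adjacent fields of a single packed "custom data" word. A sketch of that packing pattern, assuming C++20 so std::bit_width can stand in for MinimumBitsToStore; the enumerator lists and the register field are abbreviated/invented for illustration:

    #include <bit>
    #include <cstddef>
    #include <cstdint>

    enum class BakerReadBarrierKind : uint8_t {
      kField, kArray, kGcRoot, kUnsafeCas,
      kLast = kUnsafeCas
    };

    enum class BakerReadBarrierWidth : uint8_t {
      kWide, kNarrow,
      kLast = kNarrow
    };

    // Field widths derived from the kLast aliases.
    constexpr size_t kBitsForKind =
        std::bit_width(static_cast<uint32_t>(BakerReadBarrierKind::kLast));
    constexpr size_t kBitsForWidth =
        std::bit_width(static_cast<uint32_t>(BakerReadBarrierWidth::kLast));

    // Pack kind, width and a register number into one custom-data word.
    constexpr uint32_t EncodeCustomData(BakerReadBarrierKind kind,
                                        BakerReadBarrierWidth width,
                                        uint32_t reg) {
      return static_cast<uint32_t>(kind) |
             (static_cast<uint32_t>(width) << kBitsForKind) |
             (reg << (kBitsForKind + kBitsForWidth));
    }

    constexpr BakerReadBarrierKind DecodeKind(uint32_t data) {
      return static_cast<BakerReadBarrierKind>(data & ((1u << kBitsForKind) - 1u));
    }

    static_assert(DecodeKind(EncodeCustomData(BakerReadBarrierKind::kGcRoot,
                                              BakerReadBarrierWidth::kNarrow,
                                              /*reg=*/5)) ==
                      BakerReadBarrierKind::kGcRoot,
                  "round-trip through the packed word");
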
|
D | data_type.h | 44 kLast = kVoid enumerator
|
D | stack_map_stream.cc | 156 current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast; in EndStackMapEntry()
|
D | code_generator_arm64.h | 800 kLast = kGcRoot enumerator
    806 MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierKind::kLast));
|
D | nodes_vector.h | 186 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
|
D | optimizing_compiler.cc | 327 std::bitset<static_cast<size_t>(OptimizationPass::kLast) + 1u> pass_changes; in RunOptimizations()
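
optimizing_compiler.cc applies the kLast + 1 sizing to a std::bitset, giving one bit per optimization pass so the driver loop can record which passes actually changed the graph. A sketch with an abbreviated, partly assumed pass list (only kNone/kLast is taken from the optimization.h hit above):

    #include <bitset>
    #include <cstddef>

    enum class OptimizationPass : size_t {
      kConstantFolding,       // Assumed pass names, for illustration.
      kDeadCodeElimination,
      kInliner,
      kNone,
      kLast = kNone
    };

    int main() {
      // One bit per pass, including kLast itself (hence the +1).
      std::bitset<static_cast<size_t>(OptimizationPass::kLast) + 1u> pass_changes;

      // A pass that transformed the graph flips its bit.
      pass_changes[static_cast<size_t>(OptimizationPass::kInliner)] = true;

      return pass_changes.any() ? 0 : 1;
    }
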
|
/art/runtime/ |
D | string_builder_append.h | 49 kLast = kDouble enumerator
    53 MinimumBitsToStore(static_cast<size_t>(Argument::kLast));
|
D | class_status.h | 99 kLast = kVisiblyInitialized enumerator
|
D | subtype_check_bits_and_status.h | 68 static constexpr size_t kClassStatusBitSize = MinimumBitsToStore(enum_cast<>(ClassStatus::kLast));
|
D | deoptimization_kind.h | 33 kLast = kFullFrame enumerator
|
D | reference_table.cc | 164 const size_t kLast = 10; in Dump() local
    166 int first = count - kLast; in Dump()
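
reference_table.cc is the odd one out: its kLast is a plain local constant ("summarize only the last 10 entries"), not an enum sentinel. A sketch of that dump pattern, with the start index clamped so tables shorter than kLast still work:

    #include <cstddef>
    #include <cstdio>
    #include <string>
    #include <vector>

    // Print only the most recent kLast entries of the table.
    void DumpSummary(const std::vector<std::string>& entries) {
      const size_t kLast = 10;
      const int count = static_cast<int>(entries.size());
      int first = count - static_cast<int>(kLast);
      if (first < 0) {
        first = 0;  // Fewer than kLast entries: dump them all.
      }
      for (int i = first; i < count; ++i) {
        std::printf("  %d: %s\n", i, entries[i].c_str());
      }
    }
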
|
D | string_builder_append.cc | 213 DCHECK_LE(f & kArgMask, static_cast<uint32_t>(Argument::kLast)); in CalculateLengthWithFlag()
    281 DCHECK_LE(f & kArgMask, static_cast<uint32_t>(Argument::kLast)); in StoreData()
|
D | runtime.h | 888 DCHECK_LE(kind, DeoptimizationKind::kLast); in IncrementDeoptimizationCount()
    894 for (size_t i = 0; i <= static_cast<size_t>(DeoptimizationKind::kLast); ++i) { in GetNumberOfDeoptimizations()
    1328 static_cast<uint32_t>(DeoptimizationKind::kLast) + 1];
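
runtime.h (and runtime.cc below) shows the counting variant: an array with one counter per DeoptimizationKind, sized with kLast + 1 and walked with an inclusive i <= kLast loop. A condensed sketch; the kind list is abbreviated and the name table is a stand-in for ART's name helper:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    enum class DeoptimizationKind : uint32_t {
      kAotInlineCache,   // Abbreviated list, for illustration.
      kJitInlineCache,
      kDebugging,
      kFullFrame,
      kLast = kFullFrame
    };

    // Stand-in for a name lookup keyed by the enum.
    const char* KindName(DeoptimizationKind kind) {
      static const char* const kNames[] = {
          "AOT inline cache", "JIT inline cache", "debugging", "full frame"};
      return kNames[static_cast<size_t>(kind)];
    }

    class DeoptimizationCounters {
     public:
      void Increment(DeoptimizationKind kind) {
        ++counts_[static_cast<size_t>(kind)];
      }

      void Dump() const {
        // Inclusive bound: kLast is a real kind, not a one-past-the-end marker.
        for (size_t i = 0; i <= static_cast<size_t>(DeoptimizationKind::kLast); ++i) {
          std::printf("%s: %u\n",
                      KindName(static_cast<DeoptimizationKind>(i)),
                      counts_[i]);
        }
      }

     private:
      uint32_t counts_[static_cast<uint32_t>(DeoptimizationKind::kLast) + 1] = {};
    };
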
|
D | stack_map.h | 185 static constexpr uint32_t kLast = -1;
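
stack_map.h's kLast is a third flavour: a flag value rather than an enumerator. Together with the stack_map_stream.cc hit above, it marks the final row of an inline-info chain; -1 converted to uint32_t is simply the all-ones pattern. A small sketch of that mark-and-walk pattern (the row layout is invented for illustration):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct InlineInfoRow {
      // Flag column: kLast on the final row of a chain, kMore otherwise.
      static constexpr uint32_t kLast = -1;  // All bits set: 0xFFFFFFFF.
      static constexpr uint32_t kMore = 0;

      uint32_t is_last = kMore;
      uint32_t method_index = 0;
    };

    // Walk one chain of inline-info rows starting at `first`, stopping after
    // the row whose flag column says it is the last one.
    size_t ChainLength(const std::vector<InlineInfoRow>& rows, size_t first) {
      size_t length = 0;
      for (size_t i = first; i < rows.size(); ++i) {
        ++length;
        if (rows[i].is_last == InlineInfoRow::kLast) {
          break;
        }
      }
      return length;
    }
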
|
D | runtime.cc | 308 for (size_t i = 0; i <= static_cast<size_t>(DeoptimizationKind::kLast); ++i) { in Runtime()
    2002 for (size_t i = 0; i <= static_cast<size_t>(DeoptimizationKind::kLast); ++i) { in DumpDeoptimizations()
|
/art/runtime/mirror/ |
D | var_handle.h | 94 kLast = kGetAndBitwiseXorAcquire, enumerator
    96 constexpr static size_t kNumberOfAccessModes = static_cast<size_t>(AccessMode::kLast) + 1u;
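
var_handle.h (and image_writer.h below, with kNumberOfBins and kNumberOfStubTypes) derives a named count from kLast and then uses it to size per-mode tables. A sketch; the access-mode list is heavily abbreviated and the table contents are invented:

    #include <cstddef>
    #include <cstdint>
    #include <iterator>

    enum class AccessMode : uint32_t {
      kGet,                       // Abbreviated list, for illustration.
      kSet,
      kCompareAndSet,
      kGetAndBitwiseXorAcquire,
      kLast = kGetAndBitwiseXorAcquire,
    };

    // Count derived from the sentinel rather than hard-coded.
    constexpr size_t kNumberOfAccessModes = static_cast<size_t>(AccessMode::kLast) + 1u;

    // One entry per access mode, indexed by static_cast<size_t>(mode).
    struct AccessModeTemplate { int parameter_count; };
    constexpr AccessModeTemplate kAccessModeTemplates[] = {
        {0},  // kGet
        {1},  // kSet
        {2},  // kCompareAndSet
        {1},  // kGetAndBitwiseXorAcquire
    };
    static_assert(std::size(kAccessModeTemplates) == kNumberOfAccessModes,
                  "one template per access mode");
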
|
/art/dex2oat/linker/ |
D | image_writer.h | 215 kLast = kDexCacheArray, enumerator
    243 kLast = kQuickToInterpreterBridge, enumerator
    257 static constexpr size_t kNumberOfBins = static_cast<size_t>(Bin::kLast) + 1u;
    260 static constexpr size_t kNumberOfStubTypes = static_cast<size_t>(StubType::kLast) + 1u;
|
/art/compiler/ |
D | compiled_method.h | 73 MinimumBitsToStore(static_cast<size_t>(InstructionSet::kLast));
|
/art/libartbase/arch/ |
D | instruction_set.h | 35 kLast = kX86_64 enumerator
|
/art/dex2oat/driver/ |
D | compiler_driver_test.cc | 349 static_assert(enum_cast<size_t>(ClassStatus::kLast) < std::numeric_limits<size_t>::max(), in TEST_F()
    352 i <= enum_cast<size_t>(ClassStatus::kLast); in TEST_F()
|
D | compiler_driver.cc | 137 class_status_count_ + static_cast<size_t>(ClassStatus::kLast) + 1, in Dump()
    139 for (size_t i = 0; i <= static_cast<size_t>(ClassStatus::kLast); ++i) { in Dump()
    250 size_t class_status_count_[static_cast<size_t>(ClassStatus::kLast) + 1] = {};
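
The compiler driver hits combine the counter-array and inclusive-loop patterns, and the matching test (compiler_driver_test.cc above) adds a guard worth noting: with an inclusive i <= kLast bound, the loop would never terminate if kLast equalled the maximum of the index type, so a static_assert rules that out. A sketch with an abbreviated ClassStatus list:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <limits>

    enum class ClassStatus : uint8_t {
      kNotReady,             // Abbreviated list, for illustration.
      kResolved,
      kVerified,
      kInitialized,
      kVisiblyInitialized,
      kLast = kVisiblyInitialized
    };

    // One counter per class status.
    size_t class_status_count[static_cast<size_t>(ClassStatus::kLast) + 1] = {};

    void DumpClassStatusHistogram() {
      // With an inclusive bound, i <= kLast could never be false if kLast were
      // the maximum value representable in size_t; rule that out up front.
      static_assert(static_cast<size_t>(ClassStatus::kLast) <
                        std::numeric_limits<size_t>::max(),
                    "i <= kLast below would otherwise loop forever");
      for (size_t i = 0; i <= static_cast<size_t>(ClassStatus::kLast); ++i) {
        std::printf("status %zu: %zu classes\n", i, class_status_count[i]);
      }
    }
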
|
/art/libdexfile/dex/ |
D | dex_file.h | 157 kLast = kInvokeInterface enumerator
|