HELLO·Android
系统源代码
IT资讯
技术文章
我的收藏
注册
登录
-
我收藏的文章
创建代码块
我的代码块
我的账号
Android 10
|
10.0.0_r6
下载
查看原文件
收藏
根目录
external
v8
src
elements.cc
// Copyright 2012 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "src/elements.h" #include "src/arguments.h" #include "src/conversions.h" #include "src/frames.h" #include "src/heap/factory.h" #include "src/heap/heap-write-barrier-inl.h" #include "src/isolate-inl.h" #include "src/messages.h" #include "src/objects-inl.h" #include "src/objects/arguments-inl.h" #include "src/objects/hash-table-inl.h" #include "src/objects/js-array-buffer-inl.h" #include "src/objects/js-array-inl.h" #include "src/utils.h" // Each concrete ElementsAccessor can handle exactly one ElementsKind, // several abstract ElementsAccessor classes are used to allow sharing // common code. // // Inheritance hierarchy: // - ElementsAccessorBase (abstract) // - FastElementsAccessor (abstract) // - FastSmiOrObjectElementsAccessor // - FastPackedSmiElementsAccessor // - FastHoleySmiElementsAccessor // - FastPackedObjectElementsAccessor // - FastHoleyObjectElementsAccessor // - FastDoubleElementsAccessor // - FastPackedDoubleElementsAccessor // - FastHoleyDoubleElementsAccessor // - TypedElementsAccessor: template, with instantiations: // - FixedUint8ElementsAccessor // - FixedInt8ElementsAccessor // - FixedUint16ElementsAccessor // - FixedInt16ElementsAccessor // - FixedUint32ElementsAccessor // - FixedInt32ElementsAccessor // - FixedFloat32ElementsAccessor // - FixedFloat64ElementsAccessor // - FixedUint8ClampedElementsAccessor // - FixedBigUint64ElementsAccessor // - FixedBigInt64ElementsAccessor // - DictionaryElementsAccessor // - SloppyArgumentsElementsAccessor // - FastSloppyArgumentsElementsAccessor // - SlowSloppyArgumentsElementsAccessor // - StringWrapperElementsAccessor // - FastStringWrapperElementsAccessor // - SlowStringWrapperElementsAccessor namespace v8 { namespace internal { namespace { static const int kPackedSizeNotKnown = -1; enum Where { AT_START, AT_END }; // First 
argument in list is the accessor class, the second argument is the // accessor ElementsKind, and the third is the backing store class. Use the // fast element handler for smi-only arrays. The implementation is currently // identical. Note that the order must match that of the ElementsKind enum for // the |accessor_array[]| below to work. #define ELEMENTS_LIST(V) \ V(FastPackedSmiElementsAccessor, PACKED_SMI_ELEMENTS, FixedArray) \ V(FastHoleySmiElementsAccessor, HOLEY_SMI_ELEMENTS, FixedArray) \ V(FastPackedObjectElementsAccessor, PACKED_ELEMENTS, FixedArray) \ V(FastHoleyObjectElementsAccessor, HOLEY_ELEMENTS, FixedArray) \ V(FastPackedDoubleElementsAccessor, PACKED_DOUBLE_ELEMENTS, \ FixedDoubleArray) \ V(FastHoleyDoubleElementsAccessor, HOLEY_DOUBLE_ELEMENTS, FixedDoubleArray) \ V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, NumberDictionary) \ V(FastSloppyArgumentsElementsAccessor, FAST_SLOPPY_ARGUMENTS_ELEMENTS, \ FixedArray) \ V(SlowSloppyArgumentsElementsAccessor, SLOW_SLOPPY_ARGUMENTS_ELEMENTS, \ FixedArray) \ V(FastStringWrapperElementsAccessor, FAST_STRING_WRAPPER_ELEMENTS, \ FixedArray) \ V(SlowStringWrapperElementsAccessor, SLOW_STRING_WRAPPER_ELEMENTS, \ FixedArray) \ V(FixedUint8ElementsAccessor, UINT8_ELEMENTS, FixedUint8Array) \ V(FixedInt8ElementsAccessor, INT8_ELEMENTS, FixedInt8Array) \ V(FixedUint16ElementsAccessor, UINT16_ELEMENTS, FixedUint16Array) \ V(FixedInt16ElementsAccessor, INT16_ELEMENTS, FixedInt16Array) \ V(FixedUint32ElementsAccessor, UINT32_ELEMENTS, FixedUint32Array) \ V(FixedInt32ElementsAccessor, INT32_ELEMENTS, FixedInt32Array) \ V(FixedFloat32ElementsAccessor, FLOAT32_ELEMENTS, FixedFloat32Array) \ V(FixedFloat64ElementsAccessor, FLOAT64_ELEMENTS, FixedFloat64Array) \ V(FixedUint8ClampedElementsAccessor, UINT8_CLAMPED_ELEMENTS, \ FixedUint8ClampedArray) \ V(FixedBigUint64ElementsAccessor, BIGUINT64_ELEMENTS, FixedBigUint64Array) \ V(FixedBigInt64ElementsAccessor, BIGINT64_ELEMENTS, FixedBigInt64Array) template
class ElementsKindTraits { public: typedef FixedArrayBase BackingStore; }; #define ELEMENTS_TRAITS(Class, KindParam, Store) \ template <> \ class ElementsKindTraits
{ \ public: /* NOLINT */ \ static constexpr ElementsKind Kind = KindParam; \ typedef Store BackingStore; \ }; \ constexpr ElementsKind ElementsKindTraits
::Kind; ELEMENTS_LIST(ELEMENTS_TRAITS) #undef ELEMENTS_TRAITS V8_WARN_UNUSED_RESULT MaybeHandle
ThrowArrayLengthRangeError(Isolate* isolate) { THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidArrayLength), Object); } WriteBarrierMode GetWriteBarrierMode(ElementsKind kind) { if (IsSmiElementsKind(kind)) return SKIP_WRITE_BARRIER; if (IsDoubleElementsKind(kind)) return SKIP_WRITE_BARRIER; return UPDATE_WRITE_BARRIER; } void CopyObjectToObjectElements(Isolate* isolate, FixedArrayBase* from_base, ElementsKind from_kind, uint32_t from_start, FixedArrayBase* to_base, ElementsKind to_kind, uint32_t to_start, int raw_copy_size) { ReadOnlyRoots roots(isolate); DCHECK(to_base->map() != roots.fixed_cow_array_map()); DisallowHeapAllocation no_allocation; int copy_size = raw_copy_size; if (raw_copy_size < 0) { DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = Min(from_base->length() - from_start, to_base->length() - to_start); if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { int start = to_start + copy_size; int length = to_base->length() - start; if (length > 0) { MemsetPointer(FixedArray::cast(to_base)->data_start() + start, roots.the_hole_value(), length); } } } DCHECK((copy_size + static_cast
(to_start)) <= to_base->length() && (copy_size + static_cast
(from_start)) <= from_base->length()); if (copy_size == 0) return; FixedArray* from = FixedArray::cast(from_base); FixedArray* to = FixedArray::cast(to_base); DCHECK(IsSmiOrObjectElementsKind(from_kind)); DCHECK(IsSmiOrObjectElementsKind(to_kind)); WriteBarrierMode write_barrier_mode = (IsObjectElementsKind(from_kind) && IsObjectElementsKind(to_kind)) ? UPDATE_WRITE_BARRIER : SKIP_WRITE_BARRIER; for (int i = 0; i < copy_size; i++) { Object* value = from->get(from_start + i); to->set(to_start + i, value, write_barrier_mode); } } static void CopyDictionaryToObjectElements( Isolate* isolate, FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, ElementsKind to_kind, uint32_t to_start, int raw_copy_size) { DisallowHeapAllocation no_allocation; NumberDictionary* from = NumberDictionary::cast(from_base); int copy_size = raw_copy_size; if (raw_copy_size < 0) { DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = from->max_number_key() + 1 - from_start; if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { int start = to_start + copy_size; int length = to_base->length() - start; if (length > 0) { MemsetPointer(FixedArray::cast(to_base)->data_start() + start, ReadOnlyRoots(isolate).the_hole_value(), length); } } } DCHECK(to_base != from_base); DCHECK(IsSmiOrObjectElementsKind(to_kind)); if (copy_size == 0) return; FixedArray* to = FixedArray::cast(to_base); uint32_t to_length = to->length(); if (to_start + copy_size > to_length) { copy_size = to_length - to_start; } WriteBarrierMode write_barrier_mode = GetWriteBarrierMode(to_kind); for (int i = 0; i < copy_size; i++) { int entry = from->FindEntry(isolate, i + from_start); if (entry != NumberDictionary::kNotFound) { Object* value = from->ValueAt(entry); DCHECK(!value->IsTheHole(isolate)); to->set(i + to_start, value, write_barrier_mode); } else { to->set_the_hole(isolate, i + to_start); } } } // NOTE: this 
method violates the handlified function signature convention: // raw pointer parameters in the function that allocates. // See ElementsAccessorBase::CopyElements() for details. static void CopyDoubleToObjectElements(Isolate* isolate, FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, uint32_t to_start, int raw_copy_size) { int copy_size = raw_copy_size; if (raw_copy_size < 0) { DisallowHeapAllocation no_allocation; DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = Min(from_base->length() - from_start, to_base->length() - to_start); if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { // Also initialize the area that will be copied over since HeapNumber // allocation below can cause an incremental marking step, requiring all // existing heap objects to be propertly initialized. int start = to_start; int length = to_base->length() - start; if (length > 0) { MemsetPointer(FixedArray::cast(to_base)->data_start() + start, ReadOnlyRoots(isolate).the_hole_value(), length); } } } DCHECK((copy_size + static_cast
(to_start)) <= to_base->length() && (copy_size + static_cast
(from_start)) <= from_base->length()); if (copy_size == 0) return; // From here on, the code below could actually allocate. Therefore the raw // values are wrapped into handles. Handle
from(FixedDoubleArray::cast(from_base), isolate); Handle
to(FixedArray::cast(to_base), isolate); // Use an outer loop to not waste too much time on creating HandleScopes. // On the other hand we might overflow a single handle scope depending on // the copy_size. int offset = 0; while (offset < copy_size) { HandleScope scope(isolate); offset += 100; for (int i = offset - 100; i < offset && i < copy_size; ++i) { Handle
value = FixedDoubleArray::get(*from, i + from_start, isolate); to->set(i + to_start, *value, UPDATE_WRITE_BARRIER); } } } static void CopyDoubleToDoubleElements(FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, uint32_t to_start, int raw_copy_size) { DisallowHeapAllocation no_allocation; int copy_size = raw_copy_size; if (raw_copy_size < 0) { DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = Min(from_base->length() - from_start, to_base->length() - to_start); if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { for (int i = to_start + copy_size; i < to_base->length(); ++i) { FixedDoubleArray::cast(to_base)->set_the_hole(i); } } } DCHECK((copy_size + static_cast
(to_start)) <= to_base->length() && (copy_size + static_cast
(from_start)) <= from_base->length()); if (copy_size == 0) return; FixedDoubleArray* from = FixedDoubleArray::cast(from_base); FixedDoubleArray* to = FixedDoubleArray::cast(to_base); Address to_address = to->address() + FixedDoubleArray::kHeaderSize; Address from_address = from->address() + FixedDoubleArray::kHeaderSize; to_address += kDoubleSize * to_start; from_address += kDoubleSize * from_start; int words_per_double = (kDoubleSize / kPointerSize); CopyWords(reinterpret_cast
(to_address), reinterpret_cast
(from_address), static_cast
(words_per_double * copy_size)); } static void CopySmiToDoubleElements(FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, uint32_t to_start, int raw_copy_size) { DisallowHeapAllocation no_allocation; int copy_size = raw_copy_size; if (raw_copy_size < 0) { DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = from_base->length() - from_start; if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { for (int i = to_start + copy_size; i < to_base->length(); ++i) { FixedDoubleArray::cast(to_base)->set_the_hole(i); } } } DCHECK((copy_size + static_cast
(to_start)) <= to_base->length() && (copy_size + static_cast
(from_start)) <= from_base->length()); if (copy_size == 0) return; FixedArray* from = FixedArray::cast(from_base); FixedDoubleArray* to = FixedDoubleArray::cast(to_base); Object* the_hole = from->GetReadOnlyRoots().the_hole_value(); for (uint32_t from_end = from_start + static_cast
(copy_size); from_start < from_end; from_start++, to_start++) { Object* hole_or_smi = from->get(from_start); if (hole_or_smi == the_hole) { to->set_the_hole(to_start); } else { to->set(to_start, Smi::ToInt(hole_or_smi)); } } } static void CopyPackedSmiToDoubleElements(FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, uint32_t to_start, int packed_size, int raw_copy_size) { DisallowHeapAllocation no_allocation; int copy_size = raw_copy_size; uint32_t to_end; if (raw_copy_size < 0) { DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = packed_size - from_start; if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { to_end = to_base->length(); for (uint32_t i = to_start + copy_size; i < to_end; ++i) { FixedDoubleArray::cast(to_base)->set_the_hole(i); } } else { to_end = to_start + static_cast
(copy_size); } } else { to_end = to_start + static_cast
(copy_size); } DCHECK(static_cast
(to_end) <= to_base->length()); DCHECK(packed_size >= 0 && packed_size <= copy_size); DCHECK((copy_size + static_cast
(to_start)) <= to_base->length() && (copy_size + static_cast
(from_start)) <= from_base->length()); if (copy_size == 0) return; FixedArray* from = FixedArray::cast(from_base); FixedDoubleArray* to = FixedDoubleArray::cast(to_base); for (uint32_t from_end = from_start + static_cast
(packed_size); from_start < from_end; from_start++, to_start++) { Object* smi = from->get(from_start); DCHECK(!smi->IsTheHole()); to->set(to_start, Smi::ToInt(smi)); } } static void CopyObjectToDoubleElements(FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, uint32_t to_start, int raw_copy_size) { DisallowHeapAllocation no_allocation; int copy_size = raw_copy_size; if (raw_copy_size < 0) { DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = from_base->length() - from_start; if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { for (int i = to_start + copy_size; i < to_base->length(); ++i) { FixedDoubleArray::cast(to_base)->set_the_hole(i); } } } DCHECK((copy_size + static_cast
(to_start)) <= to_base->length() && (copy_size + static_cast
(from_start)) <= from_base->length()); if (copy_size == 0) return; FixedArray* from = FixedArray::cast(from_base); FixedDoubleArray* to = FixedDoubleArray::cast(to_base); Object* the_hole = from->GetReadOnlyRoots().the_hole_value(); for (uint32_t from_end = from_start + copy_size; from_start < from_end; from_start++, to_start++) { Object* hole_or_object = from->get(from_start); if (hole_or_object == the_hole) { to->set_the_hole(to_start); } else { to->set(to_start, hole_or_object->Number()); } } } static void CopyDictionaryToDoubleElements( Isolate* isolate, FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, uint32_t to_start, int raw_copy_size) { DisallowHeapAllocation no_allocation; NumberDictionary* from = NumberDictionary::cast(from_base); int copy_size = raw_copy_size; if (copy_size < 0) { DCHECK(copy_size == ElementsAccessor::kCopyToEnd || copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); copy_size = from->max_number_key() + 1 - from_start; if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { for (int i = to_start + copy_size; i < to_base->length(); ++i) { FixedDoubleArray::cast(to_base)->set_the_hole(i); } } } if (copy_size == 0) return; FixedDoubleArray* to = FixedDoubleArray::cast(to_base); uint32_t to_length = to->length(); if (to_start + copy_size > to_length) { copy_size = to_length - to_start; } for (int i = 0; i < copy_size; i++) { int entry = from->FindEntry(isolate, i + from_start); if (entry != NumberDictionary::kNotFound) { to->set(i + to_start, from->ValueAt(entry)->Number()); } else { to->set_the_hole(i + to_start); } } } static void TraceTopFrame(Isolate* isolate) { StackFrameIterator it(isolate); if (it.done()) { PrintF("unknown location (no JavaScript frames present)"); return; } StackFrame* raw_frame = it.frame(); if (raw_frame->is_internal()) { Code* current_code_object = isolate->heap()->GcSafeFindCodeForInnerPointer(raw_frame->pc()); if (current_code_object->builtin_index() == 
Builtins::kFunctionPrototypeApply) { PrintF("apply from "); it.Advance(); raw_frame = it.frame(); } } JavaScriptFrame::PrintTop(isolate, stdout, false, true); } static void SortIndices( Isolate* isolate, Handle
indices, uint32_t sort_size, WriteBarrierMode write_barrier_mode = UPDATE_WRITE_BARRIER) { // Use AtomicElement wrapper to ensure that std::sort uses atomic load and // store operations that are safe for concurrent marking. base::AtomicElement
* start = reinterpret_cast
*>( indices->GetFirstElementAddress()); std::sort(start, start + sort_size, [isolate](const base::AtomicElement
& elementA, const base::AtomicElement
& elementB) { const Object* a = elementA.value(); const Object* b = elementB.value(); if (a->IsSmi() || !a->IsUndefined(isolate)) { if (!b->IsSmi() && b->IsUndefined(isolate)) { return true; } return a->Number() < b->Number(); } return !b->IsSmi() && b->IsUndefined(isolate); }); if (write_barrier_mode != SKIP_WRITE_BARRIER) { FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(isolate->heap(), *indices, 0, sort_size); } } static Maybe
// Generic (slow-path) Array.prototype.includes: walks [start_from, length)
// via LookupIterator, treating absent properties as a match when searching
// for undefined. Returns Nothing<bool>() on exception.
static Maybe<bool> IncludesValueSlowPath(Isolate* isolate,
                                         Handle<JSReceiver> receiver,
                                         Handle<Object> value,
                                         uint32_t start_from, uint32_t length) {
  bool search_for_hole = value->IsUndefined(isolate);
  for (uint32_t k = start_from; k < length; ++k) {
    LookupIterator it(isolate, receiver, k);
    if (!it.IsFound()) {
      if (search_for_hole) return Just(true);
      continue;
    }
    Handle<Object> element_k;
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
                                     Object::GetProperty(&it),
                                     Nothing<bool>());

    if (value->SameValueZero(*element_k)) return Just(true);
  }

  return Just(false);
}
IndexOfValueSlowPath(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length) { for (uint32_t k = start_from; k < length; ++k) { LookupIterator it(isolate, receiver, k); if (!it.IsFound()) { continue; } Handle
element_k; ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, element_k, Object::GetProperty(&it), Nothing
()); if (value->StrictEquals(*element_k)) return Just
(k); } return Just
(-1); } // The InternalElementsAccessor is a helper class to expose otherwise protected // methods to its subclasses. Namely, we don't want to publicly expose methods // that take an entry (instead of an index) as an argument. class InternalElementsAccessor : public ElementsAccessor { public: explicit InternalElementsAccessor(const char* name) : ElementsAccessor(name) {} virtual uint32_t GetEntryForIndex(Isolate* isolate, JSObject* holder, FixedArrayBase* backing_store, uint32_t index) = 0; virtual PropertyDetails GetDetails(JSObject* holder, uint32_t entry) = 0; }; // Base class for element handler implementations. Contains the // the common logic for objects with different ElementsKinds. // Subclasses must specialize method for which the element // implementation differs from the base class implementation. // // This class is intended to be used in the following way: // // class SomeElementsAccessor : // public ElementsAccessorBase
{ // ... // } // // This is an example of the Curiously Recurring Template Pattern (see // http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use // CRTP to guarantee aggressive compile time optimizations (i.e. inlining and // specialization of SomeElementsAccessor methods). template
class ElementsAccessorBase : public InternalElementsAccessor { public: explicit ElementsAccessorBase(const char* name) : InternalElementsAccessor(name) {} typedef ElementsTraitsParam ElementsTraits; typedef typename ElementsTraitsParam::BackingStore BackingStore; static ElementsKind kind() { return ElementsTraits::Kind; } static void ValidateContents(JSObject* holder, int length) {} static void ValidateImpl(JSObject* holder) { FixedArrayBase* fixed_array_base = holder->elements(); if (!fixed_array_base->IsHeapObject()) return; // Arrays that have been shifted in place can't be verified. if (fixed_array_base->IsFiller()) return; int length = 0; if (holder->IsJSArray()) { Object* length_obj = JSArray::cast(holder)->length(); if (length_obj->IsSmi()) { length = Smi::ToInt(length_obj); } } else { length = fixed_array_base->length(); } Subclass::ValidateContents(holder, length); } void Validate(JSObject* holder) final { DisallowHeapAllocation no_gc; Subclass::ValidateImpl(holder); } static bool IsPackedImpl(JSObject* holder, FixedArrayBase* backing_store, uint32_t start, uint32_t end) { DisallowHeapAllocation no_gc; if (IsFastPackedElementsKind(kind())) return true; Isolate* isolate = holder->GetIsolate(); for (uint32_t i = start; i < end; i++) { if (!Subclass::HasElementImpl(isolate, holder, i, backing_store, ALL_PROPERTIES)) { return false; } } return true; } static void TryTransitionResultArrayToPacked(Handle
array) { if (!IsHoleyElementsKind(kind())) return; Handle
backing_store(array->elements(), array->GetIsolate()); int length = Smi::ToInt(array->length()); if (!Subclass::IsPackedImpl(*array, *backing_store, 0, length)) return; ElementsKind packed_kind = GetPackedElementsKind(kind()); Handle
new_map = JSObject::GetElementsTransitionMap(array, packed_kind); JSObject::MigrateToMap(array, new_map); if (FLAG_trace_elements_transitions) { JSObject::PrintElementsTransition(stdout, array, kind(), backing_store, packed_kind, backing_store); } } bool HasElement(JSObject* holder, uint32_t index, FixedArrayBase* backing_store, PropertyFilter filter) final { return Subclass::HasElementImpl(holder->GetIsolate(), holder, index, backing_store, filter); } static bool HasElementImpl(Isolate* isolate, JSObject* holder, uint32_t index, FixedArrayBase* backing_store, PropertyFilter filter = ALL_PROPERTIES) { return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index, filter) != kMaxUInt32; } bool HasEntry(JSObject* holder, uint32_t entry) final { return Subclass::HasEntryImpl(holder->GetIsolate(), holder->elements(), entry); } static bool HasEntryImpl(Isolate* isolate, FixedArrayBase* backing_store, uint32_t entry) { UNIMPLEMENTED(); } bool HasAccessors(JSObject* holder) final { return Subclass::HasAccessorsImpl(holder, holder->elements()); } static bool HasAccessorsImpl(JSObject* holder, FixedArrayBase* backing_store) { return false; } Handle
Get(Handle
holder, uint32_t entry) final { return Subclass::GetInternalImpl(holder, entry); } static Handle
GetInternalImpl(Handle
holder, uint32_t entry) { return Subclass::GetImpl(holder->GetIsolate(), holder->elements(), entry); } static Handle
GetImpl(Isolate* isolate, FixedArrayBase* backing_store, uint32_t entry) { uint32_t index = GetIndexForEntryImpl(backing_store, entry); return handle(BackingStore::cast(backing_store)->get(index), isolate); } void Set(Handle
holder, uint32_t entry, Object* value) final { Subclass::SetImpl(holder, entry, value); } void Reconfigure(Handle
object, Handle
store, uint32_t entry, Handle
value, PropertyAttributes attributes) final { Subclass::ReconfigureImpl(object, store, entry, value, attributes); } static void ReconfigureImpl(Handle
object, Handle
store, uint32_t entry, Handle
value, PropertyAttributes attributes) { UNREACHABLE(); } void Add(Handle
object, uint32_t index, Handle
value, PropertyAttributes attributes, uint32_t new_capacity) final { Subclass::AddImpl(object, index, value, attributes, new_capacity); } static void AddImpl(Handle
object, uint32_t index, Handle
value, PropertyAttributes attributes, uint32_t new_capacity) { UNREACHABLE(); } uint32_t Push(Handle
receiver, Arguments* args, uint32_t push_size) final { return Subclass::PushImpl(receiver, args, push_size); } static uint32_t PushImpl(Handle
receiver, Arguments* args, uint32_t push_sized) { UNREACHABLE(); } uint32_t Unshift(Handle
receiver, Arguments* args, uint32_t unshift_size) final { return Subclass::UnshiftImpl(receiver, args, unshift_size); } static uint32_t UnshiftImpl(Handle
receiver, Arguments* args, uint32_t unshift_size) { UNREACHABLE(); } Handle
Slice(Handle
receiver, uint32_t start, uint32_t end) final { return Subclass::SliceImpl(receiver, start, end); } static Handle
SliceImpl(Handle
receiver, uint32_t start, uint32_t end) { UNREACHABLE(); } Handle
Splice(Handle
receiver, uint32_t start, uint32_t delete_count, Arguments* args, uint32_t add_count) final { return Subclass::SpliceImpl(receiver, start, delete_count, args, add_count); } static Handle
SpliceImpl(Handle
receiver, uint32_t start, uint32_t delete_count, Arguments* args, uint32_t add_count) { UNREACHABLE(); } Handle
Pop(Handle
receiver) final { return Subclass::PopImpl(receiver); } static Handle
PopImpl(Handle
receiver) { UNREACHABLE(); } Handle
Shift(Handle
receiver) final { return Subclass::ShiftImpl(receiver); } static Handle
ShiftImpl(Handle
receiver) { UNREACHABLE(); } void SetLength(Handle
array, uint32_t length) final { Subclass::SetLengthImpl(array->GetIsolate(), array, length, handle(array->elements(), array->GetIsolate())); } static void SetLengthImpl(Isolate* isolate, Handle
array, uint32_t length, Handle
backing_store) { DCHECK(!array->SetLengthWouldNormalize(length)); DCHECK(IsFastElementsKind(array->GetElementsKind())); uint32_t old_length = 0; CHECK(array->length()->ToArrayIndex(&old_length)); if (old_length < length) { ElementsKind kind = array->GetElementsKind(); if (!IsHoleyElementsKind(kind)) { kind = GetHoleyElementsKind(kind); JSObject::TransitionElementsKind(array, kind); } } // Check whether the backing store should be shrunk. uint32_t capacity = backing_store->length(); old_length = Min(old_length, capacity); if (length == 0) { array->initialize_elements(); } else if (length <= capacity) { if (IsSmiOrObjectElementsKind(kind())) { JSObject::EnsureWritableFastElements(array); if (array->elements() != *backing_store) { backing_store = handle(array->elements(), isolate); } } if (2 * length + JSObject::kMinAddedElementsCapacity <= capacity) { // If more than half the elements won't be used, trim the array. // Do not trim from short arrays to prevent frequent trimming on // repeated pop operations. // Leave some space to allow for subsequent push operations. int elements_to_trim = length + 1 == old_length ? (capacity - length) / 2 : capacity - length; isolate->heap()->RightTrimFixedArray(*backing_store, elements_to_trim); // Fill the non-trimmed elements with holes. BackingStore::cast(*backing_store) ->FillWithHoles(length, std::min(old_length, capacity - elements_to_trim)); } else { // Otherwise, fill the unused tail with holes. BackingStore::cast(*backing_store)->FillWithHoles(length, old_length); } } else { // Check whether the backing store should be expanded. 
capacity = Max(length, JSObject::NewElementsCapacity(capacity)); Subclass::GrowCapacityAndConvertImpl(array, capacity); } array->set_length(Smi::FromInt(length)); JSObject::ValidateElements(*array); } uint32_t NumberOfElements(JSObject* receiver) final { return Subclass::NumberOfElementsImpl(receiver, receiver->elements()); } static uint32_t NumberOfElementsImpl(JSObject* receiver, FixedArrayBase* backing_store) { UNREACHABLE(); } static uint32_t GetMaxIndex(JSObject* receiver, FixedArrayBase* elements) { if (receiver->IsJSArray()) { DCHECK(JSArray::cast(receiver)->length()->IsSmi()); return static_cast
( Smi::ToInt(JSArray::cast(receiver)->length())); } return Subclass::GetCapacityImpl(receiver, elements); } static uint32_t GetMaxNumberOfEntries(JSObject* receiver, FixedArrayBase* elements) { return Subclass::GetMaxIndex(receiver, elements); } static Handle
ConvertElementsWithCapacity( Handle
object, Handle
old_elements, ElementsKind from_kind, uint32_t capacity) { return ConvertElementsWithCapacity( object, old_elements, from_kind, capacity, 0, 0, ElementsAccessor::kCopyToEndAndInitializeToHole); } static Handle
ConvertElementsWithCapacity( Handle
object, Handle
old_elements, ElementsKind from_kind, uint32_t capacity, int copy_size) { return ConvertElementsWithCapacity(object, old_elements, from_kind, capacity, 0, 0, copy_size); } static Handle
ConvertElementsWithCapacity( Handle
object, Handle
old_elements, ElementsKind from_kind, uint32_t capacity, uint32_t src_index, uint32_t dst_index, int copy_size) { Isolate* isolate = object->GetIsolate(); Handle
new_elements; if (IsDoubleElementsKind(kind())) { new_elements = isolate->factory()->NewFixedDoubleArray(capacity); } else { new_elements = isolate->factory()->NewUninitializedFixedArray(capacity); } int packed_size = kPackedSizeNotKnown; if (IsFastPackedElementsKind(from_kind) && object->IsJSArray()) { packed_size = Smi::ToInt(JSArray::cast(*object)->length()); } Subclass::CopyElementsImpl(isolate, *old_elements, src_index, *new_elements, from_kind, dst_index, packed_size, copy_size); return new_elements; } static void TransitionElementsKindImpl(Handle
object, Handle
to_map) { Handle
from_map = handle(object->map(), object->GetIsolate()); ElementsKind from_kind = from_map->elements_kind(); ElementsKind to_kind = to_map->elements_kind(); if (IsHoleyElementsKind(from_kind)) { to_kind = GetHoleyElementsKind(to_kind); } if (from_kind != to_kind) { // This method should never be called for any other case. DCHECK(IsFastElementsKind(from_kind)); DCHECK(IsFastElementsKind(to_kind)); DCHECK_NE(TERMINAL_FAST_ELEMENTS_KIND, from_kind); Handle
from_elements(object->elements(), object->GetIsolate()); if (object->elements() == object->GetReadOnlyRoots().empty_fixed_array() || IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind)) { // No change is needed to the elements() buffer, the transition // only requires a map change. JSObject::MigrateToMap(object, to_map); } else { DCHECK( (IsSmiElementsKind(from_kind) && IsDoubleElementsKind(to_kind)) || (IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind))); uint32_t capacity = static_cast
(object->elements()->length()); Handle
elements = ConvertElementsWithCapacity( object, from_elements, from_kind, capacity); JSObject::SetMapAndElements(object, to_map, elements); } if (FLAG_trace_elements_transitions) { JSObject::PrintElementsTransition( stdout, object, from_kind, from_elements, to_kind, handle(object->elements(), object->GetIsolate())); } } } static void GrowCapacityAndConvertImpl(Handle
object, uint32_t capacity) { ElementsKind from_kind = object->GetElementsKind(); if (IsSmiOrObjectElementsKind(from_kind)) { // Array optimizations rely on the prototype lookups of Array objects // always returning undefined. If there is a store to the initial // prototype object, make sure all of these optimizations are invalidated. object->GetIsolate()->UpdateNoElementsProtectorOnSetLength(object); } Handle
old_elements(object->elements(), object->GetIsolate()); // This method should only be called if there's a reason to update the // elements. DCHECK(IsDoubleElementsKind(from_kind) != IsDoubleElementsKind(kind()) || IsDictionaryElementsKind(from_kind) || static_cast
(old_elements->length()) < capacity); Subclass::BasicGrowCapacityAndConvertImpl(object, old_elements, from_kind, kind(), capacity); } static void BasicGrowCapacityAndConvertImpl( Handle
object, Handle
old_elements, ElementsKind from_kind, ElementsKind to_kind, uint32_t capacity) { Handle
elements = ConvertElementsWithCapacity(object, old_elements, from_kind, capacity); if (IsHoleyElementsKind(from_kind)) { to_kind = GetHoleyElementsKind(to_kind); } Handle
new_map = JSObject::GetElementsTransitionMap(object, to_kind); JSObject::SetMapAndElements(object, new_map, elements); // Transition through the allocation site as well if present. JSObject::UpdateAllocationSite(object, to_kind); if (FLAG_trace_elements_transitions) { JSObject::PrintElementsTransition(stdout, object, from_kind, old_elements, to_kind, elements); } } void TransitionElementsKind(Handle
object, Handle
map) final { Subclass::TransitionElementsKindImpl(object, map); } void GrowCapacityAndConvert(Handle
object, uint32_t capacity) final { Subclass::GrowCapacityAndConvertImpl(object, capacity); } bool GrowCapacity(Handle
object, uint32_t index) final { // This function is intended to be called from optimized code. We don't // want to trigger lazy deopts there, so refuse to handle cases that would. if (object->map()->is_prototype_map() || object->WouldConvertToSlowElements(index)) { return false; } Handle
old_elements(object->elements(), object->GetIsolate()); uint32_t new_capacity = JSObject::NewElementsCapacity(index + 1); DCHECK(static_cast
(old_elements->length()) < new_capacity); Handle
elements = ConvertElementsWithCapacity(object, old_elements, kind(), new_capacity); DCHECK_EQ(object->GetElementsKind(), kind()); // Transition through the allocation site as well if present. if (JSObject::UpdateAllocationSite
( object, kind())) { return false; } object->set_elements(*elements); return true; } void Delete(Handle
obj, uint32_t entry) final { Subclass::DeleteImpl(obj, entry); } static void CopyElementsImpl(Isolate* isolate, FixedArrayBase* from, uint32_t from_start, FixedArrayBase* to, ElementsKind from_kind, uint32_t to_start, int packed_size, int copy_size) { UNREACHABLE(); } void CopyElements(JSObject* from_holder, uint32_t from_start, ElementsKind from_kind, Handle
to, uint32_t to_start, int copy_size) final { int packed_size = kPackedSizeNotKnown; bool is_packed = IsFastPackedElementsKind(from_kind) && from_holder->IsJSArray(); if (is_packed) { packed_size = Smi::ToInt(JSArray::cast(from_holder)->length()); if (copy_size >= 0 && packed_size > copy_size) { packed_size = copy_size; } } FixedArrayBase* from = from_holder->elements(); // NOTE: the Subclass::CopyElementsImpl() methods // violate the handlified function signature convention: // raw pointer parameters in the function that allocates. This is done // intentionally to avoid ArrayConcat() builtin performance degradation. // // Details: The idea is that allocations actually happen only in case of // copying from object with fast double elements to object with object // elements. In all the other cases there are no allocations performed and // handle creation causes noticeable performance degradation of the builtin. Subclass::CopyElementsImpl(from_holder->GetIsolate(), from, from_start, *to, from_kind, to_start, packed_size, copy_size); } void CopyElements(Isolate* isolate, Handle
source, ElementsKind source_kind, Handle
destination, int size) { Subclass::CopyElementsImpl(isolate, *source, 0, *destination, source_kind, 0, kPackedSizeNotKnown, size); } void CopyTypedArrayElementsSlice(JSTypedArray* source, JSTypedArray* destination, size_t start, size_t end) { Subclass::CopyTypedArrayElementsSliceImpl(source, destination, start, end); } static void CopyTypedArrayElementsSliceImpl(JSTypedArray* source, JSTypedArray* destination, size_t start, size_t end) { UNREACHABLE(); } Object* CopyElements(Handle
source, Handle
destination, size_t length, uint32_t offset) final { return Subclass::CopyElementsHandleImpl(source, destination, length, offset); } static Object* CopyElementsHandleImpl(Handle
source, Handle
destination, size_t length, uint32_t offset) { UNREACHABLE(); } Handle
Normalize(Handle
object) final { return Subclass::NormalizeImpl( object, handle(object->elements(), object->GetIsolate())); } static Handle
NormalizeImpl( Handle
object, Handle
elements) { UNREACHABLE(); } Maybe
CollectValuesOrEntries(Isolate* isolate, Handle
object, Handle
values_or_entries, bool get_entries, int* nof_items, PropertyFilter filter) { return Subclass::CollectValuesOrEntriesImpl( isolate, object, values_or_entries, get_entries, nof_items, filter); } static Maybe
CollectValuesOrEntriesImpl( Isolate* isolate, Handle
object, Handle
values_or_entries, bool get_entries, int* nof_items, PropertyFilter filter) { DCHECK_EQ(*nof_items, 0); KeyAccumulator accumulator(isolate, KeyCollectionMode::kOwnOnly, ALL_PROPERTIES); Subclass::CollectElementIndicesImpl( object, handle(object->elements(), isolate), &accumulator); Handle
keys = accumulator.GetKeys(); int count = 0; int i = 0; ElementsKind original_elements_kind = object->GetElementsKind(); for (; i < keys->length(); ++i) { Handle
key(keys->get(i), isolate); uint32_t index; if (!key->ToUint32(&index)) continue; DCHECK_EQ(object->GetElementsKind(), original_elements_kind); uint32_t entry = Subclass::GetEntryForIndexImpl( isolate, *object, object->elements(), index, filter); if (entry == kMaxUInt32) continue; PropertyDetails details = Subclass::GetDetailsImpl(*object, entry); Handle
value; if (details.kind() == kData) { value = Subclass::GetImpl(isolate, object->elements(), entry); } else { // This might modify the elements and/or change the elements kind. LookupIterator it(isolate, object, index, LookupIterator::OWN); ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, value, Object::GetProperty(&it), Nothing
()); } if (get_entries) value = MakeEntryPair(isolate, index, value); values_or_entries->set(count++, *value); if (object->GetElementsKind() != original_elements_kind) break; } // Slow path caused by changes in elements kind during iteration. for (; i < keys->length(); i++) { Handle
key(keys->get(i), isolate); uint32_t index; if (!key->ToUint32(&index)) continue; if (filter & ONLY_ENUMERABLE) { InternalElementsAccessor* accessor = reinterpret_cast
( object->GetElementsAccessor()); uint32_t entry = accessor->GetEntryForIndex(isolate, *object, object->elements(), index); if (entry == kMaxUInt32) continue; PropertyDetails details = accessor->GetDetails(*object, entry); if (!details.IsEnumerable()) continue; } Handle
value; LookupIterator it(isolate, object, index, LookupIterator::OWN); ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, value, Object::GetProperty(&it), Nothing
()); if (get_entries) value = MakeEntryPair(isolate, index, value); values_or_entries->set(count++, *value); } *nof_items = count; return Just(true); } void CollectElementIndices(Handle
object, Handle
backing_store, KeyAccumulator* keys) final { if (keys->filter() & ONLY_ALL_CAN_READ) return; Subclass::CollectElementIndicesImpl(object, backing_store, keys); } static void CollectElementIndicesImpl(Handle
object, Handle
backing_store, KeyAccumulator* keys) { DCHECK_NE(DICTIONARY_ELEMENTS, kind()); // Non-dictionary elements can't have all-can-read accessors. uint32_t length = Subclass::GetMaxIndex(*object, *backing_store); PropertyFilter filter = keys->filter(); Isolate* isolate = keys->isolate(); Factory* factory = isolate->factory(); for (uint32_t i = 0; i < length; i++) { if (Subclass::HasElementImpl(isolate, *object, i, *backing_store, filter)) { keys->AddKey(factory->NewNumberFromUint(i)); } } } static Handle
DirectCollectElementIndicesImpl( Isolate* isolate, Handle
object, Handle
backing_store, GetKeysConversion convert, PropertyFilter filter, Handle
list, uint32_t* nof_indices, uint32_t insertion_index = 0) { uint32_t length = Subclass::GetMaxIndex(*object, *backing_store); uint32_t const kMaxStringTableEntries = isolate->heap()->MaxNumberToStringCacheSize(); for (uint32_t i = 0; i < length; i++) { if (Subclass::HasElementImpl(isolate, *object, i, *backing_store, filter)) { if (convert == GetKeysConversion::kConvertToString) { bool use_cache = i < kMaxStringTableEntries; Handle
index_string = isolate->factory()->Uint32ToString(i, use_cache); list->set(insertion_index, *index_string); } else { list->set(insertion_index, Smi::FromInt(i), SKIP_WRITE_BARRIER); } insertion_index++; } } *nof_indices = insertion_index; return list; } MaybeHandle
PrependElementIndices( Handle
object, Handle
backing_store, Handle
keys, GetKeysConversion convert, PropertyFilter filter) final { return Subclass::PrependElementIndicesImpl(object, backing_store, keys, convert, filter); } static MaybeHandle
PrependElementIndicesImpl( Handle
object, Handle
backing_store, Handle
keys, GetKeysConversion convert, PropertyFilter filter) { Isolate* isolate = object->GetIsolate(); uint32_t nof_property_keys = keys->length(); uint32_t initial_list_length = Subclass::GetMaxNumberOfEntries(*object, *backing_store); initial_list_length += nof_property_keys; if (initial_list_length > FixedArray::kMaxLength || initial_list_length < nof_property_keys) { return isolate->Throw
(isolate->factory()->NewRangeError( MessageTemplate::kInvalidArrayLength)); } // Collect the element indices into a new list. MaybeHandle
raw_array = isolate->factory()->TryNewFixedArray(initial_list_length); Handle
combined_keys; // If we have a holey backing store try to precisely estimate the backing // store size as a last emergency measure if we cannot allocate the big // array. if (!raw_array.ToHandle(&combined_keys)) { if (IsHoleyOrDictionaryElementsKind(kind())) { // If we overestimate the result list size we might end up in the // large-object space which doesn't free memory on shrinking the list. // Hence we try to estimate the final size for holey backing stores more // precisely here. initial_list_length = Subclass::NumberOfElementsImpl(*object, *backing_store); initial_list_length += nof_property_keys; } combined_keys = isolate->factory()->NewFixedArray(initial_list_length); } uint32_t nof_indices = 0; bool needs_sorting = IsDictionaryElementsKind(kind()) || IsSloppyArgumentsElementsKind(kind()); combined_keys = Subclass::DirectCollectElementIndicesImpl( isolate, object, backing_store, needs_sorting ? GetKeysConversion::kKeepNumbers : convert, filter, combined_keys, &nof_indices); if (needs_sorting) { SortIndices(isolate, combined_keys, nof_indices); // Indices from dictionary elements should only be converted after // sorting. if (convert == GetKeysConversion::kConvertToString) { for (uint32_t i = 0; i < nof_indices; i++) { Handle
index_string = isolate->factory()->Uint32ToString( combined_keys->get(i)->Number()); combined_keys->set(i, *index_string); } } } // Copy over the passed-in property keys. CopyObjectToObjectElements(isolate, *keys, PACKED_ELEMENTS, 0, *combined_keys, PACKED_ELEMENTS, nof_indices, nof_property_keys); // For holey elements and arguments we might have to shrink the collected // keys since the estimates might be off. if (IsHoleyOrDictionaryElementsKind(kind()) || IsSloppyArgumentsElementsKind(kind())) { // Shrink combined_keys to the final size. int final_size = nof_indices + nof_property_keys; DCHECK_LE(final_size, combined_keys->length()); return FixedArray::ShrinkOrEmpty(isolate, combined_keys, final_size); } return combined_keys; } void AddElementsToKeyAccumulator(Handle
receiver, KeyAccumulator* accumulator, AddKeyConversion convert) final { Subclass::AddElementsToKeyAccumulatorImpl(receiver, accumulator, convert); } static uint32_t GetCapacityImpl(JSObject* holder, FixedArrayBase* backing_store) { return backing_store->length(); } uint32_t GetCapacity(JSObject* holder, FixedArrayBase* backing_store) final { return Subclass::GetCapacityImpl(holder, backing_store); } static Object* FillImpl(Handle
receiver, Handle
obj_value, uint32_t start, uint32_t end) { UNREACHABLE(); } Object* Fill(Handle
receiver, Handle
obj_value, uint32_t start, uint32_t end) { return Subclass::FillImpl(receiver, obj_value, start, end); } static Maybe
IncludesValueImpl(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length) { return IncludesValueSlowPath(isolate, receiver, value, start_from, length); } Maybe
IncludesValue(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length) final { return Subclass::IncludesValueImpl(isolate, receiver, value, start_from, length); } static Maybe
IndexOfValueImpl(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length) { return IndexOfValueSlowPath(isolate, receiver, value, start_from, length); } Maybe
IndexOfValue(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length) final { return Subclass::IndexOfValueImpl(isolate, receiver, value, start_from, length); } static Maybe
LastIndexOfValueImpl(Handle
receiver, Handle
value, uint32_t start_from) { UNREACHABLE(); } Maybe
LastIndexOfValue(Handle
receiver, Handle
value, uint32_t start_from) final { return Subclass::LastIndexOfValueImpl(receiver, value, start_from); } static void ReverseImpl(JSObject* receiver) { UNREACHABLE(); } void Reverse(JSObject* receiver) final { Subclass::ReverseImpl(receiver); } static uint32_t GetIndexForEntryImpl(FixedArrayBase* backing_store, uint32_t entry) { return entry; } static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject* holder, FixedArrayBase* backing_store, uint32_t index, PropertyFilter filter) { DCHECK(IsFastElementsKind(kind())); uint32_t length = Subclass::GetMaxIndex(holder, backing_store); if (IsHoleyElementsKind(kind())) { return index < length && !BackingStore::cast(backing_store) ->is_the_hole(isolate, index) ? index : kMaxUInt32; } else { return index < length ? index : kMaxUInt32; } } uint32_t GetEntryForIndex(Isolate* isolate, JSObject* holder, FixedArrayBase* backing_store, uint32_t index) final { return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index, ALL_PROPERTIES); } static PropertyDetails GetDetailsImpl(FixedArrayBase* backing_store, uint32_t entry) { return PropertyDetails(kData, NONE, PropertyCellType::kNoCell); } static PropertyDetails GetDetailsImpl(JSObject* holder, uint32_t entry) { return PropertyDetails(kData, NONE, PropertyCellType::kNoCell); } PropertyDetails GetDetails(JSObject* holder, uint32_t entry) final { return Subclass::GetDetailsImpl(holder, entry); } Handle
CreateListFromArrayLike(Isolate* isolate, Handle
object, uint32_t length) final { return Subclass::CreateListFromArrayLikeImpl(isolate, object, length); }; static Handle
CreateListFromArrayLikeImpl(Isolate* isolate, Handle
object, uint32_t length) { UNREACHABLE(); } private: DISALLOW_COPY_AND_ASSIGN(ElementsAccessorBase); }; class DictionaryElementsAccessor : public ElementsAccessorBase
> { public: explicit DictionaryElementsAccessor(const char* name) : ElementsAccessorBase
>(name) {} static uint32_t GetMaxIndex(JSObject* receiver, FixedArrayBase* elements) { // We cannot properly estimate this for dictionaries. UNREACHABLE(); } static uint32_t GetMaxNumberOfEntries(JSObject* receiver, FixedArrayBase* backing_store) { return NumberOfElementsImpl(receiver, backing_store); } static uint32_t NumberOfElementsImpl(JSObject* receiver, FixedArrayBase* backing_store) { NumberDictionary* dict = NumberDictionary::cast(backing_store); return dict->NumberOfElements(); } static void SetLengthImpl(Isolate* isolate, Handle
array, uint32_t length, Handle
backing_store) { Handle
dict = Handle
::cast(backing_store); int capacity = dict->Capacity(); uint32_t old_length = 0; CHECK(array->length()->ToArrayLength(&old_length)); { DisallowHeapAllocation no_gc; ReadOnlyRoots roots(isolate); if (length < old_length) { if (dict->requires_slow_elements()) { // Find last non-deletable element in range of elements to be // deleted and adjust range accordingly. for (int entry = 0; entry < capacity; entry++) { Object* index = dict->KeyAt(entry); if (dict->IsKey(roots, index)) { uint32_t number = static_cast
(index->Number()); if (length <= number && number < old_length) { PropertyDetails details = dict->DetailsAt(entry); if (!details.IsConfigurable()) length = number + 1; } } } } if (length == 0) { // Flush the backing store. array->initialize_elements(); } else { // Remove elements that should be deleted. int removed_entries = 0; for (int entry = 0; entry < capacity; entry++) { Object* index = dict->KeyAt(entry); if (dict->IsKey(roots, index)) { uint32_t number = static_cast
(index->Number()); if (length <= number && number < old_length) { dict->ClearEntry(isolate, entry); removed_entries++; } } } if (removed_entries > 0) { // Update the number of elements. dict->ElementsRemoved(removed_entries); } } } } Handle
length_obj = isolate->factory()->NewNumberFromUint(length); array->set_length(*length_obj); } static void CopyElementsImpl(Isolate* isolate, FixedArrayBase* from, uint32_t from_start, FixedArrayBase* to, ElementsKind from_kind, uint32_t to_start, int packed_size, int copy_size) { UNREACHABLE(); } static Handle
SliceImpl(Handle
receiver, uint32_t start, uint32_t end) { Isolate* isolate = receiver->GetIsolate(); uint32_t result_length = end < start ? 0u : end - start; // Result must also be a dictionary. Handle
result_array = isolate->factory()->NewJSArray(0, HOLEY_ELEMENTS); JSObject::NormalizeElements(result_array); result_array->set_length(Smi::FromInt(result_length)); Handle
source_dict( NumberDictionary::cast(receiver->elements()), isolate); int entry_count = source_dict->Capacity(); ReadOnlyRoots roots(isolate); for (int i = 0; i < entry_count; i++) { Object* key = source_dict->KeyAt(i); if (!source_dict->ToKey(roots, i, &key)) continue; uint64_t key_value = NumberToInt64(key); if (key_value >= start && key_value < end) { Handle
dest_dict( NumberDictionary::cast(result_array->elements()), isolate); Handle
value(source_dict->ValueAt(i), isolate); PropertyDetails details = source_dict->DetailsAt(i); PropertyAttributes attr = details.attributes(); AddImpl(result_array, static_cast
(key_value) - start, value, attr, 0); } } return result_array; } static void DeleteImpl(Handle
obj, uint32_t entry) { Handle
dict(NumberDictionary::cast(obj->elements()), obj->GetIsolate()); dict = NumberDictionary::DeleteEntry(obj->GetIsolate(), dict, entry); obj->set_elements(*dict); } static bool HasAccessorsImpl(JSObject* holder, FixedArrayBase* backing_store) { DisallowHeapAllocation no_gc; NumberDictionary* dict = NumberDictionary::cast(backing_store); if (!dict->requires_slow_elements()) return false; int capacity = dict->Capacity(); ReadOnlyRoots roots = holder->GetReadOnlyRoots(); for (int i = 0; i < capacity; i++) { Object* key = dict->KeyAt(i); if (!dict->IsKey(roots, key)) continue; PropertyDetails details = dict->DetailsAt(i); if (details.kind() == kAccessor) return true; } return false; } static Object* GetRaw(FixedArrayBase* store, uint32_t entry) { NumberDictionary* backing_store = NumberDictionary::cast(store); return backing_store->ValueAt(entry); } static Handle
GetImpl(Isolate* isolate, FixedArrayBase* backing_store, uint32_t entry) { return handle(GetRaw(backing_store, entry), isolate); } static inline void SetImpl(Handle
holder, uint32_t entry, Object* value) { SetImpl(holder->elements(), entry, value); } static inline void SetImpl(FixedArrayBase* backing_store, uint32_t entry, Object* value) { NumberDictionary::cast(backing_store)->ValueAtPut(entry, value); } static void ReconfigureImpl(Handle
object, Handle
store, uint32_t entry, Handle
value, PropertyAttributes attributes) { NumberDictionary* dictionary = NumberDictionary::cast(*store); if (attributes != NONE) object->RequireSlowElements(dictionary); dictionary->ValueAtPut(entry, *value); PropertyDetails details = dictionary->DetailsAt(entry); details = PropertyDetails(kData, attributes, PropertyCellType::kNoCell, details.dictionary_index()); dictionary->DetailsAtPut(object->GetIsolate(), entry, details); } static void AddImpl(Handle
object, uint32_t index, Handle
value, PropertyAttributes attributes, uint32_t new_capacity) { PropertyDetails details(kData, attributes, PropertyCellType::kNoCell); Handle
dictionary = object->HasFastElements() || object->HasFastStringWrapperElements() ? JSObject::NormalizeElements(object) : handle(NumberDictionary::cast(object->elements()), object->GetIsolate()); Handle
new_dictionary = NumberDictionary::Add( object->GetIsolate(), dictionary, index, value, details); new_dictionary->UpdateMaxNumberKey(index, object); if (attributes != NONE) object->RequireSlowElements(*new_dictionary); if (dictionary.is_identical_to(new_dictionary)) return; object->set_elements(*new_dictionary); } static bool HasEntryImpl(Isolate* isolate, FixedArrayBase* store, uint32_t entry) { DisallowHeapAllocation no_gc; NumberDictionary* dict = NumberDictionary::cast(store); Object* index = dict->KeyAt(entry); return !index->IsTheHole(isolate); } static uint32_t GetIndexForEntryImpl(FixedArrayBase* store, uint32_t entry) { DisallowHeapAllocation no_gc; NumberDictionary* dict = NumberDictionary::cast(store); uint32_t result = 0; CHECK(dict->KeyAt(entry)->ToArrayIndex(&result)); return result; } static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject* holder, FixedArrayBase* store, uint32_t index, PropertyFilter filter) { DisallowHeapAllocation no_gc; NumberDictionary* dictionary = NumberDictionary::cast(store); int entry = dictionary->FindEntry(isolate, index); if (entry == NumberDictionary::kNotFound) return kMaxUInt32; if (filter != ALL_PROPERTIES) { PropertyDetails details = dictionary->DetailsAt(entry); PropertyAttributes attr = details.attributes(); if ((attr & filter) != 0) return kMaxUInt32; } return static_cast
(entry); } static PropertyDetails GetDetailsImpl(JSObject* holder, uint32_t entry) { return GetDetailsImpl(holder->elements(), entry); } static PropertyDetails GetDetailsImpl(FixedArrayBase* backing_store, uint32_t entry) { return NumberDictionary::cast(backing_store)->DetailsAt(entry); } static uint32_t FilterKey(Handle
dictionary, int entry, Object* raw_key, PropertyFilter filter) { DCHECK(raw_key->IsNumber()); DCHECK_LE(raw_key->Number(), kMaxUInt32); PropertyDetails details = dictionary->DetailsAt(entry); PropertyAttributes attr = details.attributes(); if ((attr & filter) != 0) return kMaxUInt32; return static_cast
(raw_key->Number()); } static uint32_t GetKeyForEntryImpl(Isolate* isolate, Handle
dictionary, int entry, PropertyFilter filter) { DisallowHeapAllocation no_gc; Object* raw_key = dictionary->KeyAt(entry); if (!dictionary->IsKey(ReadOnlyRoots(isolate), raw_key)) return kMaxUInt32; return FilterKey(dictionary, entry, raw_key, filter); } static void CollectElementIndicesImpl(Handle
object, Handle
backing_store, KeyAccumulator* keys) { if (keys->filter() & SKIP_STRINGS) return; Isolate* isolate = keys->isolate(); Handle
dictionary = Handle
::cast(backing_store); int capacity = dictionary->Capacity(); Handle
elements = isolate->factory()->NewFixedArray( GetMaxNumberOfEntries(*object, *backing_store)); int insertion_index = 0; PropertyFilter filter = keys->filter(); ReadOnlyRoots roots(isolate); for (int i = 0; i < capacity; i++) { Object* raw_key = dictionary->KeyAt(i); if (!dictionary->IsKey(roots, raw_key)) continue; uint32_t key = FilterKey(dictionary, i, raw_key, filter); if (key == kMaxUInt32) { keys->AddShadowingKey(raw_key); continue; } elements->set(insertion_index, raw_key); insertion_index++; } SortIndices(isolate, elements, insertion_index); for (int i = 0; i < insertion_index; i++) { keys->AddKey(elements->get(i)); } } static Handle
DirectCollectElementIndicesImpl( Isolate* isolate, Handle
object, Handle
backing_store, GetKeysConversion convert, PropertyFilter filter, Handle
list, uint32_t* nof_indices, uint32_t insertion_index = 0) { if (filter & SKIP_STRINGS) return list; if (filter & ONLY_ALL_CAN_READ) return list; Handle
dictionary = Handle
::cast(backing_store); uint32_t capacity = dictionary->Capacity(); for (uint32_t i = 0; i < capacity; i++) { uint32_t key = GetKeyForEntryImpl(isolate, dictionary, i, filter); if (key == kMaxUInt32) continue; Handle
index = isolate->factory()->NewNumberFromUint(key); list->set(insertion_index, *index); insertion_index++; } *nof_indices = insertion_index; return list; } static void AddElementsToKeyAccumulatorImpl(Handle
receiver, KeyAccumulator* accumulator, AddKeyConversion convert) { Isolate* isolate = accumulator->isolate(); Handle
dictionary( NumberDictionary::cast(receiver->elements()), isolate); int capacity = dictionary->Capacity(); ReadOnlyRoots roots(isolate); for (int i = 0; i < capacity; i++) { Object* k = dictionary->KeyAt(i); if (!dictionary->IsKey(roots, k)) continue; Object* value = dictionary->ValueAt(i); DCHECK(!value->IsTheHole(isolate)); DCHECK(!value->IsAccessorPair()); DCHECK(!value->IsAccessorInfo()); accumulator->AddKey(value, convert); } } static bool IncludesValueFastPath(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length, Maybe
* result) { DisallowHeapAllocation no_gc; NumberDictionary* dictionary = NumberDictionary::cast(receiver->elements()); int capacity = dictionary->Capacity(); Object* the_hole = ReadOnlyRoots(isolate).the_hole_value(); Object* undefined = ReadOnlyRoots(isolate).undefined_value(); // Scan for accessor properties. If accessors are present, then elements // must be accessed in order via the slow path. bool found = false; for (int i = 0; i < capacity; ++i) { Object* k = dictionary->KeyAt(i); if (k == the_hole) continue; if (k == undefined) continue; uint32_t index; if (!k->ToArrayIndex(&index) || index < start_from || index >= length) { continue; } if (dictionary->DetailsAt(i).kind() == kAccessor) { // Restart from beginning in slow path, otherwise we may observably // access getters out of order return false; } else if (!found) { Object* element_k = dictionary->ValueAt(i); if (value->SameValueZero(element_k)) found = true; } } *result = Just(found); return true; } static Maybe
IncludesValueImpl(Isolate* isolate, Handle
receiver, Handle
value, uint32_t start_from, uint32_t length) { DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver)); bool search_for_hole = value->IsUndefined(isolate); if (!search_for_hole) { Maybe
result = Nothing
(); if (DictionaryElementsAccessor::IncludesValueFastPath( isolate, receiver, value, start_from, length, &result)) { return result; } } ElementsKind original_elements_kind = receiver->GetElementsKind(); USE(original_elements_kind); Handle
dictionary( NumberDictionary::cast(receiver->elements()), isolate); // Iterate through entire range, as accessing elements out of order is // observable for (uint32_t k = start_from; k < length; ++k) { DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind); int entry = dictionary->FindEntry(isolate, k); if (entry == NumberDictionary::kNotFound) { if (search_for_hole) return Just(true); continue; } PropertyDetails details = GetDetailsImpl(*dictionary, entry); switch (details.kind()) { case kData: { Object* element_k = dictionary->ValueAt(entry); if (value->SameValueZero(element_k)) return Just(true); break; } case kAccessor: { LookupIterator it(isolate, receiver, k, LookupIterator::OWN_SKIP_INTERCEPTOR); DCHECK(it.IsFound()); DCHECK_EQ(it.state(), LookupIterator::ACCESSOR); Handle
element_k; ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, element_k, JSObject::GetPropertyWithAccessor(&it), Nothing
()); if (value->SameValueZero(*element_k)) return Just(true); // Bailout to slow path if elements on prototype changed if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) { return IncludesValueSlowPath(isolate, receiver, value, k + 1, length); } // Continue if elements unchanged if (*dictionary == receiver->elements()) continue; // Otherwise, bailout or update elements // If switched to initial elements, return true if searching for // undefined, and false otherwise. if (receiver->map()->GetInitialElements() == receiver->elements()) { return Just(search_for_hole); } // If switched to fast elements, continue with the correct accessor. if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) { ElementsAccessor* accessor = receiver->GetElementsAccessor(); return accessor->IncludesValue(isolate, receiver, value, k + 1, length); } dictionary = handle(NumberDictionary::cast(receiver->elements()), isolate); break; } } } return Just(false); } static Maybe
IndexOfValueImpl(Isolate* isolate, Handle
receiver, Handle