"); break; case ODDBALL_TYPE: { if (IsUndefined()) accumulator->Add(""); else if (IsTheHole()) accumulator->Add(""); else if (IsNull()) accumulator->Add(""); else if (IsTrue()) accumulator->Add(""); else if (IsFalse()) accumulator->Add(""); else accumulator->Add(""); break; } case SYMBOL_TYPE: { Symbol* symbol = Symbol::cast(this); accumulator->Add("Hash()); if (!symbol->name()->IsUndefined()) { accumulator->Add(" "); String::cast(symbol->name())->StringShortPrint(accumulator); } accumulator->Add(">"); break; } case HEAP_NUMBER_TYPE: accumulator->Add("HeapNumberPrint(accumulator); accumulator->Put('>'); break; case JS_PROXY_TYPE: accumulator->Add(""); break; case JS_FUNCTION_PROXY_TYPE: accumulator->Add(""); break; case FOREIGN_TYPE: accumulator->Add(""); break; case CELL_TYPE: accumulator->Add("Cell for "); Cell::cast(this)->value()->ShortPrint(accumulator); break; case PROPERTY_CELL_TYPE: accumulator->Add("PropertyCell for "); PropertyCell::cast(this)->value()->ShortPrint(accumulator); break; default: accumulator->Add("", map()->instance_type()); break; } } void HeapObject::Iterate(ObjectVisitor* v) { // Handle header IteratePointer(v, kMapOffset); // Handle object body Map* m = map(); IterateBody(m->instance_type(), SizeFromMap(m), v); } void HeapObject::IterateBody(InstanceType type, int object_size, ObjectVisitor* v) { // Avoiding ::cast(this) because it accesses the map pointer field. // During GC, the map pointer field is encoded. 
if (type < FIRST_NONSTRING_TYPE) { switch (type & kStringRepresentationMask) { case kSeqStringTag: break; case kConsStringTag: ConsString::BodyDescriptor::IterateBody(this, v); break; case kSlicedStringTag: SlicedString::BodyDescriptor::IterateBody(this, v); break; case kExternalStringTag: if ((type & kStringEncodingMask) == kOneByteStringTag) { reinterpret_cast(this)-> ExternalAsciiStringIterateBody(v); } else { reinterpret_cast(this)-> ExternalTwoByteStringIterateBody(v); } break; } return; } switch (type) { case FIXED_ARRAY_TYPE: FixedArray::BodyDescriptor::IterateBody(this, object_size, v); break; case CONSTANT_POOL_ARRAY_TYPE: reinterpret_cast(this)->ConstantPoolIterateBody(v); break; case FIXED_DOUBLE_ARRAY_TYPE: break; case JS_OBJECT_TYPE: case JS_CONTEXT_EXTENSION_OBJECT_TYPE: case JS_GENERATOR_OBJECT_TYPE: case JS_MODULE_TYPE: case JS_VALUE_TYPE: case JS_DATE_TYPE: case JS_ARRAY_TYPE: case JS_ARRAY_BUFFER_TYPE: case JS_TYPED_ARRAY_TYPE: case JS_DATA_VIEW_TYPE: case JS_SET_TYPE: case JS_MAP_TYPE: case JS_WEAK_MAP_TYPE: case JS_WEAK_SET_TYPE: case JS_REGEXP_TYPE: case JS_GLOBAL_PROXY_TYPE: case JS_GLOBAL_OBJECT_TYPE: case JS_BUILTINS_OBJECT_TYPE: case JS_MESSAGE_OBJECT_TYPE: JSObject::BodyDescriptor::IterateBody(this, object_size, v); break; case JS_FUNCTION_TYPE: reinterpret_cast(this) ->JSFunctionIterateBody(object_size, v); break; case ODDBALL_TYPE: Oddball::BodyDescriptor::IterateBody(this, v); break; case JS_PROXY_TYPE: JSProxy::BodyDescriptor::IterateBody(this, v); break; case JS_FUNCTION_PROXY_TYPE: JSFunctionProxy::BodyDescriptor::IterateBody(this, v); break; case FOREIGN_TYPE: reinterpret_cast(this)->ForeignIterateBody(v); break; case MAP_TYPE: Map::BodyDescriptor::IterateBody(this, v); break; case CODE_TYPE: reinterpret_cast(this)->CodeIterateBody(v); break; case CELL_TYPE: Cell::BodyDescriptor::IterateBody(this, v); break; case PROPERTY_CELL_TYPE: PropertyCell::BodyDescriptor::IterateBody(this, v); break; case SYMBOL_TYPE: 
Symbol::BodyDescriptor::IterateBody(this, v); break; case HEAP_NUMBER_TYPE: case FILLER_TYPE: case BYTE_ARRAY_TYPE: case FREE_SPACE_TYPE: case EXTERNAL_PIXEL_ARRAY_TYPE: case EXTERNAL_BYTE_ARRAY_TYPE: case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: case EXTERNAL_SHORT_ARRAY_TYPE: case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE: case EXTERNAL_INT_ARRAY_TYPE: case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE: case EXTERNAL_FLOAT_ARRAY_TYPE: case EXTERNAL_DOUBLE_ARRAY_TYPE: break; case SHARED_FUNCTION_INFO_TYPE: { SharedFunctionInfo::BodyDescriptor::IterateBody(this, v); break; } #define MAKE_STRUCT_CASE(NAME, Name, name) \ case NAME##_TYPE: STRUCT_LIST(MAKE_STRUCT_CASE) #undef MAKE_STRUCT_CASE if (type == ALLOCATION_SITE_TYPE) { AllocationSite::BodyDescriptor::IterateBody(this, v); } else { StructBodyDescriptor::IterateBody(this, object_size, v); } break; default: PrintF("Unknown type: %d\n", type); UNREACHABLE(); } } bool HeapNumber::HeapNumberBooleanValue() { // NaN, +0, and -0 should return the false object #if __BYTE_ORDER == __LITTLE_ENDIAN union IeeeDoubleLittleEndianArchType u; #elif __BYTE_ORDER == __BIG_ENDIAN union IeeeDoubleBigEndianArchType u; #endif u.d = value(); if (u.bits.exp == 2047) { // Detect NaN for IEEE double precision floating point. if ((u.bits.man_low | u.bits.man_high) != 0) return false; } if (u.bits.exp == 0) { // Detect +0, and -0 for IEEE double precision floating point. if ((u.bits.man_low | u.bits.man_high) == 0) return false; } return true; } void HeapNumber::HeapNumberPrint(FILE* out) { PrintF(out, "%.16g", Number()); } void HeapNumber::HeapNumberPrint(StringStream* accumulator) { // The Windows version of vsnprintf can allocate when printing a %g string // into a buffer that may not be big enough. 
We don't want random memory // allocation when producing post-crash stack traces, so we print into a // buffer that is plenty big enough for any floating point number, then // print that using vsnprintf (which may truncate but never allocate if // there is no more space in the buffer). EmbeddedVector buffer; OS::SNPrintF(buffer, "%.16g", Number()); accumulator->Add("%s", buffer.start()); } String* JSReceiver::class_name() { if (IsJSFunction() && IsJSFunctionProxy()) { return GetHeap()->function_class_string(); } if (map()->constructor()->IsJSFunction()) { JSFunction* constructor = JSFunction::cast(map()->constructor()); return String::cast(constructor->shared()->instance_class_name()); } // If the constructor is not present, return "Object". return GetHeap()->Object_string(); } String* Map::constructor_name() { if (constructor()->IsJSFunction()) { JSFunction* constructor = JSFunction::cast(this->constructor()); String* name = String::cast(constructor->shared()->name()); if (name->length() > 0) return name; String* inferred_name = constructor->shared()->inferred_name(); if (inferred_name->length() > 0) return inferred_name; Object* proto = prototype(); if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name(); } // TODO(rossberg): what about proxies? // If the constructor is not present, return "Object". return GetHeap()->Object_string(); } String* JSReceiver::constructor_name() { return map()->constructor_name(); } // TODO(mstarzinger): Temporary wrapper until handlified. static Handle NewStorageFor(Isolate* isolate, Handle object, Representation representation) { Heap* heap = isolate->heap(); CALL_HEAP_FUNCTION(isolate, object->AllocateNewStorageFor(heap, representation), Object); } void JSObject::AddFastPropertyUsingMap(Handle object, Handle new_map, Handle name, Handle value, int field_index, Representation representation) { Isolate* isolate = object->GetIsolate(); // This method is used to transition to a field. 
If we are transitioning to a // double field, allocate new storage. Handle storage = NewStorageFor(isolate, value, representation); if (object->map()->unused_property_fields() == 0) { int new_unused = new_map->unused_property_fields(); Handle properties(object->properties()); Handle values = isolate->factory()->CopySizeFixedArray( properties, properties->length() + new_unused + 1); object->set_properties(*values); } object->set_map(*new_map); object->FastPropertyAtPut(field_index, *storage); } static MaybeObject* CopyAddFieldDescriptor(Map* map, Name* name, int index, PropertyAttributes attributes, Representation representation, TransitionFlag flag) { Map* new_map; FieldDescriptor new_field_desc(name, index, attributes, representation); MaybeObject* maybe_map = map->CopyAddDescriptor(&new_field_desc, flag); if (!maybe_map->To(&new_map)) return maybe_map; int unused_property_fields = map->unused_property_fields() - 1; if (unused_property_fields < 0) { unused_property_fields += JSObject::kFieldsAdded; } new_map->set_unused_property_fields(unused_property_fields); return new_map; } static Handle CopyAddFieldDescriptor(Handle map, Handle name, int index, PropertyAttributes attributes, Representation representation, TransitionFlag flag) { CALL_HEAP_FUNCTION(map->GetIsolate(), CopyAddFieldDescriptor( *map, *name, index, attributes, representation, flag), Map); } void JSObject::AddFastProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes, StoreFromKeyed store_mode, ValueType value_type, TransitionFlag flag) { ASSERT(!object->IsJSGlobalProxy()); ASSERT(DescriptorArray::kNotFound == object->map()->instance_descriptors()->Search( *name, object->map()->NumberOfOwnDescriptors())); // Normalize the object if the name is an actual name (not the // hidden strings) and is not a real identifier. // Normalize the object if it will have too many fast properties. 
Isolate* isolate = object->GetIsolate(); if (!name->IsCacheable(isolate) || object->TooManyFastProperties(store_mode)) { NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); AddSlowProperty(object, name, value, attributes); return; } // Compute the new index for new field. int index = object->map()->NextFreePropertyIndex(); // Allocate new instance descriptors with (name, index) added if (object->IsJSContextExtensionObject()) value_type = FORCE_TAGGED; Representation representation = value->OptimalRepresentation(value_type); Handle new_map = CopyAddFieldDescriptor( handle(object->map()), name, index, attributes, representation, flag); AddFastPropertyUsingMap(object, new_map, name, value, index, representation); } static MaybeObject* CopyAddConstantDescriptor(Map* map, Name* name, Object* value, PropertyAttributes attributes, TransitionFlag flag) { ConstantDescriptor new_constant_desc(name, value, attributes); return map->CopyAddDescriptor(&new_constant_desc, flag); } static Handle CopyAddConstantDescriptor(Handle map, Handle name, Handle value, PropertyAttributes attributes, TransitionFlag flag) { CALL_HEAP_FUNCTION(map->GetIsolate(), CopyAddConstantDescriptor( *map, *name, *value, attributes, flag), Map); } void JSObject::AddConstantProperty(Handle object, Handle name, Handle constant, PropertyAttributes attributes, TransitionFlag initial_flag) { TransitionFlag flag = // Do not add transitions to global objects. (object->IsGlobalObject() || // Don't add transitions to special properties with non-trivial // attributes. attributes != NONE) ? OMIT_TRANSITION : initial_flag; // Allocate new instance descriptors with (name, constant) added. 
Handle new_map = CopyAddConstantDescriptor( handle(object->map()), name, constant, attributes, flag); object->set_map(*new_map); } void JSObject::AddSlowProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes) { ASSERT(!object->HasFastProperties()); Isolate* isolate = object->GetIsolate(); Handle dict(object->property_dictionary()); if (object->IsGlobalObject()) { // In case name is an orphaned property reuse the cell. int entry = dict->FindEntry(*name); if (entry != NameDictionary::kNotFound) { Handle cell(PropertyCell::cast(dict->ValueAt(entry))); PropertyCell::SetValueInferType(cell, value); // Assign an enumeration index to the property and update // SetNextEnumerationIndex. int index = dict->NextEnumerationIndex(); PropertyDetails details = PropertyDetails(attributes, NORMAL, index); dict->SetNextEnumerationIndex(index + 1); dict->SetEntry(entry, *name, *cell, details); return; } Handle cell = isolate->factory()->NewPropertyCell(value); PropertyCell::SetValueInferType(cell, value); value = cell; } PropertyDetails details = PropertyDetails(attributes, NORMAL, 0); Handle result = NameDictionaryAdd(dict, name, value, details); if (*dict != *result) object->set_properties(*result); } Handle JSObject::AddProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes, StrictModeFlag strict_mode, JSReceiver::StoreFromKeyed store_mode, ExtensibilityCheck extensibility_check, ValueType value_type, StoreMode mode, TransitionFlag transition_flag) { ASSERT(!object->IsJSGlobalProxy()); Isolate* isolate = object->GetIsolate(); if (!name->IsUniqueName()) { name = isolate->factory()->InternalizedStringFromString( Handle::cast(name)); } if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK && !object->map()->is_extensible()) { if (strict_mode == kNonStrictMode) { return value; } else { Handle args[1] = { name }; Handle error = isolate->factory()->NewTypeError( "object_not_extensible", HandleVector(args, ARRAY_SIZE(args))); 
isolate->Throw(*error); return Handle(); } } if (object->HasFastProperties()) { // Ensure the descriptor array does not get too big. if (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors) { // TODO(verwaest): Support other constants. // if (mode == ALLOW_AS_CONSTANT && // !value->IsTheHole() && // !value->IsConsString()) { if (value->IsJSFunction()) { AddConstantProperty(object, name, value, attributes, transition_flag); } else { AddFastProperty(object, name, value, attributes, store_mode, value_type, transition_flag); } } else { // Normalize the object to prevent very large instance descriptors. // This eliminates unwanted N^2 allocation and lookup behavior. NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); AddSlowProperty(object, name, value, attributes); } } else { AddSlowProperty(object, name, value, attributes); } if (FLAG_harmony_observation && object->map()->is_observed() && *name != isolate->heap()->hidden_string()) { Handle old_value = isolate->factory()->the_hole_value(); EnqueueChangeRecord(object, "add", name, old_value); } return value; } void JSObject::EnqueueChangeRecord(Handle object, const char* type_str, Handle name, Handle old_value) { Isolate* isolate = object->GetIsolate(); HandleScope scope(isolate); Handle type = isolate->factory()->InternalizeUtf8String(type_str); if (object->IsJSGlobalObject()) { object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate); } Handle args[] = { type, object, name, old_value }; int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4; bool threw; Execution::Call(isolate, Handle(isolate->observers_notify_change()), isolate->factory()->undefined_value(), argc, args, &threw); ASSERT(!threw); } Handle JSObject::SetPropertyPostInterceptor( Handle object, Handle name, Handle value, PropertyAttributes attributes, StrictModeFlag strict_mode) { // Check local property, ignore interceptor. 
LookupResult result(object->GetIsolate()); object->LocalLookupRealNamedProperty(*name, &result); if (!result.IsFound()) { object->map()->LookupTransition(*object, *name, &result); } if (result.IsFound()) { // An existing property or a map transition was found. Use set property to // handle all these cases. return SetPropertyForResult(object, &result, name, value, attributes, strict_mode, MAY_BE_STORE_FROM_KEYED); } bool done = false; Handle result_object = SetPropertyViaPrototypes( object, name, value, attributes, strict_mode, &done); if (done) return result_object; // Add a new real property. return AddProperty(object, name, value, attributes, strict_mode); } static void ReplaceSlowProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes) { NameDictionary* dictionary = object->property_dictionary(); int old_index = dictionary->FindEntry(*name); int new_enumeration_index = 0; // 0 means "Use the next available index." if (old_index != -1) { // All calls to ReplaceSlowProperty have had all transitions removed. new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index(); } PropertyDetails new_details(attributes, NORMAL, new_enumeration_index); JSObject::SetNormalizedProperty(object, name, value, new_details); } const char* Representation::Mnemonic() const { switch (kind_) { case kNone: return "v"; case kTagged: return "t"; case kSmi: return "s"; case kDouble: return "d"; case kInteger32: return "i"; case kHeapObject: return "h"; case kExternal: return "x"; default: UNREACHABLE(); return NULL; } } enum RightTrimMode { FROM_GC, FROM_MUTATOR }; static void ZapEndOfFixedArray(Address new_end, int to_trim) { // If we are doing a big trim in old space then we zap the space. Object** zap = reinterpret_cast(new_end); zap++; // Header of filler must be at least one word so skip that. 
for (int i = 1; i < to_trim; i++) { *zap++ = Smi::FromInt(0); } } template static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) { ASSERT(elms->map() != heap->fixed_cow_array_map()); // For now this trick is only applied to fixed arrays in new and paged space. ASSERT(!heap->lo_space()->Contains(elms)); const int len = elms->length(); ASSERT(to_trim < len); Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim); if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) { ZapEndOfFixedArray(new_end, to_trim); } int size_delta = to_trim * kPointerSize; // Technically in new space this write might be omitted (except for // debug mode which iterates through the heap), but to play safer // we still do it. heap->CreateFillerObjectAt(new_end, size_delta); elms->set_length(len - to_trim); // Maintain marking consistency for IncrementalMarking. if (Marking::IsBlack(Marking::MarkBitFrom(elms))) { if (trim_mode == FROM_GC) { MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta); } else { MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta); } } // The array may not be moved during GC, // and size has to be adjusted nevertheless. HeapProfiler* profiler = heap->isolate()->heap_profiler(); if (profiler->is_tracking_allocations()) { profiler->UpdateObjectSizeEvent(elms->address(), elms->Size()); } } bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields, int target_inobject, int target_unused) { // If fields were added (or removed), rewrite the instance. int number_of_fields = NumberOfFields(); ASSERT(target_number_of_fields >= number_of_fields); if (target_number_of_fields != number_of_fields) return true; if (FLAG_track_double_fields) { // If smi descriptors were replaced by double descriptors, rewrite. 
DescriptorArray* old_desc = instance_descriptors(); DescriptorArray* new_desc = target->instance_descriptors(); int limit = NumberOfOwnDescriptors(); for (int i = 0; i < limit; i++) { if (new_desc->GetDetails(i).representation().IsDouble() && !old_desc->GetDetails(i).representation().IsDouble()) { return true; } } } // If no fields were added, and no inobject properties were removed, setting // the map is sufficient. if (target_inobject == inobject_properties()) return false; // In-object slack tracking may have reduced the object size of the new map. // In that case, succeed if all existing fields were inobject, and they still // fit within the new inobject size. ASSERT(target_inobject < inobject_properties()); if (target_number_of_fields <= target_inobject) { ASSERT(target_number_of_fields + target_unused == target_inobject); return false; } // Otherwise, properties will need to be moved to the backing store. return true; } // To migrate an instance to a map: // - First check whether the instance needs to be rewritten. If not, simply // change the map. // - Otherwise, allocate a fixed array large enough to hold all fields, in // addition to unused space. // - Copy all existing properties in, in the following order: backing store // properties, unused fields, inobject properties. // - If all allocation succeeded, commit the state atomically: // * Copy inobject properties from the backing store back into the object. // * Trim the difference in instance size of the object. This also cleanly // frees inobject properties that moved to the backing store. // * If there are properties left in the backing store, trim of the space used // to temporarily store the inobject properties. // * If there are properties left in the backing store, install the backing // store. 
void JSObject::MigrateToMap(Handle object, Handle new_map) { Isolate* isolate = object->GetIsolate(); Handle old_map(object->map()); int number_of_fields = new_map->NumberOfFields(); int inobject = new_map->inobject_properties(); int unused = new_map->unused_property_fields(); // Nothing to do if no functions were converted to fields and no smis were // converted to doubles. if (!old_map->InstancesNeedRewriting( *new_map, number_of_fields, inobject, unused)) { object->set_map(*new_map); return; } int total_size = number_of_fields + unused; int external = total_size - inobject; Handle array = isolate->factory()->NewFixedArray(total_size); Handle old_descriptors(old_map->instance_descriptors()); Handle new_descriptors(new_map->instance_descriptors()); int descriptors = new_map->NumberOfOwnDescriptors(); for (int i = 0; i < descriptors; i++) { PropertyDetails details = new_descriptors->GetDetails(i); if (details.type() != FIELD) continue; PropertyDetails old_details = old_descriptors->GetDetails(i); if (old_details.type() == CALLBACKS) { ASSERT(details.representation().IsTagged()); continue; } ASSERT(old_details.type() == CONSTANT || old_details.type() == FIELD); Object* raw_value = old_details.type() == CONSTANT ? old_descriptors->GetValue(i) : object->RawFastPropertyAt(old_descriptors->GetFieldIndex(i)); Handle value(raw_value, isolate); if (FLAG_track_double_fields && !old_details.representation().IsDouble() && details.representation().IsDouble()) { if (old_details.representation().IsNone()) { value = handle(Smi::FromInt(0), isolate); } value = NewStorageFor(isolate, value, details.representation()); } ASSERT(!(FLAG_track_double_fields && details.representation().IsDouble() && value->IsSmi())); int target_index = new_descriptors->GetFieldIndex(i) - inobject; if (target_index < 0) target_index += total_size; array->set(target_index, *value); } // From here on we cannot fail and we shouldn't GC anymore. 
DisallowHeapAllocation no_allocation; // Copy (real) inobject properties. If necessary, stop at number_of_fields to // avoid overwriting |one_pointer_filler_map|. int limit = Min(inobject, number_of_fields); for (int i = 0; i < limit; i++) { object->FastPropertyAtPut(i, array->get(external + i)); } // Create filler object past the new instance size. int new_instance_size = new_map->instance_size(); int instance_size_delta = old_map->instance_size() - new_instance_size; ASSERT(instance_size_delta >= 0); Address address = object->address() + new_instance_size; isolate->heap()->CreateFillerObjectAt(address, instance_size_delta); // If there are properties in the new backing store, trim it to the correct // size and install the backing store into the object. if (external > 0) { RightTrimFixedArray(isolate->heap(), *array, inobject); object->set_properties(*array); } object->set_map(*new_map); } Handle Map::AddTransition(Handle map, Handle
(this)->CodeIterateBody(v); break; case CELL_TYPE: Cell::BodyDescriptor::IterateBody(this, v); break; case PROPERTY_CELL_TYPE: PropertyCell::BodyDescriptor::IterateBody(this, v); break; case SYMBOL_TYPE: Symbol::BodyDescriptor::IterateBody(this, v); break; case HEAP_NUMBER_TYPE: case FILLER_TYPE: case BYTE_ARRAY_TYPE: case FREE_SPACE_TYPE: case EXTERNAL_PIXEL_ARRAY_TYPE: case EXTERNAL_BYTE_ARRAY_TYPE: case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: case EXTERNAL_SHORT_ARRAY_TYPE: case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE: case EXTERNAL_INT_ARRAY_TYPE: case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE: case EXTERNAL_FLOAT_ARRAY_TYPE: case EXTERNAL_DOUBLE_ARRAY_TYPE: break; case SHARED_FUNCTION_INFO_TYPE: { SharedFunctionInfo::BodyDescriptor::IterateBody(this, v); break; } #define MAKE_STRUCT_CASE(NAME, Name, name) \ case NAME##_TYPE: STRUCT_LIST(MAKE_STRUCT_CASE) #undef MAKE_STRUCT_CASE if (type == ALLOCATION_SITE_TYPE) { AllocationSite::BodyDescriptor::IterateBody(this, v); } else { StructBodyDescriptor::IterateBody(this, object_size, v); } break; default: PrintF("Unknown type: %d\n", type); UNREACHABLE(); } } bool HeapNumber::HeapNumberBooleanValue() { // NaN, +0, and -0 should return the false object #if __BYTE_ORDER == __LITTLE_ENDIAN union IeeeDoubleLittleEndianArchType u; #elif __BYTE_ORDER == __BIG_ENDIAN union IeeeDoubleBigEndianArchType u; #endif u.d = value(); if (u.bits.exp == 2047) { // Detect NaN for IEEE double precision floating point. if ((u.bits.man_low | u.bits.man_high) != 0) return false; } if (u.bits.exp == 0) { // Detect +0, and -0 for IEEE double precision floating point. if ((u.bits.man_low | u.bits.man_high) == 0) return false; } return true; } void HeapNumber::HeapNumberPrint(FILE* out) { PrintF(out, "%.16g", Number()); } void HeapNumber::HeapNumberPrint(StringStream* accumulator) { // The Windows version of vsnprintf can allocate when printing a %g string // into a buffer that may not be big enough. 
We don't want random memory // allocation when producing post-crash stack traces, so we print into a // buffer that is plenty big enough for any floating point number, then // print that using vsnprintf (which may truncate but never allocate if // there is no more space in the buffer). EmbeddedVector buffer; OS::SNPrintF(buffer, "%.16g", Number()); accumulator->Add("%s", buffer.start()); } String* JSReceiver::class_name() { if (IsJSFunction() && IsJSFunctionProxy()) { return GetHeap()->function_class_string(); } if (map()->constructor()->IsJSFunction()) { JSFunction* constructor = JSFunction::cast(map()->constructor()); return String::cast(constructor->shared()->instance_class_name()); } // If the constructor is not present, return "Object". return GetHeap()->Object_string(); } String* Map::constructor_name() { if (constructor()->IsJSFunction()) { JSFunction* constructor = JSFunction::cast(this->constructor()); String* name = String::cast(constructor->shared()->name()); if (name->length() > 0) return name; String* inferred_name = constructor->shared()->inferred_name(); if (inferred_name->length() > 0) return inferred_name; Object* proto = prototype(); if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name(); } // TODO(rossberg): what about proxies? // If the constructor is not present, return "Object". return GetHeap()->Object_string(); } String* JSReceiver::constructor_name() { return map()->constructor_name(); } // TODO(mstarzinger): Temporary wrapper until handlified. static Handle NewStorageFor(Isolate* isolate, Handle object, Representation representation) { Heap* heap = isolate->heap(); CALL_HEAP_FUNCTION(isolate, object->AllocateNewStorageFor(heap, representation), Object); } void JSObject::AddFastPropertyUsingMap(Handle object, Handle new_map, Handle name, Handle value, int field_index, Representation representation) { Isolate* isolate = object->GetIsolate(); // This method is used to transition to a field. 
If we are transitioning to a // double field, allocate new storage. Handle storage = NewStorageFor(isolate, value, representation); if (object->map()->unused_property_fields() == 0) { int new_unused = new_map->unused_property_fields(); Handle properties(object->properties()); Handle values = isolate->factory()->CopySizeFixedArray( properties, properties->length() + new_unused + 1); object->set_properties(*values); } object->set_map(*new_map); object->FastPropertyAtPut(field_index, *storage); } static MaybeObject* CopyAddFieldDescriptor(Map* map, Name* name, int index, PropertyAttributes attributes, Representation representation, TransitionFlag flag) { Map* new_map; FieldDescriptor new_field_desc(name, index, attributes, representation); MaybeObject* maybe_map = map->CopyAddDescriptor(&new_field_desc, flag); if (!maybe_map->To(&new_map)) return maybe_map; int unused_property_fields = map->unused_property_fields() - 1; if (unused_property_fields < 0) { unused_property_fields += JSObject::kFieldsAdded; } new_map->set_unused_property_fields(unused_property_fields); return new_map; } static Handle CopyAddFieldDescriptor(Handle map, Handle name, int index, PropertyAttributes attributes, Representation representation, TransitionFlag flag) { CALL_HEAP_FUNCTION(map->GetIsolate(), CopyAddFieldDescriptor( *map, *name, index, attributes, representation, flag), Map); } void JSObject::AddFastProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes, StoreFromKeyed store_mode, ValueType value_type, TransitionFlag flag) { ASSERT(!object->IsJSGlobalProxy()); ASSERT(DescriptorArray::kNotFound == object->map()->instance_descriptors()->Search( *name, object->map()->NumberOfOwnDescriptors())); // Normalize the object if the name is an actual name (not the // hidden strings) and is not a real identifier. // Normalize the object if it will have too many fast properties. 
Isolate* isolate = object->GetIsolate(); if (!name->IsCacheable(isolate) || object->TooManyFastProperties(store_mode)) { NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); AddSlowProperty(object, name, value, attributes); return; } // Compute the new index for new field. int index = object->map()->NextFreePropertyIndex(); // Allocate new instance descriptors with (name, index) added if (object->IsJSContextExtensionObject()) value_type = FORCE_TAGGED; Representation representation = value->OptimalRepresentation(value_type); Handle new_map = CopyAddFieldDescriptor( handle(object->map()), name, index, attributes, representation, flag); AddFastPropertyUsingMap(object, new_map, name, value, index, representation); } static MaybeObject* CopyAddConstantDescriptor(Map* map, Name* name, Object* value, PropertyAttributes attributes, TransitionFlag flag) { ConstantDescriptor new_constant_desc(name, value, attributes); return map->CopyAddDescriptor(&new_constant_desc, flag); } static Handle CopyAddConstantDescriptor(Handle map, Handle name, Handle value, PropertyAttributes attributes, TransitionFlag flag) { CALL_HEAP_FUNCTION(map->GetIsolate(), CopyAddConstantDescriptor( *map, *name, *value, attributes, flag), Map); } void JSObject::AddConstantProperty(Handle object, Handle name, Handle constant, PropertyAttributes attributes, TransitionFlag initial_flag) { TransitionFlag flag = // Do not add transitions to global objects. (object->IsGlobalObject() || // Don't add transitions to special properties with non-trivial // attributes. attributes != NONE) ? OMIT_TRANSITION : initial_flag; // Allocate new instance descriptors with (name, constant) added. 
Handle new_map = CopyAddConstantDescriptor( handle(object->map()), name, constant, attributes, flag); object->set_map(*new_map); } void JSObject::AddSlowProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes) { ASSERT(!object->HasFastProperties()); Isolate* isolate = object->GetIsolate(); Handle dict(object->property_dictionary()); if (object->IsGlobalObject()) { // In case name is an orphaned property reuse the cell. int entry = dict->FindEntry(*name); if (entry != NameDictionary::kNotFound) { Handle cell(PropertyCell::cast(dict->ValueAt(entry))); PropertyCell::SetValueInferType(cell, value); // Assign an enumeration index to the property and update // SetNextEnumerationIndex. int index = dict->NextEnumerationIndex(); PropertyDetails details = PropertyDetails(attributes, NORMAL, index); dict->SetNextEnumerationIndex(index + 1); dict->SetEntry(entry, *name, *cell, details); return; } Handle cell = isolate->factory()->NewPropertyCell(value); PropertyCell::SetValueInferType(cell, value); value = cell; } PropertyDetails details = PropertyDetails(attributes, NORMAL, 0); Handle result = NameDictionaryAdd(dict, name, value, details); if (*dict != *result) object->set_properties(*result); } Handle JSObject::AddProperty(Handle object, Handle name, Handle value, PropertyAttributes attributes, StrictModeFlag strict_mode, JSReceiver::StoreFromKeyed store_mode, ExtensibilityCheck extensibility_check, ValueType value_type, StoreMode mode, TransitionFlag transition_flag) { ASSERT(!object->IsJSGlobalProxy()); Isolate* isolate = object->GetIsolate(); if (!name->IsUniqueName()) { name = isolate->factory()->InternalizedStringFromString( Handle::cast(name)); } if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK && !object->map()->is_extensible()) { if (strict_mode == kNonStrictMode) { return value; } else { Handle args[1] = { name }; Handle error = isolate->factory()->NewTypeError( "object_not_extensible", HandleVector(args, ARRAY_SIZE(args))); 
isolate->Throw(*error);
      // Strict mode: signal the pending TypeError with an empty handle.
      return Handle();
    }
  }

  if (object->HasFastProperties()) {
    // Ensure the descriptor array does not get too big.
    if (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors) {
      // TODO(verwaest): Support other constants.
      // if (mode == ALLOW_AS_CONSTANT &&
      //     !value->IsTheHole() &&
      //     !value->IsConsString()) {
      if (value->IsJSFunction()) {
        AddConstantProperty(object, name, value, attributes, transition_flag);
      } else {
        AddFastProperty(object, name, value, attributes, store_mode,
                        value_type, transition_flag);
      }
    } else {
      // Normalize the object to prevent very large instance descriptors.
      // This eliminates unwanted N^2 allocation and lookup behavior.
      NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
      AddSlowProperty(object, name, value, attributes);
    }
  } else {
    AddSlowProperty(object, name, value, attributes);
  }

  // Notify Object.observe observers, skipping the internal hidden-string key.
  if (FLAG_harmony_observation &&
      object->map()->is_observed() &&
      *name != isolate->heap()->hidden_string()) {
    Handle old_value = isolate->factory()->the_hole_value();
    EnqueueChangeRecord(object, "add", name, old_value);
  }

  return value;
}


// Queues an Object.observe change record by calling the runtime's
// observers-notify-change function with (type, object, name, old_value).
void JSObject::EnqueueChangeRecord(Handle object,
                                   const char* type_str,
                                   Handle name,
                                   Handle old_value) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle type = isolate->factory()->InternalizeUtf8String(type_str);
  if (object->IsJSGlobalObject()) {
    // Observers see the global receiver rather than the bare global object.
    object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
  }
  Handle args[] = { type, object, name, old_value };
  // Pass only the arguments that are present: 2 when there is no name,
  // 3 when there is no old value, otherwise all 4.
  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
  bool threw;
  Execution::Call(isolate,
                  Handle(isolate->observers_notify_change()),
                  isolate->factory()->undefined_value(),
                  argc, args,
                  &threw);
  ASSERT(!threw);
}


// Stores |value| on |object| after interceptors have already been consulted.
// (Continues on the next chunk line.)
Handle JSObject::SetPropertyPostInterceptor(
    Handle object,
    Handle name,
    Handle value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode) {
  // Check local property, ignore interceptor.
LookupResult result(object->GetIsolate());
  object->LocalLookupRealNamedProperty(*name, &result);
  if (!result.IsFound()) {
    // No own property; a stored map transition also counts as "found".
    object->map()->LookupTransition(*object, *name, &result);
  }
  if (result.IsFound()) {
    // An existing property or a map transition was found. Use set property to
    // handle all these cases.
    return SetPropertyForResult(object, &result, name, value, attributes,
                                strict_mode, MAY_BE_STORE_FROM_KEYED);
  }
  bool done = false;
  Handle result_object = SetPropertyViaPrototypes(
      object, name, value, attributes, strict_mode, &done);
  // A handler on the prototype chain (e.g. a setter) consumed the store.
  if (done) return result_object;
  // Add a new real property.
  return AddProperty(object, name, value, attributes, strict_mode);
}


// Overwrites an existing dictionary-mode property, preserving its
// enumeration index when the entry already exists.
static void ReplaceSlowProperty(Handle object,
                                Handle name,
                                Handle value,
                                PropertyAttributes attributes) {
  NameDictionary* dictionary = object->property_dictionary();
  int old_index = dictionary->FindEntry(*name);
  int new_enumeration_index = 0;  // 0 means "Use the next available index."
  if (old_index != -1) {
    // All calls to ReplaceSlowProperty have had all transitions removed.
    new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
  }

  PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
  JSObject::SetNormalizedProperty(object, name, value, new_details);
}


// Single-letter mnemonic for a representation kind (used in debug/trace
// output).
const char* Representation::Mnemonic() const {
  switch (kind_) {
    case kNone: return "v";
    case kTagged: return "t";
    case kSmi: return "s";
    case kDouble: return "d";
    case kInteger32: return "i";
    case kHeapObject: return "h";
    case kExternal: return "x";
    default:
      UNREACHABLE();
      return NULL;
  }
}


// Distinguishes trims requested by the GC from trims requested by the
// mutator; the two branches update live-byte accounting differently (see
// RightTrimFixedArray below in this file).
enum RightTrimMode { FROM_GC, FROM_MUTATOR };


// Overwrites the trimmed-off tail of a fixed array with Smi zero so stale
// pointers are not left behind.  (Loop body continues on the next chunk
// line.)
// NOTE(review): the template/cast arguments were stripped from this file
// ("reinterpret_cast(new_end)"); restore from upstream before building.
static void ZapEndOfFixedArray(Address new_end, int to_trim) {
  // If we are doing a big trim in old space then we zap the space.
  Object** zap = reinterpret_cast(new_end);
  zap++;  // Header of filler must be at least one word so skip that.
for (int i = 1; i < to_trim; i++) {
    *zap++ = Smi::FromInt(0);
  }
}


// Shrinks |elms| in place by |to_trim| elements: zaps the freed tail when
// appropriate, installs a filler object over it, shortens the length field,
// and keeps incremental-marking live-byte counts and the heap profiler
// consistent.
// NOTE(review): the template parameter list (which should declare trim_mode,
// referenced below) appears to have been stripped from this file; restore it
// from upstream before building.
template
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
  // Copy-on-write arrays are shared and must never be shrunk in place.
  ASSERT(elms->map() != heap->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  ASSERT(!heap->lo_space()->Contains(elms));

  const int len = elms->length();

  ASSERT(to_trim < len);

  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);

  if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
    ZapEndOfFixedArray(new_end, to_trim);
  }

  int size_delta = to_trim * kPointerSize;

  // Technically in new space this write might be omitted (except for
  // debug mode which iterates through the heap), but to play safer
  // we still do it.
  heap->CreateFillerObjectAt(new_end, size_delta);

  elms->set_length(len - to_trim);

  // Maintain marking consistency for IncrementalMarking.
  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
    if (trim_mode == FROM_GC) {
      MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
    } else {
      MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
    }
  }

  // The array may not be moved during GC,
  // and size has to be adjusted nevertheless.
  HeapProfiler* profiler = heap->isolate()->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
  }
}


// Returns true when instances of this map must be rewritten (their fields
// copied into a new layout) before the map can be switched to |target|;
// returns false when simply setting the map pointer suffices.
// (Continues on the next chunk line.)
bool Map::InstancesNeedRewriting(Map* target,
                                 int target_number_of_fields,
                                 int target_inobject,
                                 int target_unused) {
  // If fields were added (or removed), rewrite the instance.
  int number_of_fields = NumberOfFields();
  ASSERT(target_number_of_fields >= number_of_fields);
  if (target_number_of_fields != number_of_fields) return true;

  if (FLAG_track_double_fields) {
    // If smi descriptors were replaced by double descriptors, rewrite.
DescriptorArray* old_desc = instance_descriptors();
    DescriptorArray* new_desc = target->instance_descriptors();
    int limit = NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      // A field widening to double changes its storage, so the instance
      // must be rewritten.
      if (new_desc->GetDetails(i).representation().IsDouble() &&
          !old_desc->GetDetails(i).representation().IsDouble()) {
        return true;
      }
    }
  }

  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == inobject_properties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  ASSERT(target_inobject < inobject_properties());
  if (target_number_of_fields <= target_inobject) {
    ASSERT(target_number_of_fields + target_unused == target_inobject);
    return false;
  }

  // Otherwise, properties will need to be moved to the backing store.
  return true;
}


// To migrate an instance to a map:
// - First check whether the instance needs to be rewritten. If not, simply
//   change the map.
// - Otherwise, allocate a fixed array large enough to hold all fields, in
//   addition to unused space.
// - Copy all existing properties in, in the following order: backing store
//   properties, unused fields, inobject properties.
// - If all allocation succeeded, commit the state atomically:
//   * Copy inobject properties from the backing store back into the object.
//   * Trim the difference in instance size of the object. This also cleanly
//     frees inobject properties that moved to the backing store.
//   * If there are properties left in the backing store, trim off the space
//     used to temporarily store the inobject properties.
//   * If there are properties left in the backing store, install the backing
//     store.
// Migrates |object| to |new_map|, rewriting property storage when the two
// layouts differ (see the step-by-step algorithm comment preceding this
// function in the file).  This chunk line covers the allocating first half;
// the commit phase continues on the next chunk line.
void JSObject::MigrateToMap(Handle object, Handle new_map) {
  Isolate* isolate = object->GetIsolate();
  Handle old_map(object->map());
  int number_of_fields = new_map->NumberOfFields();
  int inobject = new_map->inobject_properties();
  int unused = new_map->unused_property_fields();

  // Nothing to do if no functions were converted to fields and no smis were
  // converted to doubles.
  if (!old_map->InstancesNeedRewriting(
          *new_map, number_of_fields, inobject, unused)) {
    object->set_map(*new_map);
    return;
  }

  int total_size = number_of_fields + unused;
  // |external| fields spill into the out-of-object backing store.
  int external = total_size - inobject;
  // Temporary array holding every field value; in-object fields are copied
  // back into the object afterwards and trimmed off this array.
  Handle array = isolate->factory()->NewFixedArray(total_size);

  Handle old_descriptors(old_map->instance_descriptors());
  Handle new_descriptors(new_map->instance_descriptors());
  int descriptors = new_map->NumberOfOwnDescriptors();

  for (int i = 0; i < descriptors; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == CALLBACKS) {
      ASSERT(details.representation().IsTagged());
      continue;
    }
    ASSERT(old_details.type() == CONSTANT ||
           old_details.type() == FIELD);
    Object* raw_value = old_details.type() == CONSTANT
        ? old_descriptors->GetValue(i)
        : object->RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
    Handle value(raw_value, isolate);
    if (FLAG_track_double_fields &&
        !old_details.representation().IsDouble() &&
        details.representation().IsDouble()) {
      // Field widens to double: allocate fresh storage for the value,
      // defaulting an uninitialized (None) field to Smi zero first.
      if (old_details.representation().IsNone()) {
        value = handle(Smi::FromInt(0), isolate);
      }
      value = NewStorageFor(isolate, value, details.representation());
    }
    ASSERT(!(FLAG_track_double_fields &&
             details.representation().IsDouble() &&
             value->IsSmi()));
    // A negative target index marks an in-object field; rotate it past the
    // external entries to the tail of the temporary array.
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  }

  // From here on we cannot fail and we shouldn't GC anymore.
DisallowHeapAllocation no_allocation; // Copy (real) inobject properties. If necessary, stop at number_of_fields to // avoid overwriting |one_pointer_filler_map|. int limit = Min(inobject, number_of_fields); for (int i = 0; i < limit; i++) { object->FastPropertyAtPut(i, array->get(external + i)); } // Create filler object past the new instance size. int new_instance_size = new_map->instance_size(); int instance_size_delta = old_map->instance_size() - new_instance_size; ASSERT(instance_size_delta >= 0); Address address = object->address() + new_instance_size; isolate->heap()->CreateFillerObjectAt(address, instance_size_delta); // If there are properties in the new backing store, trim it to the correct // size and install the backing store into the object. if (external > 0) { RightTrimFixedArray(isolate->heap(), *array, inobject); object->set_properties(*array); } object->set_map(*new_map); } Handle Map::AddTransition(Handle map, Handle