(this)->CodeSize();
}

void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}

void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}

void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}

InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}

int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}

void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}

byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}

void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}

bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}

void Map::set_function_with_prototype(bool value) {
  set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
}

bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field3());
}

void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}

bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}

void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}

void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}

void Map::set_is_shared(bool value) {
  set_bit_field3(IsShared::update(bit_field3(), value));
}

bool Map::is_shared() {
  return IsShared::decode(bit_field3());
}

void Map::set_dictionary_map(bool value) {
  if (value) mark_unstable();
  set_bit_field3(DictionaryMap::update(bit_field3(), value));
}

bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}

Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}

void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}

bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}

void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}

bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}

void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}

bool Map::is_deprecated() {
  if (!FLAG_track_fields) return false;
  return Deprecated::decode(bit_field3());
}

void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}

bool Map::is_migration_target() {
  if (!FLAG_track_fields) return false;
  return IsMigrationTarget::decode(bit_field3());
}

void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}

bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}

bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}

bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}

bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (FLAG_track_fields && details.representation().IsNone()) {
      return true;
    }
    if (FLAG_track_fields && details.representation().IsSmi()) {
      return true;
    }
    if (FLAG_track_double_fields && details.representation().IsDouble()) {
      return true;
    }
    if (FLAG_track_heap_object_fields &&
        details.representation().IsHeapObject()) {
      return true;
    }
    if (FLAG_track_fields && details.type() == CONSTANT) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(), DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}

int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}

void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}

bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}

Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}

CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}

void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}

Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}

Object** DependentCode::slot_at(int i) {
  return HeapObject::RawField(
      this, FixedArray::OffsetOfElementAt(kCodesStartIndex + i));
}

void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}

void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}

void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}

void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  // Make sure that all call stubs have an arguments count.
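  // Note: for CALL_IC and KEYED_CALL_IC the argument count bits overlap the
  // extended extra IC state in the flags word (see ComputeFlags below), which
  // is why only call stubs are required to carry an arguments count here.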
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}

Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}

InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}

ExtraICState Code::extra_ic_state() {
  ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind())) ||
         ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}

ExtraICState Code::extended_extra_ic_state() {
  ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  ASSERT(needs_extended_extra_ic_state(kind()));
  return ExtractExtendedExtraICStateFromFlags(flags());
}

Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}

int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() ||
         kind() == STUB || is_handler());
  return ExtractArgumentsCountFromFlags(flags());
}

// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}

void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}

inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

int Code::major_key() {
  ASSERT(has_major_key());
  return StubMajorKeyField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

void Code::set_major_key(int major) {
  ASSERT(has_major_key());
  ASSERT(0 <= major && major < 256);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = StubMajorKeyField::update(previous, major);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

bool Code::has_major_key() {
  return kind() == STUB ||
      kind() == HANDLER ||
      kind() == BINARY_OP_IC ||
      kind() == COMPARE_IC ||
      kind() == COMPARE_NIL_IC ||
      kind() == LOAD_IC ||
      kind() == KEYED_LOAD_IC ||
      kind() == STORE_IC ||
      kind() == KEYED_STORE_IC ||
      kind() == KEYED_CALL_IC ||
      kind() == TO_BOOLEAN_IC;
}

bool Code::optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}

void Code::set_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}

bool Code::has_deoptimization_support() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}

void Code::set_has_deoptimization_support(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}

bool Code::has_debug_break_slots() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}

void Code::set_has_debug_break_slots(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}

bool Code::is_compiled_optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}

void Code::set_compiled_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}

int Code::allow_osr_at_loop_nesting_level() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}

void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}

int Code::profiler_ticks() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}

void Code::set_profiler_ticks(int ticks) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(ticks < 256);
  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
}

unsigned Code::stack_slots() {
  ASSERT(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  ASSERT(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

unsigned Code::safepoint_table_offset() {
  ASSERT(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  ASSERT(is_crankshafted());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

unsigned Code::back_edge_table_offset() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

void Code::set_back_edge_table_offset(unsigned offset) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

bool Code::back_edges_patched_for_osr() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgesPatchedForOSRField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}

void Code::set_back_edges_patched_for_osr(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgesPatchedForOSRField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}

CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}

void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}

byte Code::to_boolean_state() {
  return extended_extra_ic_state();
}

bool Code::has_function_cache() {
  ASSERT(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_has_function_cache(bool flag) {
  ASSERT(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::marked_for_deoptimization() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_marked_for_deoptimization(bool flag) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}

bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub() || is_keyed_call_stub();
}

bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}

Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               StubType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  ASSERT(argc <= Code::kMaxArguments);
  // Since the extended extra ic state overlaps with the argument count
  // for CALL_ICs, do some checks to make sure that they don't interfere.
  ASSERT((kind != Code::CALL_IC && kind != Code::KEYED_CALL_IC) ||
         (ExtraICStateField::encode(extra_ic_state) | true));
  // Compute the bit mask.
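  // A sketch of the resulting layout (the authoritative bit positions are the
  // BitField definitions in objects.h): the kind, IC state, stub type, extra
  // IC state (or, for call stubs, the argument count) and the cache holder
  // are all OR-ed into a single unsigned word below.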
  unsigned int bits = KindField::encode(kind) |
                      ICStateField::encode(ic_state) |
                      TypeField::encode(type) |
                      ExtendedExtraICStateField::encode(extra_ic_state) |
                      CacheHolderField::encode(holder);
  if (!Code::needs_extended_extra_ic_state(kind)) {
    bits |= (argc << kArgumentsCountShift);
  }
  return static_cast<Flags>(bits);
}

Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          StubType type,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}

Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}

InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}

ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}

ExtraICState Code::ExtractExtendedExtraICStateFromFlags(Flags flags) {
  return ExtendedExtraICStateField::decode(flags);
}

Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}

int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
}

InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}

Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}

Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::FromAddress(
      Memory::Address_at(location_of_address) - Code::kHeaderSize);
}

Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}

// If the map is using the empty transition array, install a new empty
// transition array that has room for an element transition.
static MaybeObject* EnsureHasTransitionArray(Map* map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions;
  if (!map->HasTransitionArray()) {
    maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  } else {
    return map;
  }
  map->set_transitions(transitions);
  return transitions;
}

void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}

ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)

void Map::set_bit_field3(uint32_t bits) {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit.
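  // Worked example, assuming a 32-bit int: if bit 30 of |bits| is set,
  // (bits << 1) moves it into bit 31, and the arithmetic shift in
  // (value >> 1) copies it back down, leaving bits 30 and 31 equal. The
  // low 31 bits of |bits| therefore survive the Smi tagging done below.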
  int value = bits << 1;
  WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
}

uint32_t Map::bit_field3() {
  Object* value = READ_FIELD(this, kBitField3Offset);
  return Smi::cast(value)->value();
}

void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
  Object* back_pointer = GetBackPointer();
  if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
    ZapTransitions();
  }
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
  CONDITIONAL_WRITE_BARRIER(
      heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
}

void Map::AppendDescriptor(Descriptor* desc,
                           const DescriptorArray::WhitenessWitness& witness) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc, witness);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}

Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsDescriptorArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    ASSERT(object->IsMap() || object->IsUndefined());
    return object;
  }
}

bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}

bool Map::HasTransitionArray() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}

Map* Map::elements_transition_map() {
  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}

bool Map::CanHaveMoreTransitions() {
  if (!HasTransitionArray()) return true;
  return FixedArray::SizeFor(transitions()->length() +
                             TransitionArray::kTransitionSize) <=
      Page::kMaxNonCodeHeapObjectSize;
}

MaybeObject* Map::AddTransition(Name* key,
                                Map* target,
                                SimpleTransitionFlag flag) {
  if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
  return TransitionArray::NewWith(flag, key, target, GetBackPointer());
}

void Map::SetTransition(int transition_index, Map* target) {
  transitions()->SetTarget(transition_index, target);
}

Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}

MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions = AddTransition(
      GetHeap()->elements_transition_symbol(),
      transitioned_map,
      FULL_TRANSITION);
  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  set_transitions(transitions);
  return transitions;
}

FixedArray* Map::GetPrototypeTransitions() {
  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
  if (!transitions()->HasPrototypeTransitions()) {
    return GetHeap()->empty_fixed_array();
  }
  return transitions()->GetPrototypeTransitions();
}

MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
  MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
  if (allow_prototype->IsFailure()) return allow_prototype;
  int old_number_of_transitions = NumberOfProtoTransitions();
#ifdef DEBUG
  if (HasPrototypeTransitions()) {
    ASSERT(GetPrototypeTransitions() != proto_transitions);
    ZapPrototypeTransitions();
  }
#endif
  transitions()->SetPrototypeTransitions(proto_transitions);
  SetNumberOfProtoTransitions(old_number_of_transitions);
  return this;
}

bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}

TransitionArray* Map::transitions() {
  ASSERT(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}

void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        ASSERT(new_target_index != TransitionArray::kNotFound);
        ASSERT(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    ASSERT(transitions() != transition_array);
    ZapTransitions();
  }
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array,
      mode);
}

void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}

void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}

// Can either be Smi (no transitions), normal transition array, or a
// transition array with the header overwritten as a Smi (thus iterating).
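// To summarize the accessors above, the kTransitionsOrBackPointerOffset slot
// normally holds either undefined or a Map (just a back pointer, no
// transitions yet), or a TransitionArray, in which case the back pointer
// lives in the array's back_pointer_storage() field instead.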
TransitionArray* Map::unchecked_transition_array() {
  Object* object = *HeapObject::RawField(this,
                                         Map::kTransitionsOrBackPointerOffset);
  TransitionArray* transition_array = static_cast<TransitionArray*>(object);
  return transition_array;
}

HeapObject* Map::UncheckedPrototypeTransitions() {
  ASSERT(HasTransitionArray());
  ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
  return unchecked_transition_array()->UncheckedPrototypeTransitions();
}

ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, memento_found_count, kMementoFoundCountOffset)
ACCESSORS_TO_SMI(AllocationSite, memento_create_count,
                 kMementoCreateCountOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_decision, kPretenureDecisionOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)

Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}

void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}

Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}

void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}

#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)

SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache, kDoNotCacheBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, has_duplicate_parameters,
               kHasDuplicateParameters)

#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints, kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)

#else

#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x00000000);                   \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, formal_parameter_count,
                        kFormalParameterCountOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, start_position_and_type,
                        kStartPositionAndTypeOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, compiler_hints,
                        kCompilerHintsOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

#endif

int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}

void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, live_objects_may_exist,
               kLiveObjectsMayExist)

bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != GetHeap()->undefined_value();
}

BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled,
            kOptimizationDisabled)

void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}

int SharedFunctionInfo::profiler_ticks() {
  if (code()->kind() != Code::FUNCTION) return 0;
  return code()->profiler_ticks();
}

LanguageMode SharedFunctionInfo::language_mode() {
  int hints = compiler_hints();
  if (BooleanBit::get(hints, kExtendedModeFunction)) {
    ASSERT(BooleanBit::get(hints, kStrictModeFunction));
    return EXTENDED_MODE;
  }
  return BooleanBit::get(hints, kStrictModeFunction)
      ? STRICT_MODE : CLASSIC_MODE;
}

void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  //   CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
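  // The mode is encoded in two compiler-hint bits: kStrictModeFunction is set
  // for both STRICT_MODE and EXTENDED_MODE, and kExtendedModeFunction is set
  // only for EXTENDED_MODE (see language_mode() above). Moving from
  // CLASSIC_MODE straight to EXTENDED_MODE therefore sets both bits at once.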
  ASSERT(this->language_mode() == CLASSIC_MODE ||
         this->language_mode() == language_mode ||
         language_mode == EXTENDED_MODE);
  int hints = compiler_hints();
  hints = BooleanBit::set(
      hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
  hints = BooleanBit::set(
      hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
  set_compiler_hints(hints);
}

bool SharedFunctionInfo::is_classic_mode() {
  return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
}

BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
            kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)

void SharedFunctionInfo::BeforeVisitingPointers() {
  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
}

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)

bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalAsciiString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}

void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}

int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}

void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift) |
      (start_position_and_type() & ~kStartPositionMask));
}

Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}

void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}

void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }
  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
  set_code(value);
}

ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}

void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}

bool SharedFunctionInfo::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}

bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}

FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}

bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}

BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}

int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}

void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}

int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}

void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}

void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}

int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}

void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}

int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}

void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}

BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
  BailoutReason reason = static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
  return reason;
}

bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}

void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
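  // ((tries - 1) & tries) == 0 tests whether |tries| is a power of two, so
  // together with the >= 16 threshold below, reenabling is attempted after
  // 16, 32, 64, ... tries.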
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}

bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}

bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}

bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}

bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}

bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}

bool JSFunction::IsMarkedForConcurrentRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kConcurrentRecompile);
}

bool JSFunction::IsInRecompileQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInRecompileQueue);
}

Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}

void JSFunction::set_code(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset,
                     reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}

void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset,
                     reinterpret_cast<intptr_t>(entry));
}

void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(
        this->code(), "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}

Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}

void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)

Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}

void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}

bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}

bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}

bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}

Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}

Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}

bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}

bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}

FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}

FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}

void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}

int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}

Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}

void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}

Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}

void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
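  // The store below deliberately skips the write barrier: builtin code
  // objects never live in new space (checked by the ASSERT that follows),
  // so the store cannot create an old-to-new pointer the GC would need to
  // track.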
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!GetHeap()->InNewSpace(value));
}

ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)

void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize;
       offset < object_size;
       offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)

Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}

void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}

ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}

ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)

JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}

ACCESSORS(JSValue, value, Object, kValueOffset)

JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}

ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)

JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}

ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)

JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}

INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)

void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  // Do not wipe out e.g. a minor key.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}

Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return raw_type_feedback_info();
}

void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}

Object* Code::next_code_link() {
  CHECK(kind() == OPTIMIZED_FUNCTION);
  return raw_type_feedback_info();
}

void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
  CHECK(kind() == OPTIMIZED_FUNCTION);
  set_raw_type_feedback_info(value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}

int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC);
  return Smi::cast(raw_type_feedback_info())->value();
}

void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  set_raw_type_feedback_info(Smi::FromInt(value));
}

ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)

byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}

byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}

int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}

ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}

int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}

byte* Code::entry() {
  return instruction_start();
}

bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

ACCESSORS(JSArray, length, Object, kLengthOffset)

void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}

void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}

ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)

bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}

void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}

bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}

void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}

ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)

ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)

JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<Type>(smi->value());
}

int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}

JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}

String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}

Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}

void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}

ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}

ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}

bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}

bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}

bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}

bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}

bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}

bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}

bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}

bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}

bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}

#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
  bool JSObject::HasExternal##name##Elements() {     \
    HeapObject* array = elements();                  \
    ASSERT(array != NULL);                           \
    if (!array->IsHeapObject()) return false;        \
    return array->map()->instance_type() == type;    \
  }

EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedInt, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Float, EXTERNAL_FLOAT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Double, EXTERNAL_DOUBLE_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE) bool JSObject::HasNamedInterceptor() { return map()->has_named_interceptor(); } bool JSObject::HasIndexedInterceptor() { return map()->has_indexed_interceptor(); } MaybeObject* JSObject::EnsureWritableFastElements() { ASSERT(HasFastSmiOrObjectElements()); FixedArray* elems = FixedArray::cast(elements()); Isolate* isolate = GetIsolate(); if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; Object* writable_elems; { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap( elems, isolate->heap()->fixed_array_map()); if (!maybe_writable_elems->ToObject(&writable_elems)) { return maybe_writable_elems; } } set_elements(FixedArray::cast(writable_elems)); isolate->counters()->cow_arrays_converted()->Increment(); return writable_elems; } NameDictionary* JSObject::property_dictionary() { ASSERT(!HasFastProperties()); return NameDictionary::cast(properties()); } SeededNumberDictionary* JSObject::element_dictionary() { ASSERT(HasDictionaryElements()); return SeededNumberDictionary::cast(elements()); } bool Name::IsHashFieldComputed(uint32_t field) { return (field & kHashNotComputedMask) == 0; } bool Name::HasHashCode() { return IsHashFieldComputed(hash_field()); } uint32_t Name::Hash() {
// Fast case: has hash code already been computed?
uint32_t field = hash_field(); if (IsHashFieldComputed(field)) return field >> kHashShift;
// Slow case: compute hash code and set it. Has to be a string.
return String::cast(this)->ComputeAndSetHash(); } StringHasher::StringHasher(int length, uint32_t seed) : length_(length), raw_running_hash_(seed), array_index_(0), is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize), is_first_char_(true) { ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0); } bool StringHasher::has_trivial_hash() { return length_ > String::kMaxHashCalcLength; } uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) { running_hash += c; running_hash += (running_hash << 10); running_hash ^= (running_hash >> 6); return running_hash; } uint32_t StringHasher::GetHashCore(uint32_t running_hash) { running_hash += (running_hash << 3); running_hash ^= (running_hash >> 11); running_hash += (running_hash << 15); if ((running_hash & String::kHashBitMask) == 0) { return kZeroHash; } return running_hash; } void StringHasher::AddCharacter(uint16_t c) {
// Use the Jenkins one-at-a-time hash function to update the hash
// for the given character.
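// Illustrative worked trace (not part of the original source): with seed 0
// and the single character 'a' (97), AddCharacterCore computes
//   h  = 0 + 97        ->  97      (0x00061)
//   h += h << 10       ->  99425   (0x18461)
//   h ^= h >> 6        ->  98928   (0x18270)
// and GetHashCore then applies the final avalanche steps (h += h << 3,
// h ^= h >> 11, h += h << 15), falling back to kZeroHash if the bits kept by
// String::kHashBitMask all come out zero.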
raw_running_hash_ = AddCharacterCore(raw_running_hash_, c); } bool StringHasher::UpdateIndex(uint16_t c) { ASSERT(is_array_index_); if (c < '0' || c > '9') { is_array_index_ = false; return false; } int d = c - '0'; if (is_first_char_) { is_first_char_ = false; if (c == '0' && length_ > 1) { is_array_index_ = false; return false; } } if (array_index_ > 429496729U - ((d + 2) >> 3)) { is_array_index_ = false; return false; } array_index_ = array_index_ * 10 + d; return true; } template <typename Char> inline void StringHasher::AddCharacters(const Char* chars, int length) { ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2); int i = 0; if (is_array_index_) { for (; i < length; i++) { AddCharacter(chars[i]); if (!UpdateIndex(chars[i])) { i++; break; } } } for (; i < length; i++) { ASSERT(!is_array_index_); AddCharacter(chars[i]); } } template <typename schar> uint32_t StringHasher::HashSequentialString(const schar* chars, int length, uint32_t seed) { StringHasher hasher(length, seed); if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length); return hasher.GetHashField(); } bool Name::AsArrayIndex(uint32_t* index) { return IsString() && String::cast(this)->AsArrayIndex(index); } bool String::AsArrayIndex(uint32_t* index) { uint32_t field = hash_field(); if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) { return false; } return SlowAsArrayIndex(index); } Object* JSReceiver::GetPrototype() { return map()->prototype(); } Object* JSReceiver::GetConstructor() { return map()->constructor(); } bool JSReceiver::HasProperty(Handle<JSReceiver> object, Handle<Name> name) { if (object->IsJSProxy()) { Handle<JSProxy> proxy = Handle<JSProxy>::cast(object); return JSProxy::HasPropertyWithHandler(proxy, name); } return object->GetPropertyAttribute(*name) != ABSENT; } bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object, Handle<Name> name) { if (object->IsJSProxy()) { Handle<JSProxy> proxy = Handle<JSProxy>::cast(object); return JSProxy::HasPropertyWithHandler(proxy, name); } return object->GetLocalPropertyAttribute(*name) != ABSENT; } PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) { uint32_t index; if (IsJSObject() && key->AsArrayIndex(&index)) { return GetElementAttribute(index); } return GetPropertyAttributeWithReceiver(this, key); } PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) { if (IsJSProxy()) { return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index); } return JSObject::cast(this)->GetElementAttributeWithReceiver( this, index, true); } bool JSGlobalObject::IsDetached() { return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this); } bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) { return GetPrototype() != global; } Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) { return object->IsJSProxy() ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object)) : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object)); } Object* JSReceiver::GetIdentityHash() { return IsJSProxy() ?
JSProxy::cast(this)->GetIdentityHash() : JSObject::cast(this)->GetIdentityHash(); } bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) { if (object->IsJSProxy()) { Handle<JSProxy> proxy = Handle<JSProxy>::cast(object); return JSProxy::HasElementWithHandler(proxy, index); } return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver( *object, index, true) != ABSENT; } bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) { if (object->IsJSProxy()) { Handle<JSProxy> proxy = Handle<JSProxy>::cast(object); return JSProxy::HasElementWithHandler(proxy, index); } return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver( *object, index, false) != ABSENT; } PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) { if (IsJSProxy()) { return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index); } return JSObject::cast(this)->GetElementAttributeWithReceiver( this, index, false); } bool AccessorInfo::all_can_read() { return BooleanBit::get(flag(), kAllCanReadBit); } void AccessorInfo::set_all_can_read(bool value) { set_flag(BooleanBit::set(flag(), kAllCanReadBit, value)); } bool AccessorInfo::all_can_write() { return BooleanBit::get(flag(), kAllCanWriteBit); } void AccessorInfo::set_all_can_write(bool value) { set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value)); } bool AccessorInfo::prohibits_overwriting() { return BooleanBit::get(flag(), kProhibitsOverwritingBit); } void AccessorInfo::set_prohibits_overwriting(bool value) { set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value)); } PropertyAttributes AccessorInfo::property_attributes() { return AttributesField::decode(static_cast<int>(flag()->value())); } void AccessorInfo::set_property_attributes(PropertyAttributes attributes) { set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes))); } bool AccessorInfo::IsCompatibleReceiver(Object* receiver) { Object* function_template = expected_receiver_type(); if (!function_template->IsFunctionTemplateInfo()) return true; return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver); } void AccessorPair::set_access_flags(v8::AccessControl access_control) { int current = access_flags()->value(); current = BooleanBit::set(current, kProhibitsOverwritingBit, access_control & PROHIBITS_OVERWRITING); current = BooleanBit::set(current, kAllCanReadBit, access_control & ALL_CAN_READ); current = BooleanBit::set(current, kAllCanWriteBit, access_control & ALL_CAN_WRITE); set_access_flags(Smi::FromInt(current)); } bool AccessorPair::all_can_read() { return BooleanBit::get(access_flags(), kAllCanReadBit); } bool AccessorPair::all_can_write() { return BooleanBit::get(access_flags(), kAllCanWriteBit); } bool AccessorPair::prohibits_overwriting() { return BooleanBit::get(access_flags(), kProhibitsOverwritingBit); } template<typename Shape, typename Key> void Dictionary<Shape, Key>::SetEntry(int entry, Object* key, Object* value) { SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0))); } template<typename Shape, typename Key> void Dictionary<Shape, Key>::SetEntry(int entry, Object* key, Object* value, PropertyDetails details) { ASSERT(!key->IsName() || details.IsDeleted() || details.dictionary_index() > 0); int index = HashTable<Shape, Key>::EntryToIndex(entry); DisallowHeapAllocation no_gc; WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); FixedArray::set(index, key, mode); FixedArray::set(index+1, value, mode); FixedArray::set(index+2, details.AsSmi()); } bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) { ASSERT(other->IsNumber()); return key == static_cast<uint32_t>(other->Number()); }
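// Layout note (illustrative sketch, inferred from the SetEntry code above):
// each dictionary entry occupies three consecutive FixedArray slots,
//   array[EntryToIndex(entry) + 0] = key
//   array[EntryToIndex(entry) + 1] = value
//   array[EntryToIndex(entry) + 2] = details.AsSmi()
// which is why EntryToIndex rather than the raw entry number is used.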
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) { return ComputeIntegerHash(key, 0); } uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key, Object* other) { ASSERT(other->IsNumber()); return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0); } uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) { return ComputeIntegerHash(key, seed); } uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key, uint32_t seed, Object* other) { ASSERT(other->IsNumber()); return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed); } MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) { return heap->NumberFromUint32(key); } bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
// We know that all entries in a hash table had their hash keys created.
// Use that knowledge to have fast failure.
if (key->Hash() != Name::cast(other)->Hash()) return false; return key->Equals(Name::cast(other)); } uint32_t NameDictionaryShape::Hash(Name* key) { return key->Hash(); } uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) { return Name::cast(other)->Hash(); } MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) { ASSERT(key->IsUniqueName()); return key; } template <int entrysize> bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) { return key->SameValue(other); } template <int entrysize> uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) { return Smi::cast(key->GetHash())->value(); } template <int entrysize> uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key, Object* other) { return Smi::cast(other->GetHash())->value(); } template <int entrysize> MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap, Object* key) { return key; } template <int entrysize> bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) { return key->SameValue(other); } template <int entrysize> uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) { intptr_t hash = reinterpret_cast<intptr_t>(key); return (uint32_t)(hash & 0xFFFFFFFF); } template <int entrysize> uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key, Object* other) { intptr_t hash = reinterpret_cast<intptr_t>(other); return (uint32_t)(hash & 0xFFFFFFFF); } template <int entrysize> MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap, Object* key) { return key; } void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
// - IncrementalMarking::Step
ASSERT(!heap->InNewSpace(heap->empty_fixed_array())); WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array()); } void JSArray::EnsureSize(int required_size) { ASSERT(HasFastSmiOrObjectElements()); FixedArray* elts = FixedArray::cast(elements()); const int kArraySizeThatFitsComfortablyInNewSpace = 128; if (elts->length() < required_size) {
// Doubling in size would be overkill, but leave some slack to avoid
// constantly growing.
Expand(required_size + (required_size >> 3));
// It's a performance benefit to keep a frequently used array in new-space.
} else if (!GetHeap()->new_space()->Contains(elts) && required_size < kArraySizeThatFitsComfortablyInNewSpace) {
// Expand will allocate a new backing store in new space even if the size
// we asked for isn't larger than what we had before.
Expand(required_size); } } void JSArray::set_length(Smi* length) {
// Don't need a write barrier for a Smi.
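// (Illustrative note, assuming v8's usual smi tagging scheme: a Smi is a
// tagged immediate, not a heap pointer, so storing one can never create a
// pointer the GC has to track, which is what makes SKIP_WRITE_BARRIER safe:
//   ASSERT(!Smi::FromInt(42)->IsHeapObject());  // integer shifted up, tag 0
// )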
set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER); } bool JSArray::AllowsSetElementsLength() { bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray(); ASSERT(result == !HasExternalArrayElements()); return result; } MaybeObject* JSArray::SetContent(FixedArrayBase* storage) { MaybeObject* maybe_result = EnsureCanContainElements( storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS); if (maybe_result->IsFailure()) return maybe_result; ASSERT((storage->map() == GetHeap()->fixed_double_array_map() && IsFastDoubleElementsKind(GetElementsKind())) || ((storage->map() != GetHeap()->fixed_double_array_map()) && (IsFastObjectElementsKind(GetElementsKind()) || (IsFastSmiElementsKind(GetElementsKind()) && FixedArray::cast(storage)->ContainsOnlySmisOrHoles())))); set_elements(storage); set_length(Smi::FromInt(storage->length())); return this; } MaybeObject* FixedArray::Copy() { if (length() == 0) return this; return GetHeap()->CopyFixedArray(this); } MaybeObject* FixedDoubleArray::Copy() { if (length() == 0) return this; return GetHeap()->CopyFixedDoubleArray(this); } MaybeObject* ConstantPoolArray::Copy() { if (length() == 0) return this; return GetHeap()->CopyConstantPoolArray(this); } void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) { set(1 + index * 2, Smi::FromInt(id.ToInt())); } TypeFeedbackId TypeFeedbackCells::AstId(int index) { return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value()); } void TypeFeedbackCells::SetCell(int index, Cell* cell) { set(index * 2, cell); } Cell* TypeFeedbackCells::GetCell(int index) { return Cell::cast(get(index * 2)); } Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) { return isolate->factory()->the_hole_value(); } Handle
(code); return result; } Object* Code::GetObjectFromEntryAddress(Address location_of_address) { return HeapObject:: FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); } Object* Map::prototype() { return READ_FIELD(this, kPrototypeOffset); } void Map::set_prototype(Object* value, WriteBarrierMode mode) { ASSERT(value->IsNull() || value->IsJSReceiver()); WRITE_FIELD(this, kPrototypeOffset, value); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode); }
// If the descriptor is using the empty transition array, install a new empty
// transition array that will have place for an element transition.
static MaybeObject* EnsureHasTransitionArray(Map* map) { TransitionArray* transitions; MaybeObject* maybe_transitions; if (!map->HasTransitionArray()) { maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0); if (!maybe_transitions->To(&transitions)) return maybe_transitions; transitions->set_back_pointer_storage(map->GetBackPointer()); } else if (!map->transitions()->IsFullTransitionArray()) { maybe_transitions = map->transitions()->ExtendToFullTransitionArray(); if (!maybe_transitions->To(&transitions)) return maybe_transitions; } else { return map; } map->set_transitions(transitions); return transitions; } void Map::InitializeDescriptors(DescriptorArray* descriptors) { int len = descriptors->number_of_descriptors(); set_instance_descriptors(descriptors); SetNumberOfOwnDescriptors(len); } ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) void Map::set_bit_field3(uint32_t bits) {
// Ensure the upper 2 bits have the same value by sign extending it. This is
// necessary to be able to use the 31st bit.
int value = bits << 1; WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1)); } uint32_t Map::bit_field3() { Object* value = READ_FIELD(this, kBitField3Offset); return Smi::cast(value)->value(); } void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) { Object* back_pointer = GetBackPointer(); if (Heap::ShouldZapGarbage() && HasTransitionArray()) { ZapTransitions(); } WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer); CONDITIONAL_WRITE_BARRIER( heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode); } void Map::AppendDescriptor(Descriptor* desc, const DescriptorArray::WhitenessWitness& witness) { DescriptorArray* descriptors = instance_descriptors(); int number_of_own_descriptors = NumberOfOwnDescriptors(); ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors); descriptors->Append(desc, witness); SetNumberOfOwnDescriptors(number_of_own_descriptors + 1); } Object* Map::GetBackPointer() { Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); if (object->IsDescriptorArray()) { return TransitionArray::cast(object)->back_pointer_storage(); } else { ASSERT(object->IsMap() || object->IsUndefined()); return object; } } bool Map::HasElementsTransition() { return HasTransitionArray() && transitions()->HasElementsTransition(); } bool Map::HasTransitionArray() { Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); return object->IsTransitionArray(); } Map* Map::elements_transition_map() { int index = transitions()->Search(GetHeap()->elements_transition_symbol()); return transitions()->GetTarget(index); } bool Map::CanHaveMoreTransitions() { if (!HasTransitionArray()) return true; return FixedArray::SizeFor(transitions()->length() + TransitionArray::kTransitionSize) <= Page::kMaxNonCodeHeapObjectSize; } MaybeObject* Map::AddTransition(Name* key, Map*
target, SimpleTransitionFlag flag) { if (HasTransitionArray()) return transitions()->CopyInsert(key, target); return TransitionArray::NewWith(flag, key, target, GetBackPointer()); } void Map::SetTransition(int transition_index, Map* target) { transitions()->SetTarget(transition_index, target); } Map* Map::GetTransition(int transition_index) { return transitions()->GetTarget(transition_index); } MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) { TransitionArray* transitions; MaybeObject* maybe_transitions = AddTransition( GetHeap()->elements_transition_symbol(), transitioned_map, FULL_TRANSITION); if (!maybe_transitions->To(&transitions)) return maybe_transitions; set_transitions(transitions); return transitions; } FixedArray* Map::GetPrototypeTransitions() { if (!HasTransitionArray()) return GetHeap()->empty_fixed_array(); if (!transitions()->HasPrototypeTransitions()) { return GetHeap()->empty_fixed_array(); } return transitions()->GetPrototypeTransitions(); } MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) { MaybeObject* allow_prototype = EnsureHasTransitionArray(this); if (allow_prototype->IsFailure()) return allow_prototype; int old_number_of_transitions = NumberOfProtoTransitions();
#ifdef DEBUG
if (HasPrototypeTransitions()) { ASSERT(GetPrototypeTransitions() != proto_transitions); ZapPrototypeTransitions(); }
#endif
transitions()->SetPrototypeTransitions(proto_transitions); SetNumberOfProtoTransitions(old_number_of_transitions); return this; } bool Map::HasPrototypeTransitions() { return HasTransitionArray() && transitions()->HasPrototypeTransitions(); } TransitionArray* Map::transitions() { ASSERT(HasTransitionArray()); Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); return TransitionArray::cast(object); } void Map::set_transitions(TransitionArray* transition_array, WriteBarrierMode mode) {
// Transition arrays are not shared. When one is replaced, it should not
// keep referenced objects alive, so we zap it.
// When there is another reference to the array somewhere (e.g. a handle),
// not zapping turns from a waste of memory into a source of crashes.
if (HasTransitionArray()) {
#ifdef DEBUG
for (int i = 0; i < transitions()->number_of_transitions(); i++) { Map* target = transitions()->GetTarget(i); if (target->instance_descriptors() == instance_descriptors()) { Name* key = transitions()->GetKey(i); int new_target_index = transition_array->Search(key); ASSERT(new_target_index != TransitionArray::kNotFound); ASSERT(transition_array->GetTarget(new_target_index) == target); } }
#endif
ASSERT(transitions() != transition_array); ZapTransitions(); } WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array); CONDITIONAL_WRITE_BARRIER( GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode); } void Map::init_back_pointer(Object* undefined) { ASSERT(undefined->IsUndefined()); WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined); } void Map::SetBackPointer(Object* value, WriteBarrierMode mode) { ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE); ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) || (value->IsMap() && GetBackPointer()->IsUndefined())); Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); if (object->IsTransitionArray()) { TransitionArray::cast(object)->set_back_pointer_storage(value); } else { WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value); CONDITIONAL_WRITE_BARRIER( GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode); } }
// Can either be Smi (no transitions), normal transition array, or a transition
// array with the header overwritten as a Smi (thus iterating).
TransitionArray* Map::unchecked_transition_array() { Object* object = *HeapObject::RawField(this, Map::kTransitionsOrBackPointerOffset); TransitionArray* transition_array = static_cast<TransitionArray*>(object); return transition_array; } HeapObject* Map::UncheckedPrototypeTransitions() { ASSERT(HasTransitionArray()); ASSERT(unchecked_transition_array()->HasPrototypeTransitions()); return unchecked_transition_array()->UncheckedPrototypeTransitions(); } ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset) ACCESSORS(Map, constructor, Object, kConstructorOffset) ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset) ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset) ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset) ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset) ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset) ACCESSORS(AccessorInfo, name, Object, kNameOffset) ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset) ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray, kSerializedDataOffset) ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor, kDescriptorOffset) ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset) ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset) ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset) ACCESSORS(Box, value, Object, kValueOffset) ACCESSORS(AccessorPair, getter, Object, kGetterOffset) ACCESSORS(AccessorPair, setter, Object, kSetterOffset) ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset) ACCESSORS(AccessCheckInfo, named_callback, Object,
kNamedCallbackOffset) ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset) ACCESSORS(AccessCheckInfo, data, Object, kDataOffset) ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset) ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset) ACCESSORS(InterceptorInfo, query, Object, kQueryOffset) ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset) ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset) ACCESSORS(InterceptorInfo, data, Object, kDataOffset) ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset) ACCESSORS(CallHandlerInfo, data, Object, kDataOffset) ACCESSORS(TemplateInfo, tag, Object, kTagOffset) ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset) ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset) ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset) ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset) ACCESSORS(FunctionTemplateInfo, prototype_template, Object, kPrototypeTemplateOffset) ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset) ACCESSORS(FunctionTemplateInfo, named_property_handler, Object, kNamedPropertyHandlerOffset) ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object, kIndexedPropertyHandlerOffset) ACCESSORS(FunctionTemplateInfo, instance_template, Object, kInstanceTemplateOffset) ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset) ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset) ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object, kInstanceCallHandlerOffset) ACCESSORS(FunctionTemplateInfo, access_check_info, Object, kAccessCheckInfoOffset) ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset) ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset) ACCESSORS(ObjectTemplateInfo, internal_field_count, Object, kInternalFieldCountOffset) ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset) ACCESSORS(SignatureInfo, args, Object, kArgsOffset) ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset) ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset) ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset) ACCESSORS_TO_SMI(AllocationSite, memento_found_count, kMementoFoundCountOffset) ACCESSORS_TO_SMI(AllocationSite, memento_create_count, kMementoCreateCountOffset) ACCESSORS_TO_SMI(AllocationSite, pretenure_decision, kPretenureDecisionOffset) ACCESSORS(AllocationSite, dependent_code, DependentCode, kDependentCodeOffset) ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset) ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset) ACCESSORS(Script, source, Object, kSourceOffset) ACCESSORS(Script, name, Object, kNameOffset) ACCESSORS(Script, id, Smi, kIdOffset) ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset) ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset) ACCESSORS(Script, data, Object, kDataOffset) ACCESSORS(Script, context_data, Object, kContextOffset) ACCESSORS(Script, wrapper, Foreign, kWrapperOffset) ACCESSORS_TO_SMI(Script, type, kTypeOffset) ACCESSORS(Script, line_ends, Object, kLineEndsOffset) ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset) ACCESSORS_TO_SMI(Script, eval_from_instructions_offset, kEvalFrominstructionsOffsetOffset) ACCESSORS_TO_SMI(Script, flags, kFlagsOffset) BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit) Script::CompilationType Script::compilation_type() { return 
BooleanBit::get(flags(), kCompilationTypeBit) ? COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST; } void Script::set_compilation_type(CompilationType type) { set_flags(BooleanBit::set(flags(), kCompilationTypeBit, type == COMPILATION_TYPE_EVAL)); } Script::CompilationState Script::compilation_state() { return BooleanBit::get(flags(), kCompilationStateBit) ? COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL; } void Script::set_compilation_state(CompilationState state) { set_flags(BooleanBit::set(flags(), kCompilationStateBit, state == COMPILATION_STATE_COMPILED)); }
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex) ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex) ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex) ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex) ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex) ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex) ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex) ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset) ACCESSORS(SharedFunctionInfo, optimized_code_map, Object, kOptimizedCodeMapOffset) ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset) ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset) ACCESSORS(SharedFunctionInfo, instance_class_name, Object, kInstanceClassNameOffset) ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset) ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset) ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset) ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset) SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset) BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, kHiddenPrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, kNeedsAccessCheckBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype, kReadOnlyPrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype, kRemovePrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache, kDoNotCacheBit) BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression, kIsExpressionBit) BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel, kIsTopLevelBit) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation, kAllowLazyCompilation) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation_without_context, kAllowLazyCompilationWithoutContext) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_arguments, kUsesArguments) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, has_duplicate_parameters, kHasDuplicateParameters)
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset) SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count, kFormalParameterCountOffset) SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset) SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type, kStartPositionAndTypeOffset) SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset) SMI_ACCESSORS(SharedFunctionInfo,
function_token_position, kFunctionTokenPositionOffset) SMI_ACCESSORS(SharedFunctionInfo, compiler_hints, kCompilerHintsOffset) SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason, kOptCountAndBailoutReasonOffset) SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
STATIC_ASSERT(holder::offset % kPointerSize == 0); \
int holder::name() { \
int value = READ_INT_FIELD(this, offset); \
ASSERT(kHeapObjectTag == 1); \
ASSERT((value & kHeapObjectTag) == 0); \
return value >> 1; \
} \
void holder::set_##name(int value) { \
ASSERT(kHeapObjectTag == 1); \
ASSERT((value & 0xC0000000) == 0xC0000000 || \
(value & 0xC0000000) == 0x000000000); \
WRITE_INT_FIELD(this, \
offset, \
(value << 1) & ~kHeapObjectTag); \
}
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
INT_ACCESSORS(holder, name, offset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, formal_parameter_count, kFormalParameterCountOffset) PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, expected_nof_properties, kExpectedNofPropertiesOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset) PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, start_position_and_type, kStartPositionAndTypeOffset) PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, function_token_position, kFunctionTokenPositionOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, compiler_hints, kCompilerHintsOffset) PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, opt_count_and_bailout_reason, kOptCountAndBailoutReasonOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
#endif
int SharedFunctionInfo::construction_count() { return READ_BYTE_FIELD(this, kConstructionCountOffset); } void SharedFunctionInfo::set_construction_count(int value) { ASSERT(0 <= value && value < 256); WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value)); } BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, live_objects_may_exist, kLiveObjectsMayExist) bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() { return initial_map() != GetHeap()->undefined_value(); } BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled, kOptimizationDisabled) void SharedFunctionInfo::set_optimization_disabled(bool disable) { set_compiler_hints(BooleanBit::set(compiler_hints(), kOptimizationDisabled, disable));
// If disabling optimizations we reflect that in the code object so
// it will not be counted as optimizable code.
if ((code()->kind() == Code::FUNCTION) && disable) { code()->set_optimizable(false); } } int SharedFunctionInfo::profiler_ticks() { if (code()->kind() != Code::FUNCTION) return 0; return code()->profiler_ticks(); } LanguageMode SharedFunctionInfo::language_mode() { int hints = compiler_hints(); if (BooleanBit::get(hints, kExtendedModeFunction)) { ASSERT(BooleanBit::get(hints, kStrictModeFunction)); return EXTENDED_MODE; } return BooleanBit::get(hints, kStrictModeFunction) ? STRICT_MODE : CLASSIC_MODE; } void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
// We only allow language mode transitions that set the same language mode
// again or go up in the chain:
// CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
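// (Illustrative expansion of that rule, derived from the ASSERT below:
//   CLASSIC_MODE  -> CLASSIC_MODE, STRICT_MODE, or EXTENDED_MODE
//   STRICT_MODE   -> STRICT_MODE or EXTENDED_MODE
//   EXTENDED_MODE -> EXTENDED_MODE
// anything else, e.g. STRICT_MODE -> CLASSIC_MODE, trips the assert.)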
ASSERT(this->language_mode() == CLASSIC_MODE || this->language_mode() == language_mode || language_mode == EXTENDED_MODE); int hints = compiler_hints(); hints = BooleanBit::set( hints, kStrictModeFunction, language_mode != CLASSIC_MODE); hints = BooleanBit::set( hints, kExtendedModeFunction, language_mode == EXTENDED_MODE); set_compiler_hints(hints); } bool SharedFunctionInfo::is_classic_mode() { return !BooleanBit::get(compiler_hints(), kStrictModeFunction); } BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode, kExtendedModeFunction) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin, kInlineBuiltin) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, name_should_print_as_anonymous, kNameShouldPrintAsAnonymous) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize, kDontOptimize) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator) void SharedFunctionInfo::BeforeVisitingPointers() { if (IsInobjectSlackTrackingInProgress()) DetachInitialMap(); } ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset) ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset) ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset) bool Script::HasValidSource() { Object* src = this->source(); if (!src->IsString()) return true; String* src_str = String::cast(src); if (!StringShape(src_str).IsExternal()) return true; if (src_str->IsOneByteRepresentation()) { return ExternalAsciiString::cast(src)->resource() != NULL; } else if (src_str->IsTwoByteRepresentation()) { return ExternalTwoByteString::cast(src)->resource() != NULL; } return true; } void SharedFunctionInfo::DontAdaptArguments() { ASSERT(code()->kind() == Code::BUILTIN); set_formal_parameter_count(kDontAdaptArgumentsSentinel); } int SharedFunctionInfo::start_position() { return start_position_and_type() >> kStartPositionShift; } void SharedFunctionInfo::set_start_position(int start_position) { set_start_position_and_type((start_position << kStartPositionShift) | (start_position_and_type() & ~kStartPositionMask)); } Code* SharedFunctionInfo::code() { return Code::cast(READ_FIELD(this, kCodeOffset)); } void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION); WRITE_FIELD(this, kCodeOffset, value); CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode); } void SharedFunctionInfo::ReplaceCode(Code* value) { // If the GC metadata field is already used then the function was // enqueued as a code flushing candidate and we remove it now. 
if (code()->gc_metadata() != NULL) { CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); flusher->EvictCandidate(this); } ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL); set_code(value); } ScopeInfo* SharedFunctionInfo::scope_info() { return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset)); } void SharedFunctionInfo::set_scope_info(ScopeInfo* value, WriteBarrierMode mode) { WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, reinterpret_cast<Object*>(value), mode); } bool SharedFunctionInfo::is_compiled() { return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); } bool SharedFunctionInfo::IsApiFunction() { return function_data()->IsFunctionTemplateInfo(); } FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() { ASSERT(IsApiFunction()); return FunctionTemplateInfo::cast(function_data()); } bool SharedFunctionInfo::HasBuiltinFunctionId() { return function_data()->IsSmi(); } BuiltinFunctionId SharedFunctionInfo::builtin_function_id() { ASSERT(HasBuiltinFunctionId()); return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value()); } int SharedFunctionInfo::ic_age() { return ICAgeBits::decode(counters()); } void SharedFunctionInfo::set_ic_age(int ic_age) { set_counters(ICAgeBits::update(counters(), ic_age)); } int SharedFunctionInfo::deopt_count() { return DeoptCountBits::decode(counters()); } void SharedFunctionInfo::set_deopt_count(int deopt_count) { set_counters(DeoptCountBits::update(counters(), deopt_count)); } void SharedFunctionInfo::increment_deopt_count() { int value = counters(); int deopt_count = DeoptCountBits::decode(value); deopt_count = (deopt_count + 1) & DeoptCountBits::kMax; set_counters(DeoptCountBits::update(value, deopt_count)); } int SharedFunctionInfo::opt_reenable_tries() { return OptReenableTriesBits::decode(counters()); } void SharedFunctionInfo::set_opt_reenable_tries(int tries) { set_counters(OptReenableTriesBits::update(counters(), tries)); } int SharedFunctionInfo::opt_count() { return OptCountBits::decode(opt_count_and_bailout_reason()); } void SharedFunctionInfo::set_opt_count(int opt_count) { set_opt_count_and_bailout_reason( OptCountBits::update(opt_count_and_bailout_reason(), opt_count)); } BailoutReason SharedFunctionInfo::DisableOptimizationReason() { BailoutReason reason = static_cast<BailoutReason>( DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason())); return reason; } bool SharedFunctionInfo::has_deoptimization_support() { Code* code = this->code(); return code->kind() == Code::FUNCTION && code->has_deoptimization_support(); } void SharedFunctionInfo::TryReenableOptimization() { int tries = opt_reenable_tries(); set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
// We reenable optimization whenever the number of tries is a large
// enough power of 2.
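// (Worked example, not from the original source: the test below fires exactly
// when the pre-increment value of tries is a power of two >= 16, i.e. 16, 32,
// 64, ... A power of two shares no set bits with its predecessor, so for
// tries == 16 we get 15 & 16 == 0b01111 & 0b10000 == 0, while tries == 24
// fails because 23 & 24 == 0b10111 & 0b11000 == 0b10000 != 0.)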
if (tries >= 16 && (((tries - 1) & tries) == 0)) { set_optimization_disabled(false); set_opt_count(0); set_deopt_count(0); code()->set_optimizable(true); } } bool JSFunction::IsBuiltin() { return context()->global_object()->IsJSBuiltinsObject(); } bool JSFunction::NeedsArgumentsAdaption() { return shared()->formal_parameter_count() != SharedFunctionInfo::kDontAdaptArgumentsSentinel; } bool JSFunction::IsOptimized() { return code()->kind() == Code::OPTIMIZED_FUNCTION; } bool JSFunction::IsOptimizable() { return code()->kind() == Code::FUNCTION && code()->optimizable(); } bool JSFunction::IsMarkedForLazyRecompilation() { return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile); } bool JSFunction::IsMarkedForConcurrentRecompilation() { return code() == GetIsolate()->builtins()->builtin( Builtins::kConcurrentRecompile); } bool JSFunction::IsInRecompileQueue() { return code() == GetIsolate()->builtins()->builtin( Builtins::kInRecompileQueue); } Code* JSFunction::code() { return Code::cast( Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); } void JSFunction::set_code(Code* value) { ASSERT(!GetHeap()->InNewSpace(value)); Address entry = value->entry(); WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); GetHeap()->incremental_marking()->RecordWriteOfCodeEntry( this, HeapObject::RawField(this, kCodeEntryOffset), value); } void JSFunction::set_code_no_write_barrier(Code* value) { ASSERT(!GetHeap()->InNewSpace(value)); Address entry = value->entry(); WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); } void JSFunction::ReplaceCode(Code* code) { bool was_optimized = IsOptimized(); bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; if (was_optimized && is_optimized) { shared()->EvictFromOptimizedCodeMap( this->code(), "Replacing with another optimized code"); } set_code(code);
// Add/remove the function from the list of optimized functions for this
// context based on the state change.
if (!was_optimized && is_optimized) { context()->native_context()->AddOptimizedFunction(this); } if (was_optimized && !is_optimized) {
// TODO(titzer): linear in the number of optimized functions; fix!
context()->native_context()->RemoveOptimizedFunction(this); } } Context* JSFunction::context() { return Context::cast(READ_FIELD(this, kContextOffset)); } void JSFunction::set_context(Object* value) { ASSERT(value->IsUndefined() || value->IsContext()); WRITE_FIELD(this, kContextOffset, value); WRITE_BARRIER(GetHeap(), this, kContextOffset, value); } ACCESSORS(JSFunction, prototype_or_initial_map, Object, kPrototypeOrInitialMapOffset) Map* JSFunction::initial_map() { return Map::cast(prototype_or_initial_map()); } void JSFunction::set_initial_map(Map* value) { set_prototype_or_initial_map(value); } bool JSFunction::has_initial_map() { return prototype_or_initial_map()->IsMap(); } bool JSFunction::has_instance_prototype() { return has_initial_map() || !prototype_or_initial_map()->IsTheHole(); } bool JSFunction::has_prototype() { return map()->has_non_instance_prototype() || has_instance_prototype(); } Object* JSFunction::instance_prototype() { ASSERT(has_instance_prototype()); if (has_initial_map()) return initial_map()->prototype();
// When there is no initial map and the prototype is a JSObject, the
// initial map field is used for the prototype field.
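// (Illustrative sketch of the three states prototype_or_initial_map can be
// in, inferred from the predicates above:
//   the hole   -> no instance prototype yet (see has_instance_prototype)
//   a Map      -> has_initial_map(); the prototype is initial_map()->prototype()
//   a JSObject -> the prototype itself, stored directly in this field.)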
return prototype_or_initial_map(); } Object* JSFunction::prototype() { ASSERT(has_prototype());
// If the function's prototype property has been set to a non-JSObject
// value, that value is stored in the constructor field of the map.
if (map()->has_non_instance_prototype()) return map()->constructor(); return instance_prototype(); } bool JSFunction::should_have_prototype() { return map()->function_with_prototype(); } bool JSFunction::is_compiled() { return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); } FixedArray* JSFunction::literals() { ASSERT(!shared()->bound()); return literals_or_bindings(); } void JSFunction::set_literals(FixedArray* literals) { ASSERT(!shared()->bound()); set_literals_or_bindings(literals); } FixedArray* JSFunction::function_bindings() { ASSERT(shared()->bound()); return literals_or_bindings(); } void JSFunction::set_function_bindings(FixedArray* bindings) { ASSERT(shared()->bound());
// Bound function literal may be initialized to the empty fixed array
// before the bindings are set.
ASSERT(bindings == GetHeap()->empty_fixed_array() || bindings->map() == GetHeap()->fixed_cow_array_map()); set_literals_or_bindings(bindings); } int JSFunction::NumberOfLiterals() { ASSERT(!shared()->bound()); return literals()->length(); } Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { ASSERT(id < kJSBuiltinsCount); // id is unsigned.
return READ_FIELD(this, OffsetOfFunctionWithId(id)); } void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id, Object* value) { ASSERT(id < kJSBuiltinsCount); // id is unsigned.
WRITE_FIELD(this, OffsetOfFunctionWithId(id), value); WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value); } Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) { ASSERT(id < kJSBuiltinsCount); // id is unsigned.
return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id))); } void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id, Code* value) { ASSERT(id < kJSBuiltinsCount); // id is unsigned.
WRITE_FIELD(this, OffsetOfCodeWithId(id), value); ASSERT(!GetHeap()->InNewSpace(value)); } ACCESSORS(JSProxy, handler, Object, kHandlerOffset) ACCESSORS(JSProxy, hash, Object, kHashOffset) ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset) ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset) void JSProxy::InitializeBody(int object_size, Object* value) { ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value)); for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { WRITE_FIELD(this, offset, value); } } ACCESSORS(JSSet, table, Object, kTableOffset) ACCESSORS(JSMap, table, Object, kTableOffset) ACCESSORS(JSWeakCollection, table, Object, kTableOffset) ACCESSORS(JSWeakCollection, next, Object, kNextOffset) Address Foreign::foreign_address() { return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset)); } void Foreign::set_foreign_address(Address value) { WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value)); } ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset) ACCESSORS(JSGeneratorObject, context, Context, kContextOffset) ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset) SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset) ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset) SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset) JSGeneratorObject* JSGeneratorObject::cast(Object* obj) { ASSERT(obj->IsJSGeneratorObject()); ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize); return reinterpret_cast<JSGeneratorObject*>(obj); } ACCESSORS(JSModule, context, Object, kContextOffset) ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset) JSModule* JSModule::cast(Object* obj) { ASSERT(obj->IsJSModule()); ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize); return reinterpret_cast<JSModule*>(obj); } ACCESSORS(JSValue, value, Object, kValueOffset) JSValue* JSValue::cast(Object* obj) { ASSERT(obj->IsJSValue()); ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize); return reinterpret_cast<JSValue*>(obj); } ACCESSORS(JSDate, value, Object, kValueOffset) ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset) ACCESSORS(JSDate, year, Object, kYearOffset) ACCESSORS(JSDate, month, Object, kMonthOffset) ACCESSORS(JSDate, day, Object, kDayOffset) ACCESSORS(JSDate, weekday, Object, kWeekdayOffset) ACCESSORS(JSDate, hour, Object, kHourOffset) ACCESSORS(JSDate, min, Object, kMinOffset) ACCESSORS(JSDate, sec, Object, kSecOffset) JSDate* JSDate::cast(Object* obj) { ASSERT(obj->IsJSDate()); ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize); return reinterpret_cast<JSDate*>(obj); } ACCESSORS(JSMessageObject, type, String, kTypeOffset) ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset) ACCESSORS(JSMessageObject, script, Object, kScriptOffset) ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset) ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset) SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset) SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset) JSMessageObject* JSMessageObject::cast(Object* obj) { ASSERT(obj->IsJSMessageObject()); ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize); return reinterpret_cast<JSMessageObject*>(obj); } INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset) INT_ACCESSORS(Code, prologue_offset, kPrologueOffset) ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset) ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset) ACCESSORS(Code,
deoptimization_data, FixedArray, kDeoptimizationDataOffset) ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset) void Code::WipeOutHeader() { WRITE_FIELD(this, kRelocationInfoOffset, NULL); WRITE_FIELD(this, kHandlerTableOffset, NULL); WRITE_FIELD(this, kDeoptimizationDataOffset, NULL); // Do not wipe out e.g. a minor key. if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) { WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL); } } Object* Code::type_feedback_info() { ASSERT(kind() == FUNCTION); return raw_type_feedback_info(); } void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) { ASSERT(kind() == FUNCTION); set_raw_type_feedback_info(value, mode); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset, value, mode); } Object* Code::next_code_link() { CHECK(kind() == OPTIMIZED_FUNCTION); return raw_type_feedback_info(); } void Code::set_next_code_link(Object* value, WriteBarrierMode mode) { CHECK(kind() == OPTIMIZED_FUNCTION); set_raw_type_feedback_info(value); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset, value, mode); } int Code::stub_info() { ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC || kind() == BINARY_OP_IC || kind() == LOAD_IC); return Smi::cast(raw_type_feedback_info())->value(); } void Code::set_stub_info(int value) { ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC || kind() == BINARY_OP_IC || kind() == STUB || kind() == LOAD_IC || kind() == KEYED_LOAD_IC || kind() == STORE_IC || kind() == KEYED_STORE_IC); set_raw_type_feedback_info(Smi::FromInt(value)); } ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset) INT_ACCESSORS(Code, ic_age, kICAgeOffset) byte* Code::instruction_start() { return FIELD_ADDR(this, kHeaderSize); } byte* Code::instruction_end() { return instruction_start() + instruction_size(); } int Code::body_size() { return RoundUp(instruction_size(), kObjectAlignment); } ByteArray* Code::unchecked_relocation_info() { return reinterpret_cast(READ_FIELD(this, kRelocationInfoOffset)); } byte* Code::relocation_start() { return unchecked_relocation_info()->GetDataStartAddress(); } int Code::relocation_size() { return unchecked_relocation_info()->length(); } byte* Code::entry() { return instruction_start(); } bool Code::contains(byte* inner_pointer) { return (address() <= inner_pointer) && (inner_pointer <= address() + Size()); } ACCESSORS(JSArray, length, Object, kLengthOffset) void* JSArrayBuffer::backing_store() { intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset); return reinterpret_cast(ptr); } void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) { intptr_t ptr = reinterpret_cast(value); WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr); } ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset) ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset) bool JSArrayBuffer::is_external() { return BooleanBit::get(flag(), kIsExternalBit); } void JSArrayBuffer::set_is_external(bool value) { set_flag(BooleanBit::set(flag(), kIsExternalBit, value)); } bool JSArrayBuffer::should_be_freed() { return BooleanBit::get(flag(), kShouldBeFreed); } void JSArrayBuffer::set_should_be_freed(bool value) { set_flag(BooleanBit::set(flag(), kShouldBeFreed, value)); } ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset) ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset) ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset) ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset) ACCESSORS(JSArrayBufferView, 
byte_length, Object, kByteLengthOffset) ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset) ACCESSORS(JSTypedArray, length, Object, kLengthOffset) ACCESSORS(JSRegExp, data, Object, kDataOffset) JSRegExp::Type JSRegExp::TypeTag() { Object* data = this->data(); if (data->IsUndefined()) return JSRegExp::NOT_COMPILED; Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex)); return static_cast(smi->value()); } int JSRegExp::CaptureCount() { switch (TypeTag()) { case ATOM: return 0; case IRREGEXP: return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value(); default: UNREACHABLE(); return -1; } } JSRegExp::Flags JSRegExp::GetFlags() { ASSERT(this->data()->IsFixedArray()); Object* data = this->data(); Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex)); return Flags(smi->value()); } String* JSRegExp::Pattern() { ASSERT(this->data()->IsFixedArray()); Object* data = this->data(); String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex)); return pattern; } Object* JSRegExp::DataAt(int index) { ASSERT(TypeTag() != NOT_COMPILED); return FixedArray::cast(data())->get(index); } void JSRegExp::SetDataAt(int index, Object* value) { ASSERT(TypeTag() != NOT_COMPILED); ASSERT(index >= kDataIndex); // Only implementation data can be set this way. FixedArray::cast(data())->set(index, value); } ElementsKind JSObject::GetElementsKind() { ElementsKind kind = map()->elements_kind(); #if DEBUG FixedArrayBase* fixed_array = reinterpret_cast(READ_FIELD(this, kElementsOffset)); // If a GC was caused while constructing this object, the elements // pointer may point to a one pointer filler map. if (ElementsAreSafeToExamine()) { Map* map = fixed_array->map(); ASSERT((IsFastSmiOrObjectElementsKind(kind) && (map == GetHeap()->fixed_array_map() || map == GetHeap()->fixed_cow_array_map())) || (IsFastDoubleElementsKind(kind) && (fixed_array->IsFixedDoubleArray() || fixed_array == GetHeap()->empty_fixed_array())) || (kind == DICTIONARY_ELEMENTS && fixed_array->IsFixedArray() && fixed_array->IsDictionary()) || (kind > DICTIONARY_ELEMENTS)); ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) || (elements()->IsFixedArray() && elements()->length() >= 2)); } #endif return kind; } ElementsAccessor* JSObject::GetElementsAccessor() { return ElementsAccessor::ForKind(GetElementsKind()); } bool JSObject::HasFastObjectElements() { return IsFastObjectElementsKind(GetElementsKind()); } bool JSObject::HasFastSmiElements() { return IsFastSmiElementsKind(GetElementsKind()); } bool JSObject::HasFastSmiOrObjectElements() { return IsFastSmiOrObjectElementsKind(GetElementsKind()); } bool JSObject::HasFastDoubleElements() { return IsFastDoubleElementsKind(GetElementsKind()); } bool JSObject::HasFastHoleyElements() { return IsFastHoleyElementsKind(GetElementsKind()); } bool JSObject::HasFastElements() { return IsFastElementsKind(GetElementsKind()); } bool JSObject::HasDictionaryElements() { return GetElementsKind() == DICTIONARY_ELEMENTS; } bool JSObject::HasNonStrictArgumentsElements() { return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS; } bool JSObject::HasExternalArrayElements() { HeapObject* array = elements(); ASSERT(array != NULL); return array->IsExternalArray(); } #define EXTERNAL_ELEMENTS_CHECK(name, type) \ bool JSObject::HasExternal##name##Elements() { \ HeapObject* array = elements(); \ ASSERT(array != NULL); \ if (!array->IsHeapObject()) \ return false; \ return array->map()->instance_type() == type; \ } EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE) 
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedShort, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(UnsignedInt, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Float, EXTERNAL_FLOAT_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Double, EXTERNAL_DOUBLE_ARRAY_TYPE) EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE) bool JSObject::HasNamedInterceptor() { return map()->has_named_interceptor(); } bool JSObject::HasIndexedInterceptor() { return map()->has_indexed_interceptor(); } MaybeObject* JSObject::EnsureWritableFastElements() { ASSERT(HasFastSmiOrObjectElements()); FixedArray* elems = FixedArray::cast(elements()); Isolate* isolate = GetIsolate(); if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; Object* writable_elems; { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap( elems, isolate->heap()->fixed_array_map()); if (!maybe_writable_elems->ToObject(&writable_elems)) { return maybe_writable_elems; } } set_elements(FixedArray::cast(writable_elems)); isolate->counters()->cow_arrays_converted()->Increment(); return writable_elems; } NameDictionary* JSObject::property_dictionary() { ASSERT(!HasFastProperties()); return NameDictionary::cast(properties()); } SeededNumberDictionary* JSObject::element_dictionary() { ASSERT(HasDictionaryElements()); return SeededNumberDictionary::cast(elements()); } bool Name::IsHashFieldComputed(uint32_t field) { return (field & kHashNotComputedMask) == 0; } bool Name::HasHashCode() { return IsHashFieldComputed(hash_field()); } uint32_t Name::Hash() { // Fast case: has hash code already been computed? uint32_t field = hash_field(); if (IsHashFieldComputed(field)) return field >> kHashShift; // Slow case: compute hash code and set it. Has to be a string. return String::cast(this)->ComputeAndSetHash(); } StringHasher::StringHasher(int length, uint32_t seed) : length_(length), raw_running_hash_(seed), array_index_(0), is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize), is_first_char_(true) { ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0); } bool StringHasher::has_trivial_hash() { return length_ > String::kMaxHashCalcLength; } uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) { running_hash += c; running_hash += (running_hash << 10); running_hash ^= (running_hash >> 6); return running_hash; } uint32_t StringHasher::GetHashCore(uint32_t running_hash) { running_hash += (running_hash << 3); running_hash ^= (running_hash >> 11); running_hash += (running_hash << 15); if ((running_hash & String::kHashBitMask) == 0) { return kZeroHash; } return running_hash; } void StringHasher::AddCharacter(uint16_t c) { // Use the Jenkins one-at-a-time hash function to update the hash // for the given character. 
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}

bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    // An index with more than one digit must not start with '0'.
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // Overflow guard: stop if appending this digit could push the value past
  // the largest valid array index ((d + 2) >> 3 is 1 for d >= 6, else 0).
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}

template <typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}

template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
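// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): UpdateIndex above
// incrementally parses the characters as a decimal array index while they are
// being hashed. A standalone version of the same parse (name hypothetical),
// with the same leading-zero rule but an explicit overflow guard in place of
// the shifted-constant trick, would look like:
//
//   bool ParseArrayIndex(const uint16_t* chars, int length, uint32_t* index) {
//     if (length == 0 || length > 10) return false;     // 2^32-2 has 10 digits
//     if (chars[0] == '0' && length > 1) return false;  // no leading zeros
//     uint32_t result = 0;
//     for (int i = 0; i < length; i++) {
//       if (chars[i] < '0' || chars[i] > '9') return false;
//       uint32_t d = chars[i] - '0';
//       // Reject once result * 10 + d would pass the largest valid index.
//       if (result > (4294967294u - d) / 10) return false;
//       result = result * 10 + d;
//     }
//     *index = result;
//     return true;
//   }
// ---------------------------------------------------------------------------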
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}

bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}

Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}

Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}

bool JSReceiver::HasProperty(Handle<JSReceiver> object, Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetPropertyAttribute(*name) != ABSENT;
}

bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
                                  Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetLocalPropertyAttribute(*name) != ABSENT;
}

PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
  uint32_t index;
  if (IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(index);
  }
  return GetPropertyAttributeWithReceiver(this, key);
}

PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, true);
}

bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
}

bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
  return GetPrototype() != global;
}

Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}

Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}

bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, true) != ABSENT;
}

bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, false) != ABSENT;
}

PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, false);
}

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}

void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}

bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}

void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}

bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}

void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}

PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}

void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}

bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
}

void AccessorPair::set_access_flags(v8::AccessControl access_control) {
  int current = access_flags()->value();
  current = BooleanBit::set(current,
                            kProhibitsOverwritingBit,
                            access_control & PROHIBITS_OVERWRITING);
  current = BooleanBit::set(current,
                            kAllCanReadBit,
                            access_control & ALL_CAN_READ);
  current = BooleanBit::set(current,
                            kAllCanWriteBit,
                            access_control & ALL_CAN_WRITE);
  set_access_flags(Smi::FromInt(current));
}

bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}

bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}

bool AccessorPair::prohibits_overwriting() {
  return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
}

template <typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}

template <typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index + 1, value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}

bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}
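// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the SetEntry
// specializations above store every dictionary entry as a (key, value,
// details) triple in consecutive FixedArray slots, which is why three set()
// calls touch index, index + 1, and index + 2. With a hypothetical table
// header of kHeaderSlots slots, the slot arithmetic is simply:
//
//   int EntryToIndexExample(int entry) {
//     const int kHeaderSlots = 3;  // hypothetical header size
//     const int kEntrySize = 3;    // key + value + details
//     return kHeaderSlots + entry * kEntrySize;
//   }
//
// so entry 0 occupies slots 3..5, entry 1 slots 6..8, and so on.
// ---------------------------------------------------------------------------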
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}

uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
  return heap->NumberFromUint32(key);
}

bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}

uint32_t NameDictionaryShape::Hash(Name* key) {
  return key->Hash();
}

uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
  return Name::cast(other)->Hash();
}

MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
  ASSERT(key->IsUniqueName());
  return key;
}

template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}

template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  return Smi::cast(key->GetHash())->value();
}

template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  return Smi::cast(other->GetHash())->value();
}

template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
                                                       Object* key) {
  return key;
}

template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
  intptr_t hash = reinterpret_cast<intptr_t>(key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}

template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}

template <int entrysize>
MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap, Object* key) {
  return key;
}

void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}

void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
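// ---------------------------------------------------------------------------
// Illustrative note (not part of the original file): EnsureSize above grows
// the backing store to required_size plus one eighth of slack. For example, a
// request for 80 elements allocates 80 + (80 >> 3) = 90 slots, so a run of
// single-element appends does not reallocate on every push, while doubling
// would waste considerably more space for large arrays.
// ---------------------------------------------------------------------------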
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}

bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}

MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(GetElementsKind())) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(GetElementsKind()) ||
           (IsFastSmiElementsKind(GetElementsKind()) &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}

MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}

MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}

MaybeObject* ConstantPoolArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyConstantPoolArray(this);
}

// TypeFeedbackCells store (cell, AST id) pairs: the cell for entry i lives at
// slot 2 * i and its AST id at slot 2 * i + 1.
void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
  set(1 + index * 2, Smi::FromInt(id.ToInt()));
}

TypeFeedbackId TypeFeedbackCells::AstId(int index) {
  return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}

void TypeFeedbackCells::SetCell(int index, Cell* cell) {
  set(index * 2, cell);
}

Cell* TypeFeedbackCells::GetCell(int index) {
  return Cell::cast(get(index * 2));
}

Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}

Handle